text
stringlengths 2
99.9k
| meta
dict |
|---|---|
# Next.js
Next.js provides an [official plugin][next-plugin] to simplify MDX importing
into your project.
```shell
npm install --save @next/mdx @mdx-js/loader
```
To configure MDX, add the following to your `next.config.js`:
```js
const withMDX = require('@next/mdx')()
module.exports = withMDX()
```
### Treat `.mdx` files as pages
To have Next.js treat `.mdx` files in the pages directory as pages, use the `pageExtensions` property:
```js
// next.config.js
const withMDX = require('@next/mdx')({
extension: /\.mdx?$/,
})
module.exports = withMDX({
pageExtensions: ['js', 'jsx', 'mdx'],
})
```
### Use MDX for `.md` files
The Next.js MDX plugin also allows you to use MDX parsing for `.md` files:
```js
const withMDX = require('@next/mdx')({
extension: /\.mdx?$/
})
module.exports = withMDX({
pageExtensions: ['js', 'jsx', 'md', 'mdx']
})
```
### Providing MDX Plugins
In `next.config.js`, you can also provide MDX plugins from remark and rehype:
```js
const withMDX = require('@next/mdx')({
options: {
remarkPlugins: [],
rehypePlugins: []
}
})
module.exports = withMDX()
```
### TypeScript support
```js
const withMDX = require('@next/mdx')({
extension: /\.mdx?$/
})
module.exports = withMDX({
pageExtensions: ['js', 'jsx', 'ts', 'tsx', 'md', 'mdx']
})
```
[next-plugin]: https://github.com/vercel/next.js/tree/canary/packages/next-mdx
|
{
"pile_set_name": "Github"
}
|
using System.Drawing;
using ApiExamples.TestData.TestClasses;
namespace ApiExamples.TestData.TestBuilders
{
    /// <summary>
    /// Fluent test-data builder for <see cref="ColorItemTestClass"/>.
    /// Starts from neutral defaults (black color, all values 1.0) and lets a
    /// test override only the fields it cares about before calling
    /// <see cref="Build"/>. All "With" methods return the builder for chaining.
    /// </summary>
    public class ColorItemTestBuilder
    {
        public string Name;
        public Color Color;
        public int ColorCode;
        public double Value1;
        public double Value2;
        public double Value3;

        /// <summary>Initializes the builder with its default test values.</summary>
        public ColorItemTestBuilder()
        {
            this.Name = "DefaultName";
            this.Color = Color.Black;
            this.ColorCode = Color.Black.ToArgb();
            this.Value1 = 1.0;
            this.Value2 = 1.0;
            this.Value3 = 1.0;
        }

        /// <summary>Sets the item name and color.</summary>
        public ColorItemTestBuilder WithColor(string name, Color color)
        {
            this.Name = name;
            this.Color = color;
            return this;
        }

        /// <summary>Sets the item name and ARGB color code.</summary>
        public ColorItemTestBuilder WithColorCode(string name, int colorCode)
        {
            this.Name = name;
            this.ColorCode = colorCode;
            return this;
        }

        /// <summary>Sets the item name, color, and the three numeric values.</summary>
        public ColorItemTestBuilder WithColorAndValues(string name, Color color, double value1, double value2,
            double value3)
        {
            // Reuse WithColor for the shared name/color assignments.
            WithColor(name, color);
            this.Value1 = value1;
            this.Value2 = value2;
            this.Value3 = value3;
            return this;
        }

        /// <summary>Sets the item name, ARGB color code, and the three numeric values.</summary>
        public ColorItemTestBuilder WithColorCodeAndValues(string name, int colorCode, double value1, double value2,
            double value3)
        {
            // Reuse WithColorCode for the shared name/code assignments.
            WithColorCode(name, colorCode);
            this.Value1 = value1;
            this.Value2 = value2;
            this.Value3 = value3;
            return this;
        }

        /// <summary>Materializes the currently configured values into a new item.</summary>
        public ColorItemTestClass Build()
        {
            return new ColorItemTestClass(Name, Color, ColorCode, Value1, Value2, Value3);
        }
    }
}
|
{
"pile_set_name": "Github"
}
|
#ifndef ASSIMP_REVISION_H_INC
#define ASSIMP_REVISION_H_INC

/* Build-time Git metadata for this copy of Assimp. GitVersion 0x0 indicates
 * no Git revision was available when this header was produced (presumably
 * generated by the build system from a template — verify). */
#define GitVersion 0x0
#define GitBranch "master"

#endif // ASSIMP_REVISION_H_INC
|
{
"pile_set_name": "Github"
}
|
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#import <objc/NSObject.h>
#import <AppKit/_NSCGSWindowOrderingState-Protocol.h>
@class NSString;

// Private AppKit class that applies window-ordering operations via the window
// server. This is a class-dump-generated header: only declarations are
// available, so method semantics below are inferred from names alone and
// should be verified against the actual implementation.
__attribute__((visibility("hidden")))
@interface _NSCGSWindowServerOrderingState : NSObject <_NSCGSWindowOrderingState>
{
    // NOTE(review): presumably the pending SkyLight transaction that -commit
    // submits and -dealloc releases — confirm against disassembly.
    struct _SLSTransaction *_txn;
}

- (void)conditionallyOrderGroupFront:(id)arg1 withTimestamp:(double)arg2;
- (void)orderApplicationWindowsFront;
- (void)orderGroup:(id)arg1 op:(int)arg2 againstWindow:(id)arg3;
- (void)clearOrderingGroup:(id)arg1;
- (void)removeWindowFromOrderingGroup:(id)arg1;
- (void)addWindow:(id)arg1 toOrderingGroupBelowWindow:(id)arg2;
- (void)addWindow:(id)arg1 toOrderingGroupAboveWindow:(id)arg2;
- (void)unlockWindowSublevel:(id)arg1;
- (void)lockWindow:(id)arg1 toAbsoluteSublevel:(int)arg2;
- (void)reassociateWithSpacesByGeometry:(id)arg1;
- (void)disassociateFromSpacesIfOrderedOut:(id)arg1;
- (void)setDesiredSpace:(unsigned long long)arg1 forWindow:(id)arg2;
- (void)setLevel:(int)arg1 forWindow:(id)arg2;
- (void)orderWindow:(id)arg1 op:(int)arg2 againstWindow:(id)arg3;
- (void)commit;
- (void)dealloc;
- (id)init;

// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
|
{
"pile_set_name": "Github"
}
|
//===- ARCRegisterInfo.cpp - ARC Register Information -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the ARC implementation of the MRegisterInfo class.
//
//===----------------------------------------------------------------------===//
#include "ARCRegisterInfo.h"
#include "ARC.h"
#include "ARCInstrInfo.h"
#include "ARCMachineFunctionInfo.h"
#include "ARCSubtarget.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/IR/Function.h"
#include "llvm/Support/Debug.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
using namespace llvm;
#define DEBUG_TYPE "arc-reg-info"
#define GET_REGINFO_TARGET_DESC
#include "ARCGenRegisterInfo.inc"
// Rewrite the frame-index instruction at II into a concrete load/store/add
// relative to FrameReg + Offset, then erase the original instruction.
// Offsets outside the signed-9-bit range [-256, 255] are handled either by
// switching a load to its limm form, or by first materializing
// BaseReg = FrameReg + Offset into a free/scavenged scratch register.
static void ReplaceFrameIndex(MachineBasicBlock::iterator II,
                              const ARCInstrInfo &TII, unsigned Reg,
                              unsigned FrameReg, int Offset, int StackSize,
                              int ObjSize, RegScavenger *RS, int SPAdj) {
  assert(RS && "Need register scavenger.");
  MachineInstr &MI = *II;
  MachineBasicBlock &MBB = *MI.getParent();
  DebugLoc dl = MI.getDebugLoc();
  unsigned BaseReg = FrameReg;
  unsigned KillState = 0;
  if (MI.getOpcode() == ARC::LD_rs9 && (Offset >= 256 || Offset < -256)) {
    // Loads can always be reached with LD_rlimm.
    BuildMI(MBB, II, dl, TII.get(ARC::LD_rlimm), Reg)
        .addReg(BaseReg)
        .addImm(Offset)
        .addMemOperand(*MI.memoperands_begin());
    MBB.erase(II);
    return;
  }
  if (MI.getOpcode() != ARC::GETFI && (Offset >= 256 || Offset < -256)) {
    // We need to use a scratch register to reach the far-away frame indexes.
    BaseReg = RS->FindUnusedReg(&ARC::GPR32RegClass);
    if (!BaseReg) {
      // No free register: scavenge one instead.
      // We can be sure that the scavenged-register slot is within the range
      // of the load offset.
      const TargetRegisterInfo *TRI =
          MBB.getParent()->getSubtarget().getRegisterInfo();
      BaseReg = RS->scavengeRegister(&ARC::GPR32RegClass, II, SPAdj);
      assert(BaseReg && "Register scavenging failed.");
      LLVM_DEBUG(dbgs() << "Scavenged register " << printReg(BaseReg, TRI)
                        << " for FrameReg=" << printReg(FrameReg, TRI)
                        << "+Offset=" << Offset << "\n");
      (void)TRI;
      RS->setRegUsed(BaseReg);
    }
    // Fold the out-of-range offset into BaseReg, then address with offset 0.
    unsigned AddOpc = isUInt<6>(Offset) ? ARC::ADD_rru6 : ARC::ADD_rrlimm;
    BuildMI(MBB, II, dl, TII.get(AddOpc))
        .addReg(BaseReg, RegState::Define)
        .addReg(FrameReg)
        .addImm(Offset);
    Offset = 0;
    // The scratch base register dies at the rewritten memory access.
    KillState = RegState::Kill;
  }
  switch (MI.getOpcode()) {
  case ARC::LD_rs9:
    assert((Offset % 4 == 0) && "LD needs 4 byte alignment.");
    // Intentional fallthrough: after the stricter alignment check, word
    // loads share the rebuild code below with the narrower loads.
  case ARC::LDH_rs9:
  case ARC::LDH_X_rs9:
    assert((Offset % 2 == 0) && "LDH needs 2 byte alignment.");
    // Intentional fallthrough into the common load-rebuild code.
  case ARC::LDB_rs9:
  case ARC::LDB_X_rs9:
    LLVM_DEBUG(dbgs() << "Building LDFI\n");
    BuildMI(MBB, II, dl, TII.get(MI.getOpcode()), Reg)
        .addReg(BaseReg, KillState)
        .addImm(Offset)
        .addMemOperand(*MI.memoperands_begin());
    break;
  case ARC::ST_rs9:
    assert((Offset % 4 == 0) && "ST needs 4 byte alignment.");
    // Intentional fallthrough (same pattern as the load cases above).
  case ARC::STH_rs9:
    assert((Offset % 2 == 0) && "STH needs 2 byte alignment.");
    // Intentional fallthrough into the common store-rebuild code.
  case ARC::STB_rs9:
    LLVM_DEBUG(dbgs() << "Building STFI\n");
    BuildMI(MBB, II, dl, TII.get(MI.getOpcode()))
        .addReg(Reg, getKillRegState(MI.getOperand(0).isKill()))
        .addReg(BaseReg, KillState)
        .addImm(Offset)
        .addMemOperand(*MI.memoperands_begin());
    break;
  case ARC::GETFI:
    // GETFI materializes the frame address itself: Reg = FrameReg + Offset.
    LLVM_DEBUG(dbgs() << "Building GETFI\n");
    BuildMI(MBB, II, dl,
            TII.get(isUInt<6>(Offset) ? ARC::ADD_rru6 : ARC::ADD_rrlimm))
        .addReg(Reg, RegState::Define)
        .addReg(FrameReg)
        .addImm(Offset);
    break;
  default:
    llvm_unreachable("Unhandled opcode.");
  }
  // Erase old instruction.
  MBB.erase(II);
}
// Construct the register info, passing BLINK to the TableGen-generated base
// class (presumably as the return-address register — per LLVM convention).
ARCRegisterInfo::ARCRegisterInfo() : ARCGenRegisterInfo(ARC::BLINK) {}

// Frame moves (CFI) are required when debug info is present or when the
// function needs an unwind table entry.
bool ARCRegisterInfo::needsFrameMoves(const MachineFunction &MF) {
  return MF.getMMI().hasDebugInfo() || MF.getFunction().needsUnwindTableEntry();
}

// Callee-saved register list, from the TableGen-generated CSR_ARC_SaveList.
const MCPhysReg *
ARCRegisterInfo::getCalleeSavedRegs(const MachineFunction *MF) const {
  return CSR_ARC_SaveList;
}
// Registers the allocator must never hand out: interrupt link, stack/global/
// frame pointers, R25, and the return-address register.
BitVector ARCRegisterInfo::getReservedRegs(const MachineFunction &MF) const {
  BitVector Reserved(getNumRegs());
  for (unsigned Reg :
       {ARC::ILINK, ARC::SP, ARC::GP, ARC::R25, ARC::BLINK, ARC::FP})
    Reserved.set(Reg);
  return Reserved;
}
// Always enable the register scavenger: eliminateFrameIndex may need a
// scratch register for out-of-range frame offsets.
bool ARCRegisterInfo::requiresRegisterScavenging(
    const MachineFunction &MF) const {
  return true;
}

// Keep liveness information tracked after register allocation.
bool ARCRegisterInfo::trackLivenessAfterRegAlloc(
    const MachineFunction &MF) const {
  return true;
}

// Index the scavenging spill slot from the frame pointer.
bool ARCRegisterInfo::useFPForScavengingIndex(const MachineFunction &MF) const {
  return true;
}
// Replace the abstract frame-index operand of the instruction at II with a
// concrete frame-register + offset pair, delegating the actual instruction
// rewrite to ReplaceFrameIndex. DBG_VALUEs are patched in place.
void ARCRegisterInfo::eliminateFrameIndex(MachineBasicBlock::iterator II,
                                          int SPAdj, unsigned FIOperandNum,
                                          RegScavenger *RS) const {
  assert(SPAdj == 0 && "Unexpected");
  MachineInstr &MI = *II;
  MachineOperand &FrameOp = MI.getOperand(FIOperandNum);
  int FrameIndex = FrameOp.getIndex();
  MachineFunction &MF = *MI.getParent()->getParent();
  const ARCInstrInfo &TII = *MF.getSubtarget<ARCSubtarget>().getInstrInfo();
  const ARCFrameLowering *TFI = getFrameLowering(MF);
  int Offset = MF.getFrameInfo().getObjectOffset(FrameIndex);
  int ObjSize = MF.getFrameInfo().getObjectSize(FrameIndex);
  int StackSize = MF.getFrameInfo().getStackSize();
  int LocalFrameSize = MF.getFrameInfo().getLocalFrameSize();
  LLVM_DEBUG(dbgs() << "\nFunction : " << MF.getName() << "\n");
  LLVM_DEBUG(dbgs() << "<--------->\n");
  LLVM_DEBUG(dbgs() << MI << "\n");
  LLVM_DEBUG(dbgs() << "FrameIndex         : " << FrameIndex << "\n");
  LLVM_DEBUG(dbgs() << "ObjSize            : " << ObjSize << "\n");
  LLVM_DEBUG(dbgs() << "FrameOffset        : " << Offset << "\n");
  LLVM_DEBUG(dbgs() << "StackSize          : " << StackSize << "\n");
  LLVM_DEBUG(dbgs() << "LocalFrameSize     : " << LocalFrameSize << "\n");
  (void)LocalFrameSize; // Only used by the debug output above.
  // Special handling of DBG_VALUE instructions.
  if (MI.isDebugValue()) {
    unsigned FrameReg = getFrameRegister(MF);
    MI.getOperand(FIOperandNum).ChangeToRegister(FrameReg, false /*isDef*/);
    MI.getOperand(FIOperandNum + 1).ChangeToImmediate(Offset);
    return;
  }
  // fold constant into offset.
  Offset += MI.getOperand(FIOperandNum + 1).getImm();
  // TODO: assert based on the load type:
  // ldb needs no alignment,
  // ldh needs 2 byte alignment
  // ld needs 4 byte alignment
  LLVM_DEBUG(dbgs() << "Offset             : " << Offset << "\n"
                    << "<--------->\n");
  unsigned Reg = MI.getOperand(0).getReg();
  assert(ARC::GPR32RegClass.contains(Reg) && "Unexpected register operand");
  if (!TFI->hasFP(MF)) {
    // SP-relative addressing: translate the FP-relative object offset.
    Offset = StackSize + Offset;
    if (FrameIndex >= 0)
      assert((Offset >= 0 && Offset < StackSize) && "SP Offset not in bounds.");
  } else {
    // FP-relative addressing: fixed objects (FrameIndex >= 0) lie below FP.
    if (FrameIndex >= 0) {
      assert((Offset < 0 && -Offset <= StackSize) &&
             "FP Offset not in bounds.");
    }
  }
  ReplaceFrameIndex(II, TII, Reg, getFrameRegister(MF), Offset, StackSize,
                    ObjSize, RS, SPAdj);
}
// Frame accesses go through FP when this function keeps a frame pointer,
// and through SP otherwise.
unsigned ARCRegisterInfo::getFrameRegister(const MachineFunction &MF) const {
  const ARCFrameLowering *TFI = getFrameLowering(MF);
  if (TFI->hasFP(MF))
    return ARC::FP;
  return ARC::SP;
}
// Call-preserved register mask from TableGen; the same mask is used for
// every calling convention (CC is ignored).
const uint32_t *
ARCRegisterInfo::getCallPreservedMask(const MachineFunction &MF,
                                      CallingConv::ID CC) const {
  return CSR_ARC_RegMask;
}
|
{
"pile_set_name": "Github"
}
|
// +build !darwin
package dbus
import (
	"bytes"
	"errors"
	"fmt"
	"os"
	"os/exec"
	"strings"
)
const defaultSystemBusAddress = "unix:path=/var/run/dbus/system_bus_socket"
// getSessionBusPlatformAddress launches dbus-launch and parses the first
// NAME=value line of its output. The variable is exported into this
// process's environment and the address value is returned.
func getSessionBusPlatformAddress() (string, error) {
	out, err := exec.Command("dbus-launch").CombinedOutput()
	if err != nil {
		return "", err
	}
	eq := bytes.IndexByte(out, '=')
	nl := bytes.IndexByte(out, '\n')
	if eq == -1 || nl == -1 {
		return "", errors.New("dbus: couldn't determine address of session bus")
	}
	name, addr := string(out[:eq]), string(out[eq+1:nl])
	os.Setenv(name, addr)
	return addr, nil
}
// getSystemBusPlatformAddress returns the D-Bus system bus address.
//
// Per the D-Bus specification, DBUS_SYSTEM_BUS_ADDRESS holds a complete bus
// address (e.g. "unix:path=/var/run/dbus/system_bus_socket"). The previous
// implementation unconditionally wrapped the value in "unix:path=%s", which
// corrupted well-formed addresses. Values containing '=' are now passed
// through unchanged; bare socket paths are still wrapped for backward
// compatibility with callers that relied on the old behavior.
func getSystemBusPlatformAddress() string {
	address := os.Getenv("DBUS_SYSTEM_BUS_ADDRESS")
	if address == "" {
		return defaultSystemBusAddress
	}
	if strings.ContainsRune(address, '=') {
		// Already a well-formed D-Bus address such as "unix:path=...".
		return address
	}
	// Legacy behavior: interpret the value as a plain socket path.
	return fmt.Sprintf("unix:path=%s", address)
}
|
{
"pile_set_name": "Github"
}
|
/* Copyright (C) 2017 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
/**
* \file
*
* \author Napatech Inc.
* \author Phil Young <py@napatech.com>
*
*
*/
#include "suricata-common.h"
#ifdef HAVE_NAPATECH
#include "suricata.h"
#include "util-device.h"
#include "util-cpu.h"
#include "util-byte.h"
#include "threadvars.h"
#include "tm-threads.h"
#include "util-napatech.h"
#include "source-napatech.h"
#ifdef NAPATECH_ENABLE_BYPASS
/*
* counters to track the number of flows programmed on
* the adapter.
*/
typedef struct FlowStatsCounters_
{
    uint16_t active_bypass_flows; /* counter ID from StatsRegisterCounter */
    uint16_t total_bypass_flows;  /* counter ID from StatsRegisterCounter */
} FlowStatsCounters;
/* Nonzero once NapatechVerifyBypassSupport() has confirmed that every
 * adapter accepts a FlowStream; zero otherwise. */
static int bypass_supported;

/**
 * \brief Reports whether hardware bypass support was detected.
 *
 * \return nonzero if bypass is supported (as set by
 *         NapatechVerifyBypassSupport()); zero otherwise.
 */
int NapatechIsBypassSupported(void)
{
    return bypass_supported;
}
/**
* \brief Returns the number of Napatech Adapters in the system.
*
* \return count of the Napatech adapters present in the system.
*/
int NapatechGetNumAdapters(void)
{
    NtInfoStream_t hInfo;
    NtInfo_t hInfoSys;
    int status;
    /* Cached after the first successful query; -1 means "not read yet".
     * NOTE(review): the static cache is unsynchronized — presumably this is
     * first called before worker threads start; verify. */
    static int num_adapters = -1;

    if (num_adapters == -1) {
        if ((status = NT_InfoOpen(&hInfo, "InfoStream")) != NT_SUCCESS) {
            /* Treated as unrecoverable: the NT driver is unusable. */
            NAPATECH_ERROR(SC_ERR_NAPATECH_OPEN_FAILED, status);
            exit(EXIT_FAILURE);
        }

        hInfoSys.cmd = NT_INFO_CMD_READ_SYSTEM;
        if ((status = NT_InfoRead(hInfo, &hInfoSys)) != NT_SUCCESS) {
            NAPATECH_ERROR(SC_ERR_NAPATECH_OPEN_FAILED, status);
            exit(EXIT_FAILURE);
        }

        num_adapters = hInfoSys.u.system.data.numAdapters;
        NT_InfoClose(hInfo);
    }

    return num_adapters;
}
/**
* \brief Verifies that the Napatech adapters support bypass.
*
* Attempts to opens a FlowStream on each adapter present in the system.
* If successful then bypass is supported
*
* \return 1 if Bypass functionality is supported; zero otherwise.
*/
int NapatechVerifyBypassSupport(void)
{
    int status;
    int adapter = 0;
    int num_adapters = NapatechGetNumAdapters();
    SCLogInfo("Found %d Napatech adapters.", num_adapters);
    NtFlowStream_t hFlowStream;

    if (!NapatechUseHWBypass()) {
        /* HW Bypass is disabled in the conf file */
        return 0;
    }

    /* Probe every adapter: bypass is only reported as supported if a
     * FlowStream can be opened on all of them. */
    for (adapter = 0; adapter < num_adapters; ++adapter) {
        NtFlowAttr_t attr;
        char flow_name[80];

        NT_FlowOpenAttrInit(&attr);
        NT_FlowOpenAttrSetAdapterNo(&attr, adapter);

        snprintf(flow_name, sizeof(flow_name), "Flow stream %d", adapter );
        SCLogInfo("Opening flow programming stream: %s\n", flow_name);
        if ((status = NT_FlowOpen_Attr(&hFlowStream, flow_name, &attr)) != NT_SUCCESS) {
            /* First failure disables bypass for the whole system. */
            SCLogWarning(SC_WARN_COMPATIBILITY, "Napatech bypass functionality not supported by the FPGA version on adapter %d - disabling support.", adapter);
            bypass_supported = 0;
            return 0;
        }
        /* The stream was only needed for the probe; close it again. */
        NT_FlowClose(hFlowStream);
    }

    bypass_supported = 1;
    return bypass_supported;
}
/**
* \brief Updates statistic counters for Napatech FlowStats
*
* \param tv Thread variable to ThreadVars
* \param hInfo Handle to the Napatech InfoStream.
* \param hstat_stream Handle to the Napatech Statistics Stream.
* \param flow_counters The flow counters statistics to update.
* \param clear_stats Indicates if statistics on the card should be reset to zero.
*
*/
static void UpdateFlowStats(
        ThreadVars *tv,
        NtInfoStream_t hInfo,
        NtStatStream_t hstat_stream,
        FlowStatsCounters flow_counters,
        int clear_stats
        )
{
    NtStatistics_t hStat;
    int status;

    uint64_t programed = 0;
    uint64_t removed = 0;
    int adapter = 0;

    for (adapter = 0; adapter < NapatechGetNumAdapters(); ++adapter) {
        hStat.cmd = NT_STATISTICS_READ_CMD_FLOW_V0;
        hStat.u.flowData_v0.clear = clear_stats;
        hStat.u.flowData_v0.adapterNo = adapter;
        if ((status = NT_StatRead(hstat_stream, &hStat)) != NT_SUCCESS) {
            NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
            exit(1);
        }
        /* NOTE(review): plain assignment overwrites the previous adapter's
         * numbers on every iteration, so only the last adapter is reflected
         * in the counters below. If multi-adapter totals are intended, these
         * presumably should accumulate with "+=" — verify against upstream. */
        programed = hStat.u.flowData_v0.learnDone;
        removed = hStat.u.flowData_v0.unlearnDone
                + hStat.u.flowData_v0.automaticUnlearnDone
                + hStat.u.flowData_v0.timeoutUnlearnDone;
    }

    StatsSetUI64(tv, flow_counters.active_bypass_flows, programed - removed);
    StatsSetUI64(tv, flow_counters.total_bypass_flows, programed);
}
#endif /* NAPATECH_ENABLE_BYPASS */
/*-----------------------------------------------------------------------------
*-----------------------------------------------------------------------------
* Statistics code
*-----------------------------------------------------------------------------
*/
/* Per-category counter IDs, as returned by StatsRegisterCounter(). */
typedef struct PacketCounters_
{
    uint16_t pkts;
    uint16_t byte;
    uint16_t drop_pkts;
    uint16_t drop_byte;
} PacketCounters;

/* Aggregated totals across all streams, and the latest per-stream snapshot
 * (indexed by stream ID), maintained by UpdateStreamStats(). */
NapatechCurrentStats total_stats;
NapatechCurrentStats current_stats[MAX_STREAMS];

/**
 * \brief Returns the most recent statistics snapshot for a stream.
 *
 * \param id Stream ID to look up (used as an index into current_stats).
 */
NapatechCurrentStats NapatechGetCurrentStats(uint16_t id)
{
    return current_stats[id];
}

/* How stream IDs were specified in the configuration. */
enum CONFIG_SPECIFIER {
    CONFIG_SPECIFIER_UNDEFINED = 0,
    CONFIG_SPECIFIER_RANGE,
    CONFIG_SPECIFIER_INDIVIDUAL
};

#define MAX_HOSTBUFFERS 8
/**
* \brief Test to see if any of the configured streams are active
*
* \param hInfo Handle to Napatech Info Stream.
* \param hStatsStream Handle to Napatech Statistics stream
* \param stream_config array of stream configuration structures
* \param num_inst
*
*/
/* Marks each configured stream active/inactive (side effect: updates
 * stream_config[i].is_active) based on whether it has host buffers in use,
 * and returns the number of active streams. hInfo is currently unused. */
static uint16_t TestStreamConfig(
        NtInfoStream_t hInfo,
        NtStatStream_t hstat_stream,
        NapatechStreamConfig stream_config[],
        uint16_t num_inst)
{
    uint16_t num_active = 0;

    for (uint16_t inst = 0; inst < num_inst; ++inst) {
        int status;
        NtStatistics_t stat; // Stat handle.

        /* Check to see if it is an active stream */
        memset(&stat, 0, sizeof (NtStatistics_t));

        /* Read usage data for the chosen stream ID */
        stat.cmd = NT_STATISTICS_READ_CMD_USAGE_DATA_V0;
        stat.u.usageData_v0.streamid = (uint8_t) stream_config[inst].stream_id;

        if ((status = NT_StatRead(hstat_stream, &stat)) != NT_SUCCESS) {
            NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
            return 0;
        }

        /* A stream with at least one host buffer in use is considered
         * active. */
        if (stat.u.usageData_v0.data.numHostBufferUsed > 0) {
            stream_config[inst].is_active = true;
            num_active++;
        } else {
            stream_config[inst].is_active = false;
        }
    }

    return num_active;
}
/**
* \brief Updates Napatech packet counters
*
* \param tv Pointer to TheardVars structure
* \param hInfo Handle to Napatech Info Stream.
* \param hstat_stream Handle to Napatech Statistics stream
* \param num_streams the number of streams that are currently active
* \param stream_config array of stream configuration structures
* \param total_counters - cumulative count of all packets received.
* \param dispatch_host, - Count of packets that were delivered to the host buffer
* \param dispatch_drop - count of packets that were dropped as a result of a rule
* \param dispatch_fwd - count of packets forwarded out the egress port as the result of a rule
* \param is_inline - are we running in inline mode?
* \param enable_stream_stats - are per thread/stream statistics enabled.
* \param stream_counters - counters for each thread/stream configured.
*
* \return The number of active streams that were updated.
*
*/
static uint32_t UpdateStreamStats(ThreadVars *tv,
        NtInfoStream_t hInfo,
        NtStatStream_t hstat_stream,
        uint16_t num_streams,
        NapatechStreamConfig stream_config[],
        PacketCounters total_counters,
        PacketCounters dispatch_host,
        PacketCounters dispatch_drop,
        PacketCounters dispatch_fwd,
        int is_inline,
        int enable_stream_stats,
        PacketCounters stream_counters[]
        ) {
    /* Baselines captured on each stream's first reading so that reported
     * counts are relative to when this loop started observing the stream. */
    static uint64_t rxPktsStart[MAX_STREAMS] = {0};
    static uint64_t rxByteStart[MAX_STREAMS] = {0};
    static uint64_t dropPktStart[MAX_STREAMS] = {0};
    static uint64_t dropByteStart[MAX_STREAMS] = {0};

    int status;
    NtInfo_t hStreamInfo;
    NtStatistics_t hStat; // Stat handle.

    /* Query the system to get the number of streams currently instantiated */
    hStreamInfo.cmd = NT_INFO_CMD_READ_STREAM;
    if ((status = NT_InfoRead(hInfo, &hStreamInfo)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        exit(EXIT_FAILURE);
    }

    uint16_t num_active;
    if ((num_active = TestStreamConfig(hInfo, hstat_stream, stream_config, num_streams)) == 0) {
        /* None of the configured streams are active */
        return 0;
    }

    /* At least one stream is active so proceed with the stats. */
    uint16_t inst_id = 0;
    uint32_t stream_cnt = 0;
    for (stream_cnt = 0; stream_cnt < num_streams; ++stream_cnt) {
        /* Skip forward to the next active stream instance. */
        while (inst_id < num_streams) {
            if (stream_config[inst_id].is_active) {
                break;
            } else {
                ++inst_id;
            }
        }
        if (inst_id == num_streams)
            break;

        /* Read usage data for the chosen stream ID */
        memset(&hStat, 0, sizeof (NtStatistics_t));
        hStat.cmd = NT_STATISTICS_READ_CMD_USAGE_DATA_V0;
        hStat.u.usageData_v0.streamid = (uint8_t) stream_config[inst_id].stream_id;

        if ((status = NT_StatRead(hstat_stream, &hStat)) != NT_SUCCESS) {
            NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
            return 0;
        }

        uint16_t stream_id = stream_config[inst_id].stream_id;
        if (stream_config[inst_id].is_active) {
            uint64_t rx_pkts_total = 0;
            uint64_t rx_byte_total = 0;
            uint64_t drop_pkts_total = 0;
            uint64_t drop_byte_total = 0;

            /* Sum across every host buffer backing this stream. The first
             * reading only records the baseline; subsequent readings
             * accumulate the running totals. */
            for (uint32_t hbCount = 0; hbCount < hStat.u.usageData_v0.data.numHostBufferUsed; hbCount++) {
                if (unlikely(stream_config[inst_id].initialized == false)) {
                    rxPktsStart[stream_id] += hStat.u.usageData_v0.data.hb[hbCount].stat.rx.frames;
                    rxByteStart[stream_id] += hStat.u.usageData_v0.data.hb[hbCount].stat.rx.bytes;
                    dropPktStart[stream_id] += hStat.u.usageData_v0.data.hb[hbCount].stat.drop.frames;
                    dropByteStart[stream_id] += hStat.u.usageData_v0.data.hb[hbCount].stat.drop.bytes;
                    stream_config[inst_id].initialized = true;
                } else {
                    rx_pkts_total += hStat.u.usageData_v0.data.hb[hbCount].stat.rx.frames;
                    rx_byte_total += hStat.u.usageData_v0.data.hb[hbCount].stat.rx.bytes;
                    drop_pkts_total += hStat.u.usageData_v0.data.hb[hbCount].stat.drop.frames;
                    drop_byte_total += hStat.u.usageData_v0.data.hb[hbCount].stat.drop.bytes;
                }
            }

            current_stats[stream_id].current_packets = rx_pkts_total - rxPktsStart[stream_id];
            current_stats[stream_id].current_bytes = rx_byte_total - rxByteStart[stream_id];
            current_stats[stream_id].current_drop_packets = drop_pkts_total - dropPktStart[stream_id];
            current_stats[stream_id].current_drop_bytes = drop_byte_total - dropByteStart[stream_id];
        }

        if (enable_stream_stats) {
            StatsSetUI64(tv, stream_counters[inst_id].pkts, current_stats[stream_id].current_packets);
            StatsSetUI64(tv, stream_counters[inst_id].byte, current_stats[stream_id].current_bytes);
            StatsSetUI64(tv, stream_counters[inst_id].drop_pkts, current_stats[stream_id].current_drop_packets);
            StatsSetUI64(tv, stream_counters[inst_id].drop_byte, current_stats[stream_id].current_drop_bytes);
        }

        ++inst_id;
    }

    /* Fold the per-stream snapshots into the global totals. */
    uint32_t stream_id;
    for (stream_id = 0; stream_id < num_streams; ++stream_id) {
#ifndef NAPATECH_ENABLE_BYPASS
        total_stats.current_packets += current_stats[stream_id].current_packets;
        total_stats.current_bytes += current_stats[stream_id].current_bytes;
#endif /* NAPATECH_ENABLE_BYPASS */
        total_stats.current_drop_packets += current_stats[stream_id].current_drop_packets;
        total_stats.current_drop_bytes += current_stats[stream_id].current_drop_bytes;
    }

#ifndef NAPATECH_ENABLE_BYPASS
    StatsSetUI64(tv, total_counters.pkts, total_stats.current_packets);
    StatsSetUI64(tv, total_counters.byte, total_stats.current_bytes);
#endif /* NAPATECH_ENABLE_BYPASS */

    StatsSetUI64(tv, total_counters.drop_pkts, total_stats.current_drop_packets);
    StatsSetUI64(tv, total_counters.drop_byte, total_stats.current_drop_bytes);

    /* Totals are recomputed from scratch on every call. */
    total_stats.current_packets = 0;
    total_stats.current_bytes = 0;
    total_stats.current_drop_packets = 0;
    total_stats.current_drop_bytes = 0;

    /* Read usage data for the chosen stream ID */
    memset(&hStat, 0, sizeof (NtStatistics_t));

#ifdef NAPATECH_ENABLE_BYPASS
    hStat.cmd = NT_STATISTICS_READ_CMD_QUERY_V3;
    hStat.u.query_v3.clear = 0;
#else /* NAPATECH_ENABLE_BYPASS */
    /* Older versions of the API have a different structure. */
    hStat.cmd = NT_STATISTICS_READ_CMD_QUERY_V2;
    hStat.u.query_v2.clear = 0;
#endif /* !NAPATECH_ENABLE_BYPASS */

    if ((status = NT_StatRead(hstat_stream, &hStat)) != NT_SUCCESS) {
        if (status == NT_STATUS_TIMEOUT) {
            SCLogInfo("Statistics timed out - will retry next time.");
            return 0;
        } else {
            NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
            return 0;
        }
    }

#ifdef NAPATECH_ENABLE_BYPASS
    /* With bypass enabled, totals come from the per-adapter color counters
     * of the query_v3 statistics instead of the per-stream usage data. */
    int adapter = 0;
    uint64_t total_dispatch_host_pkts = 0;
    uint64_t total_dispatch_host_byte = 0;
    uint64_t total_dispatch_drop_pkts = 0;
    uint64_t total_dispatch_drop_byte = 0;
    uint64_t total_dispatch_fwd_pkts = 0;
    uint64_t total_dispatch_fwd_byte = 0;

    for (adapter = 0; adapter < NapatechGetNumAdapters(); ++adapter) {
        total_dispatch_host_pkts += hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[0].pkts;
        total_dispatch_host_byte += hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[0].octets;

        total_dispatch_drop_pkts += hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[1].pkts
                + hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[3].pkts;
        total_dispatch_drop_byte += hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[1].octets
                + hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[3].octets;

        total_dispatch_fwd_pkts += hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[2].pkts
                + hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[4].pkts;
        total_dispatch_fwd_byte += hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[2].octets
                + hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[4].octets;

        total_stats.current_packets += hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[0].pkts
                + hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[1].pkts
                + hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[2].pkts
                + hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[3].pkts;

        /* NOTE(review): plain "=" here (vs "+=" for current_packets above)
         * overwrites earlier adapters' byte counts, and color 3 is omitted
         * from the sum while it is included for packets — verify whether
         * this asymmetry is intentional. */
        total_stats.current_bytes = hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[0].octets
                + hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[1].octets
                + hStat.u.query_v3.data.adapter.aAdapters[adapter].color.aColor[2].octets;
    }

    StatsSetUI64(tv, dispatch_host.pkts, total_dispatch_host_pkts);
    StatsSetUI64(tv, dispatch_host.byte, total_dispatch_host_byte);

    StatsSetUI64(tv, dispatch_drop.pkts, total_dispatch_drop_pkts);
    StatsSetUI64(tv, dispatch_drop.byte, total_dispatch_drop_byte);

    if (is_inline) {
        StatsSetUI64(tv, dispatch_fwd.pkts, total_dispatch_fwd_pkts);
        StatsSetUI64(tv, dispatch_fwd.byte, total_dispatch_fwd_byte);
    }

    StatsSetUI64(tv, total_counters.pkts, total_stats.current_packets);
    StatsSetUI64(tv, total_counters.byte, total_stats.current_bytes);
#endif /* NAPATECH_ENABLE_BYPASS */

    return num_active;
}
/**
* \brief Statistics processing loop
*
* Instantiated on the stats thread. Periodically retrieives
* statistics from the Napatech card and updates the packet counters
*
* \param arg Pointer that is caste into a TheardVars structure
*/
static void *NapatechStatsLoop(void *arg)
{
    ThreadVars *tv = (ThreadVars *) arg;
    int status;
    NtInfoStream_t hInfo;
    NtStatStream_t hstat_stream;
    int is_inline = 0;
    int enable_stream_stats = 0;
    /* Only entries [0, stream_cnt) are registered, and only when
     * enable_stream_stats is set; other entries stay uninitialized but are
     * never read (UpdateStreamStats guards on enable_stream_stats). */
    PacketCounters stream_counters[MAX_STREAMS];

    if (ConfGetBool("napatech.inline", &is_inline) == 0) {
        is_inline = 0;
    }

    if (ConfGetBool("napatech.enable-stream-stats", &enable_stream_stats) == 0) {
        /* default is "no" */
        enable_stream_stats = 0;
    }

    NapatechStreamConfig stream_config[MAX_STREAMS];
    uint16_t stream_cnt = NapatechGetStreamConfig(stream_config);

    /* Open the info and Statistics */
    if ((status = NT_InfoOpen(&hInfo, "StatsLoopInfoStream")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        return NULL;
    }

    if ((status = NT_StatOpen(&hstat_stream, "StatsLoopStatsStream")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        return NULL;
    }

    /* Initial read with clear=1 resets the card's counters so subsequent
     * periodic reads start from zero. */
    NtStatistics_t hStat;
    memset(&hStat, 0, sizeof (NtStatistics_t));

#ifdef NAPATECH_ENABLE_BYPASS
    hStat.cmd = NT_STATISTICS_READ_CMD_QUERY_V3;
    hStat.u.query_v3.clear = 1;
#else /* NAPATECH_ENABLE_BYPASS */
    hStat.cmd = NT_STATISTICS_READ_CMD_QUERY_V2;
    hStat.u.query_v2.clear = 1;
#endif /* !NAPATECH_ENABLE_BYPASS */

    if ((status = NT_StatRead(hstat_stream, &hStat)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        /* NOTE(review): other exits return NULL; 0 is equivalent here but
         * inconsistent in style. */
        return 0;
    }

    PacketCounters total_counters;
    memset(&total_counters, 0, sizeof(total_counters));

    PacketCounters dispatch_host;
    memset(&dispatch_host, 0, sizeof(dispatch_host));

    PacketCounters dispatch_drop;
    memset(&dispatch_drop, 0, sizeof(dispatch_drop));

    PacketCounters dispatch_fwd;
    memset(&dispatch_fwd, 0, sizeof(dispatch_fwd));

    /* Register all counters before StatsSetupPrivate(). */
    total_counters.pkts = StatsRegisterCounter("napa_total.pkts", tv);
    dispatch_host.pkts = StatsRegisterCounter("napa_dispatch_host.pkts", tv);
    dispatch_drop.pkts = StatsRegisterCounter("napa_dispatch_drop.pkts", tv);
    if (is_inline) {
        dispatch_fwd.pkts = StatsRegisterCounter("napa_dispatch_fwd.pkts", tv);
    }

    total_counters.byte = StatsRegisterCounter("napa_total.byte", tv);
    dispatch_host.byte = StatsRegisterCounter("napa_dispatch_host.byte", tv);
    dispatch_drop.byte = StatsRegisterCounter("napa_dispatch_drop.byte", tv);
    if (is_inline) {
        dispatch_fwd.byte = StatsRegisterCounter("napa_dispatch_fwd.byte", tv);
    }

    total_counters.drop_pkts = StatsRegisterCounter("napa_total.overflow_drop_pkts", tv);
    total_counters.drop_byte = StatsRegisterCounter("napa_total.overflow_drop_byte", tv);

    if (enable_stream_stats) {
        /* Per-stream counters get heap-allocated names ("napaN.pkts", ...);
         * the stats API keeps the name, so the buffers are never freed. */
        for (int i = 0; i < stream_cnt; ++i) {
            char *pkts_buf = SCCalloc(1, 32);
            if (unlikely(pkts_buf == NULL)) {
                FatalError(SC_ERR_FATAL,
                        "Failed to allocate memory for NAPATECH stream counter.");
            }

            snprintf(pkts_buf, 32, "napa%d.pkts", stream_config[i].stream_id);
            stream_counters[i].pkts = StatsRegisterCounter(pkts_buf, tv);

            char *byte_buf = SCCalloc(1, 32);
            if (unlikely(byte_buf == NULL)) {
                FatalError(SC_ERR_FATAL,
                        "Failed to allocate memory for NAPATECH stream counter.");
            }
            snprintf(byte_buf, 32, "napa%d.bytes", stream_config[i].stream_id);
            stream_counters[i].byte = StatsRegisterCounter(byte_buf, tv);

            char *drop_pkts_buf = SCCalloc(1, 32);
            if (unlikely(drop_pkts_buf == NULL)) {
                FatalError(SC_ERR_FATAL,
                        "Failed to allocate memory for NAPATECH stream counter.");
            }
            snprintf(drop_pkts_buf, 32, "napa%d.drop_pkts", stream_config[i].stream_id);
            stream_counters[i].drop_pkts = StatsRegisterCounter(drop_pkts_buf, tv);

            char *drop_byte_buf = SCCalloc(1, 32);
            if (unlikely(drop_byte_buf == NULL)) {
                FatalError(SC_ERR_FATAL,
                        "Failed to allocate memory for NAPATECH stream counter.");
            }
            snprintf(drop_byte_buf, 32, "napa%d.drop_byte", stream_config[i].stream_id);
            stream_counters[i].drop_byte = StatsRegisterCounter(drop_byte_buf, tv);
        }
    }

#ifdef NAPATECH_ENABLE_BYPASS
    /* flow_counters is only registered/used when bypass_supported is set. */
    FlowStatsCounters flow_counters;
    if (bypass_supported) {
        flow_counters.active_bypass_flows = StatsRegisterCounter("napa_bypass.active_flows", tv);
        flow_counters.total_bypass_flows = StatsRegisterCounter("napa_bypass.total_flows", tv);
    }
#endif /* NAPATECH_ENABLE_BYPASS */

    StatsSetupPrivate(tv);

    /* Zero everything before entering the periodic loop. */
    StatsSetUI64(tv, total_counters.pkts, 0);
    StatsSetUI64(tv, total_counters.byte, 0);
    StatsSetUI64(tv, total_counters.drop_pkts, 0);
    StatsSetUI64(tv, total_counters.drop_byte, 0);

#ifdef NAPATECH_ENABLE_BYPASS
    if (bypass_supported) {
        StatsSetUI64(tv, dispatch_host.pkts, 0);
        StatsSetUI64(tv, dispatch_drop.pkts, 0);

        if (is_inline) {
            StatsSetUI64(tv, dispatch_fwd.pkts, 0);
        }

        StatsSetUI64(tv, dispatch_host.byte, 0);
        StatsSetUI64(tv, dispatch_drop.byte, 0);
        if (is_inline) {
            StatsSetUI64(tv, dispatch_fwd.byte, 0);
        }

        if (enable_stream_stats) {
            for (int i = 0; i < stream_cnt; ++i) {
                StatsSetUI64(tv, stream_counters[i].pkts, 0);
                StatsSetUI64(tv, stream_counters[i].byte, 0);
                StatsSetUI64(tv, stream_counters[i].drop_pkts, 0);
                StatsSetUI64(tv, stream_counters[i].drop_byte, 0);
            }
        }

        StatsSetUI64(tv, flow_counters.active_bypass_flows, 0);
        StatsSetUI64(tv, flow_counters.total_bypass_flows, 0);
        /* clear_stats=1 resets the card's flow counters on the first read. */
        UpdateFlowStats(tv, hInfo, hstat_stream, flow_counters, 1);
    }
#endif /* NAPATECH_ENABLE_BYPASS */

    uint32_t num_active = UpdateStreamStats(tv, hInfo, hstat_stream,
            stream_cnt, stream_config, total_counters,
            dispatch_host, dispatch_drop, dispatch_fwd,
            is_inline, enable_stream_stats, stream_counters);

    if (!NapatechIsAutoConfigEnabled() && (num_active < stream_cnt)) {
        SCLogInfo("num_active: %d, stream_cnt: %d", num_active, stream_cnt);
        SCLogWarning(SC_ERR_NAPATECH_CONFIG_STREAM,
                "Some or all of the configured streams are not created.  Proceeding with active streams.");
    }

    TmThreadsSetFlag(tv, THV_INIT_DONE);
    /* Main loop: refresh all counters roughly once per second until the
     * thread is asked to stop. */
    while (1) {
        if (TmThreadsCheckFlag(tv, THV_KILL)) {
            SCLogDebug("NapatechStatsLoop THV_KILL detected");
            break;
        }

        UpdateStreamStats(tv, hInfo, hstat_stream,
                stream_cnt, stream_config, total_counters,
                dispatch_host, dispatch_drop, dispatch_fwd,
                is_inline, enable_stream_stats,
                stream_counters);

#ifdef NAPATECH_ENABLE_BYPASS
        if (bypass_supported) {
            UpdateFlowStats(tv, hInfo, hstat_stream, flow_counters, 0);
        }
#endif /* NAPATECH_ENABLE_BYPASS */

        StatsSyncCountersIfSignalled(tv);
        usleep(1000000);
    }

    /* CLEAN UP NT Resources and Close the info stream */
    if ((status = NT_InfoClose(hInfo)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        return NULL;
    }

    /* Close the statistics stream */
    if ((status = NT_StatClose(hstat_stream)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        return NULL;
    }

    SCLogDebug("Exiting NapatechStatsLoop");
    TmThreadsSetFlag(tv, THV_RUNNING_DONE);
    TmThreadWaitForFlag(tv, THV_DEINIT);
    TmThreadsSetFlag(tv, THV_CLOSED);

    return NULL;
}
/* Maximum number of host buffers tracked per stream by this module. */
#define MAX_HOSTBUFFER 4
/* Maximum number of Napatech streams this module supports. */
#define MAX_STREAMS 256
/* Adjustment (in KB) applied when computing host-buffer fill levels.
 * NOTE(review): origin of this constant (and the previous value 1982)
 * is not documented here — confirm against the adapter configuration. */
#define HB_HIGHWATER 2048 //1982
/**
* \brief Tests whether a particular stream_id is actively registered
*
* \param stream_id - ID of the stream to look up
* \param num_registered - The total number of registered streams
* \param registered_streams - An array containing actively registered streams.
*
 * \return Bool indicating whether the specified stream is registered.
*
*/
static bool RegisteredStream(uint16_t stream_id, uint16_t num_registered,
        NapatechStreamConfig registered_streams[])
{
    /* Linear scan of the registered-stream table for a matching id. */
    uint16_t idx;
    for (idx = 0; idx < num_registered; ++idx) {
        if (registered_streams[idx].stream_id == stream_id)
            return true;
    }
    return false;
}
/**
* \brief Count the number of worker threads defined in the conf file.
*
* \return - The number of worker threads defined by the configuration
*/
static uint32_t CountWorkerThreads(void)
{
    int worker_count = 0;

    ConfNode *affinity;
    /* Walk the "threading.cpu-affinity" list looking for the worker set. */
    ConfNode *root = ConfGetNode("threading.cpu-affinity");

    if (root != NULL) {
        TAILQ_FOREACH(affinity, &root->head, next)
        {
            /* Skip the cpu-sets that do not describe worker threads. */
            if (strcmp(affinity->val, "decode-cpu-set") == 0 ||
                    strcmp(affinity->val, "stream-cpu-set") == 0 ||
                    strcmp(affinity->val, "reject-cpu-set") == 0 ||
                    strcmp(affinity->val, "output-cpu-set") == 0) {
                continue;
            }

            if (strcmp(affinity->val, "worker-cpu-set") == 0) {
                ConfNode *node = ConfNodeLookupChild(affinity->head.tqh_first, "cpu");
                ConfNode *lnode;

                /* Tracks whether the config uses a range ("all"/"N-M") or
                 * individual cpu entries; mixing the two is a fatal error. */
                enum CONFIG_SPECIFIER cpu_spec = CONFIG_SPECIFIER_UNDEFINED;

                TAILQ_FOREACH(lnode, &node->head, next)
                {
                    uint8_t start, end;
                    char *end_str;
                    /* length 4 includes the NUL, so only the exact
                     * string "all" matches here */
                    if (strncmp(lnode->val, "all", 4) == 0) {
                        /* check that the string in the config file is correctly specified */
                        if (cpu_spec != CONFIG_SPECIFIER_UNDEFINED) {
                            FatalError(SC_ERR_FATAL,
                                    "Only one Napatech port specifier type allowed.");
                        }
                        cpu_spec = CONFIG_SPECIFIER_RANGE;
                        /* "all" means one worker per configured processor */
                        worker_count = UtilCpuGetNumProcessorsConfigured();
                    } else if ((end_str = strchr(lnode->val, '-'))) {
                        /* check that the string in the config file is correctly specified */
                        if (cpu_spec != CONFIG_SPECIFIER_UNDEFINED) {
                            FatalError(SC_ERR_FATAL,
                                    "Only one Napatech port specifier type allowed.");
                        }
                        cpu_spec = CONFIG_SPECIFIER_RANGE;

                        /* Parse "start-end": the digits before '-' ... */
                        if (StringParseUint8(&start, 10, end_str - lnode->val, (const char *)lnode->val) < 0) {
                            FatalError(SC_ERR_INVALID_VALUE, "Napatech invalid"
                                    " worker range start: '%s'", lnode->val);
                        }
                        /* ... and the digits after it. */
                        if (StringParseUint8(&end, 10, 0, (const char *) (end_str + 1)) < 0) {
                            FatalError(SC_ERR_INVALID_VALUE, "Napatech invalid"
                                    " worker range end: '%s'", (end_str != NULL) ? (const char *)(end_str + 1) : "Null");
                        }
                        if (end < start) {
                            FatalError(SC_ERR_INVALID_VALUE, "Napatech invalid"
                                    " worker range start: '%d' is greater than end: '%d'", start, end);
                        }
                        /* inclusive range */
                        worker_count = end - start + 1;
                    } else {
                        /* check that the string in the config file is correctly specified */
                        if (cpu_spec == CONFIG_SPECIFIER_RANGE) {
                            FatalError(SC_ERR_FATAL,
                                    "Napatech port range specifiers cannot be combined with individual stream specifiers.");
                        }
                        cpu_spec = CONFIG_SPECIFIER_INDIVIDUAL;
                        /* one worker per individually-listed cpu */
                        ++worker_count;
                    }
                }
                /* worker-cpu-set found and processed; no need to keep looking */
                break;
            }
        }
    }
    return worker_count;
}
/**
* \brief Reads and parses the stream configuration defined in the config file.
*
* \param stream_config - array to be filled in with active stream info.
*
* \return the number of streams configured or -1 if an error occurred
*
*/
int NapatechGetStreamConfig(NapatechStreamConfig stream_config[])
{
    int status;
    char error_buffer[80]; // Error buffer
    NtStatStream_t hstat_stream;
    NtStatistics_t hStat; // Stat handle.
    NtInfoStream_t info_stream;
    NtInfo_t info;
    uint16_t instance_cnt = 0;
    int use_all_streams = 0;
    int set_cpu_affinity = 0;
    ConfNode *ntstreams;
    uint16_t stream_id = 0;
    uint8_t start = 0;
    uint8_t end = 0;

    /* Mark every slot unused until proven otherwise. */
    for (uint16_t i = 0; i < MAX_STREAMS; ++i) {
        stream_config[i].stream_id = 0;
        stream_config[i].is_active = false;
        stream_config[i].initialized = false;
    }

    if (ConfGetBool("napatech.use-all-streams", &use_all_streams) == 0) {
        /* default is "no" */
        use_all_streams = 0;
    }

    if ((status = NT_InfoOpen(&info_stream, "SuricataStreamInfo")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_STREAMS_REGISTER_FAILED, status);
        return -1;
    }

    if ((status = NT_StatOpen(&hstat_stream, "StatsStream")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_STREAMS_REGISTER_FAILED, status);
        /* BUG FIX: don't leak the info stream on this error path. */
        NT_InfoClose(info_stream);
        return -1;
    }

    if (use_all_streams) {
        info.cmd = NT_INFO_CMD_READ_STREAM;
        if ((status = NT_InfoRead(info_stream, &info)) != NT_SUCCESS) {
            NAPATECH_ERROR(SC_ERR_NAPATECH_STREAMS_REGISTER_FAILED, status);
            /* BUG FIX: close both streams before bailing out. */
            NT_StatClose(hstat_stream);
            NT_InfoClose(info_stream);
            return -1;
        }

        while (instance_cnt < info.u.stream.data.count) {

            /*
             * For each stream ID query the number of host-buffers used by
             * the stream. If zero, then that streamID is not used; skip
             * over it and continue until we get a streamID with a non-zero
             * count of the host-buffers.
             */
            memset(&hStat, 0, sizeof (NtStatistics_t));

            /* Read usage data for the chosen stream ID */
            hStat.cmd = NT_STATISTICS_READ_CMD_USAGE_DATA_V0;
            hStat.u.usageData_v0.streamid = (uint8_t) stream_id;

            if ((status = NT_StatRead(hstat_stream, &hStat)) != NT_SUCCESS) {
                /* Get the status code as text */
                NT_ExplainError(status, error_buffer, sizeof (error_buffer));
                SCLogError(SC_ERR_NAPATECH_INIT_FAILED, "NT_StatRead() failed: %s\n", error_buffer);
                /* BUG FIX: close both streams before bailing out. */
                NT_StatClose(hstat_stream);
                NT_InfoClose(info_stream);
                return -1;
            }

            if (hStat.u.usageData_v0.data.numHostBufferUsed == 0) {
                ++stream_id;
                continue;
            }

            /* if we get here it is an active stream */
            stream_config[instance_cnt].stream_id = stream_id++;
            stream_config[instance_cnt].is_active = true;
            instance_cnt++;
        }

    } else {
        ConfGetBool("threading.set-cpu-affinity", &set_cpu_affinity);
        if (NapatechIsAutoConfigEnabled() && (set_cpu_affinity == 1)) {
            /* Auto-config with affinity: one stream per worker thread. */
            start = 0;
            end = CountWorkerThreads() - 1;
        } else {
            /* When not using the default streams we need to
             * parse the array of streams from the conf */
            if ((ntstreams = ConfGetNode("napatech.streams")) == NULL) {
                SCLogError(SC_ERR_RUNMODE, "Failed retrieving napatech.streams from Config");
                if (NapatechIsAutoConfigEnabled() && (set_cpu_affinity == 0)) {
                    SCLogError(SC_ERR_RUNMODE,
                            "if set-cpu-affinity: no in conf then napatech.streams must be defined");
                }
                exit(EXIT_FAILURE);
            }

            /* Loop through all stream numbers in the array and register the devices */
            ConfNode *stream;
            enum CONFIG_SPECIFIER stream_spec = CONFIG_SPECIFIER_UNDEFINED;
            instance_cnt = 0;

            TAILQ_FOREACH(stream, &ntstreams->head, next)
            {
                if (stream == NULL) {
                    SCLogError(SC_ERR_NAPATECH_INIT_FAILED, "Couldn't Parse Stream Configuration");
                    /* BUG FIX: close both streams before bailing out. */
                    NT_StatClose(hstat_stream);
                    NT_InfoClose(info_stream);
                    return -1;
                }

                char *end_str = strchr(stream->val, '-');
                if (end_str) {
                    /* Range specifier, e.g. "0-3". Only one range allowed. */
                    if (stream_spec != CONFIG_SPECIFIER_UNDEFINED) {
                        SCLogError(SC_ERR_NAPATECH_PARSE_CONFIG,
                                "Only one Napatech stream range specifier allowed.");
                        /* BUG FIX: close both streams before bailing out. */
                        NT_StatClose(hstat_stream);
                        NT_InfoClose(info_stream);
                        return -1;
                    }
                    stream_spec = CONFIG_SPECIFIER_RANGE;

                    if (StringParseUint8(&start, 10, end_str - stream->val,
                                (const char *)stream->val) < 0) {
                        FatalError(SC_ERR_INVALID_VALUE, "Napatech invalid "
                                "stream id start: '%s'", stream->val);
                    }
                    if (StringParseUint8(&end, 10, 0, (const char *) (end_str + 1)) < 0) {
                        FatalError(SC_ERR_INVALID_VALUE, "Napatech invalid "
                                "stream id end: '%s'", (end_str != NULL) ? (const char *)(end_str + 1) : "Null");
                    }
                } else {
                    if (stream_spec == CONFIG_SPECIFIER_RANGE) {
                        FatalError(SC_ERR_FATAL,
                                "Napatech range and individual specifiers cannot be combined.");
                    }
                    stream_spec = CONFIG_SPECIFIER_INDIVIDUAL;

                    if (StringParseUint8(&stream_config[instance_cnt].stream_id,
                                10, 0, (const char *)stream->val) < 0) {
                        FatalError(SC_ERR_INVALID_VALUE, "Napatech invalid "
                                "stream id: '%s'", stream->val);
                    }
                    /* NOTE(review): with multiple individual stream entries
                     * only the last start/end pair survives into the
                     * activation loop below, so earlier entries are not
                     * probed — confirm whether this is intended. */
                    start = stream_config[instance_cnt].stream_id;
                    end = stream_config[instance_cnt].stream_id;
                }
            }
        }

        /* Probe every stream id in [start, end] for activity. */
        for (stream_id = start; stream_id <= end; ++stream_id) {

            /* if we get here it is configured in the .yaml file */
            stream_config[instance_cnt].stream_id = stream_id;

            /* Check to see if it is an active stream */
            memset(&hStat, 0, sizeof (NtStatistics_t));

            /* Read usage data for the chosen stream ID */
            hStat.cmd = NT_STATISTICS_READ_CMD_USAGE_DATA_V0;
            hStat.u.usageData_v0.streamid =
                    (uint8_t) stream_config[instance_cnt].stream_id;

            if ((status = NT_StatRead(hstat_stream, &hStat)) != NT_SUCCESS) {
                NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
                /* BUG FIX: close both streams before bailing out. */
                NT_StatClose(hstat_stream);
                NT_InfoClose(info_stream);
                return -1;
            }

            if (hStat.u.usageData_v0.data.numHostBufferUsed > 0) {
                stream_config[instance_cnt].is_active = true;
            }
            instance_cnt++;
        }
    }

    /* Close the statistics stream */
    if ((status = NT_StatClose(hstat_stream)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        NT_InfoClose(info_stream);
        return -1;
    }

    if ((status = NT_InfoClose(info_stream)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        return -1;
    }

    return instance_cnt;
}
/**
 * \brief Management-thread loop that monitors buffer fill levels.
 *
 * Every 200 ms this thread reads usage statistics for each active stream
 * and logs (via SCLogPerf) when the average host-buffer fill level — and,
 * when bypass support is compiled out, the on-board SDRAM fill level —
 * crosses a 25% threshold boundary in either direction.
 *
 * \param arg - pointer to the ThreadVars for this management thread.
 *
 * \return NULL always (pthread-style thread entry).
 */
static void *NapatechBufMonitorLoop(void *arg)
{
    ThreadVars *tv = (ThreadVars *) arg;

    NtInfo_t hStreamInfo;
    NtStatistics_t hStat; // Stat handle.
    NtInfoStream_t hInfo;
    NtStatStream_t hstat_stream;
    int status; // Status variable

    /* Fill-level change (in percent) required before a new log line. */
    const uint32_t alertInterval = 25;

#ifndef NAPATECH_ENABLE_BYPASS
    /* On-board (adapter SDRAM) fill-level tracking, per stream. */
    uint32_t OB_fill_level[MAX_STREAMS] = {0};
    uint32_t OB_alert_level[MAX_STREAMS] = {0};
    uint32_t ave_OB_fill_level[MAX_STREAMS] = {0};
#endif /* NAPATECH_ENABLE_BYPASS */

    /* Host-buffer fill-level tracking, per stream. */
    uint32_t HB_fill_level[MAX_STREAMS] = {0};
    uint32_t HB_alert_level[MAX_STREAMS] = {0};
    uint32_t ave_HB_fill_level[MAX_STREAMS] = {0};

    /* Open the info and Statistics */
    if ((status = NT_InfoOpen(&hInfo, "InfoStream")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        exit(EXIT_FAILURE);
    }

    if ((status = NT_StatOpen(&hstat_stream, "StatsStream")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        exit(EXIT_FAILURE);
    }

    /* Read the info on all streams instantiated in the system */
    hStreamInfo.cmd = NT_INFO_CMD_READ_STREAM;
    if ((status = NT_InfoRead(hInfo, &hStreamInfo)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        exit(EXIT_FAILURE);
    }

    /* Only streams registered in the configuration are monitored. */
    NapatechStreamConfig registered_streams[MAX_STREAMS];
    int num_registered = NapatechGetStreamConfig(registered_streams);
    if (num_registered == -1) {
        exit(EXIT_FAILURE);
    }

    TmThreadsSetFlag(tv, THV_INIT_DONE);
    while (1) {
        if (TmThreadsCheckFlag(tv, THV_KILL)) {
            SCLogDebug("NapatechBufMonitorLoop THV_KILL detected");
            break;
        }

        usleep(200000);

        /* Read the info on all streams instantiated in the system */
        hStreamInfo.cmd = NT_INFO_CMD_READ_STREAM;
        if ((status = NT_InfoRead(hInfo, &hStreamInfo)) != NT_SUCCESS) {
            NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
            exit(EXIT_FAILURE);
        }

        char pktCntStr[4096];
        memset(pktCntStr, 0, sizeof (pktCntStr));

        uint32_t stream_id = 0;
        uint32_t stream_cnt = 0;
        uint32_t num_streams = hStreamInfo.u.stream.data.count;

        for (stream_cnt = 0; stream_cnt < num_streams; ++stream_cnt) {

            /* Advance stream_id past ids with no host buffers in use;
             * the do/while re-reads until an in-use stream is found. */
            do {

                /* Read usage data for the chosen stream ID */
                hStat.cmd = NT_STATISTICS_READ_CMD_USAGE_DATA_V0;
                hStat.u.usageData_v0.streamid = (uint8_t) stream_id;

                if ((status = NT_StatRead(hstat_stream, &hStat)) != NT_SUCCESS) {
                    NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
                    exit(EXIT_FAILURE);
                }

                if (hStat.u.usageData_v0.data.numHostBufferUsed == 0) {
                    ++stream_id;
                    continue;
                }
            } while (hStat.u.usageData_v0.data.numHostBufferUsed == 0);

            if (RegisteredStream(stream_id, num_registered, registered_streams)) {

#ifndef NAPATECH_ENABLE_BYPASS
                ave_OB_fill_level[stream_id] = 0;
#endif /* NAPATECH_ENABLE_BYPASS */

                ave_HB_fill_level[stream_id] = 0;

                /* Average the fill level over all host buffers in use. */
                for (uint32_t hb_count = 0; hb_count < hStat.u.usageData_v0.data.numHostBufferUsed; hb_count++) {

#ifndef NAPATECH_ENABLE_BYPASS
                    /* On-board fill as a percentage, clamped to 100. */
                    OB_fill_level[hb_count] =
                            ((100 * hStat.u.usageData_v0.data.hb[hb_count].onboardBuffering.used) /
                            hStat.u.usageData_v0.data.hb[hb_count].onboardBuffering.size);

                    if (OB_fill_level[hb_count] > 100) {
                        OB_fill_level[hb_count] = 100;
                    }
#endif /* NAPATECH_ENABLE_BYPASS */
                    /* Effective buffer size in KB, less the high-water
                     * adjustment.
                     * NOTE(review): if this works out to 0 the division
                     * below faults — confirm HB_HIGHWATER can never equal
                     * the total queued KB. */
                    uint32_t bufSize = hStat.u.usageData_v0.data.hb[hb_count].enQueuedAdapter / 1024
                            + hStat.u.usageData_v0.data.hb[hb_count].deQueued / 1024
                            + hStat.u.usageData_v0.data.hb[hb_count].enQueued / 1024
                            - HB_HIGHWATER;

                    HB_fill_level[hb_count] = (uint32_t)
                            ((100 * hStat.u.usageData_v0.data.hb[hb_count].deQueued / 1024) /
                            bufSize);

#ifndef NAPATECH_ENABLE_BYPASS
                    ave_OB_fill_level[stream_id] += OB_fill_level[hb_count];
#endif /* NAPATECH_ENABLE_BYPASS */

                    ave_HB_fill_level[stream_id] += HB_fill_level[hb_count];
                }

#ifndef NAPATECH_ENABLE_BYPASS
                ave_OB_fill_level[stream_id] /= hStat.u.usageData_v0.data.numHostBufferUsed;
#endif /* NAPATECH_ENABLE_BYPASS */

                ave_HB_fill_level[stream_id] /= hStat.u.usageData_v0.data.numHostBufferUsed;

                /* Host Buffer Fill Level warnings... */
                /* Rising: bump the alert threshold in 25% steps, then log once. */
                if (ave_HB_fill_level[stream_id] >= (HB_alert_level[stream_id] + alertInterval)) {

                    while (ave_HB_fill_level[stream_id] >= HB_alert_level[stream_id] + alertInterval) {
                        HB_alert_level[stream_id] += alertInterval;
                    }
                    SCLogPerf("nt%d - Increasing Host Buffer Fill Level : %4d%%",
                            stream_id, ave_HB_fill_level[stream_id] - 1);
                }

                /* Falling: log once, then walk the threshold back down. */
                if (HB_alert_level[stream_id] > 0) {
                    if ((ave_HB_fill_level[stream_id] <= (HB_alert_level[stream_id] - alertInterval))) {
                        SCLogPerf("nt%d - Decreasing Host Buffer Fill Level: %4d%%",
                                stream_id, ave_HB_fill_level[stream_id]);
                        while (ave_HB_fill_level[stream_id] <= (HB_alert_level[stream_id] - alertInterval)) {
                            if ((HB_alert_level[stream_id]) > 0) {
                                HB_alert_level[stream_id] -= alertInterval;
                            } else break;
                        }
                    }
                }

#ifndef NAPATECH_ENABLE_BYPASS
                /* On Board SDRAM Fill Level warnings... */
                if (ave_OB_fill_level[stream_id] >= (OB_alert_level[stream_id] + alertInterval)) {
                    while (ave_OB_fill_level[stream_id] >= OB_alert_level[stream_id] + alertInterval) {
                        OB_alert_level[stream_id] += alertInterval;

                    }
                    SCLogPerf("nt%d - Increasing Adapter SDRAM Fill Level: %4d%%",
                            stream_id, ave_OB_fill_level[stream_id]);
                }

                if (OB_alert_level[stream_id] > 0) {
                    if ((ave_OB_fill_level[stream_id] <= (OB_alert_level[stream_id] - alertInterval))) {
                        SCLogPerf("nt%d - Decreasing Adapter SDRAM Fill Level : %4d%%",
                                stream_id, ave_OB_fill_level[stream_id]);
                        while (ave_OB_fill_level[stream_id] <= (OB_alert_level[stream_id] - alertInterval)) {
                            if ((OB_alert_level[stream_id]) > 0) {
                                OB_alert_level[stream_id] -= alertInterval;
                            } else break;
                        }
                    }
                }
#endif /* NAPATECH_ENABLE_BYPASS */
            }
            ++stream_id;
        }
    }

    if ((status = NT_InfoClose(hInfo)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        exit(EXIT_FAILURE);
    }

    /* Close the statistics stream */
    if ((status = NT_StatClose(hstat_stream)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        exit(EXIT_FAILURE);
    }

    /* NOTE(review): message says "NapatechStatsLoop" but this is the
     * buffer-monitor loop — the log text looks copy-pasted. */
    SCLogDebug("Exiting NapatechStatsLoop");
    TmThreadsSetFlag(tv, THV_RUNNING_DONE);
    TmThreadWaitForFlag(tv, THV_DEINIT);
    TmThreadsSetFlag(tv, THV_CLOSED);

    return NULL;
}
/**
 * \brief Launches the Napatech management threads.
 *
 * Creates and spawns one thread running NapatechStatsLoop (counter
 * maintenance) and one running NapatechBufMonitorLoop (host-buffer
 * fill-level monitoring). Failure to create or spawn either thread
 * is fatal to the engine.
 */
void NapatechStartStats(void)
{
    /* Statistics collection thread. */
    ThreadVars *stats_thread = TmThreadCreate("NapatechStats",
            NULL, NULL,
            NULL, NULL,
            "custom", NapatechStatsLoop, 0);

    if (stats_thread == NULL) {
        FatalError(SC_ERR_FATAL,
                "Error creating a thread for NapatechStats - Killing engine.");
    }

    if (TmThreadSpawn(stats_thread) != 0) {
        FatalError(SC_ERR_FATAL,
                "Failed to spawn thread for NapatechStats - Killing engine.");
    }

#ifdef NAPATECH_ENABLE_BYPASS
    if (bypass_supported) {
        SCLogInfo("Napatech bypass functionality enabled.");
    }
#endif /* NAPATECH_ENABLE_BYPASS */

    /* Host-buffer fill-level monitor thread. */
    ThreadVars *monitor_thread = TmThreadCreate("NapatechBufMonitor",
            NULL, NULL,
            NULL, NULL,
            "custom", NapatechBufMonitorLoop, 0);

    if (monitor_thread == NULL) {
        FatalError(SC_ERR_FATAL,
                "Error creating a thread for NapatechBufMonitor - Killing engine.");
    }

    if (TmThreadSpawn(monitor_thread) != 0) {
        FatalError(SC_ERR_FATAL,
                "Failed to spawn thread for NapatechBufMonitor - Killing engine.");
    }
}
/**
 * \brief Binds a stream's host buffers to a NUMA node via NTPL.
 *
 * Issues "setup[numanode=<numa>] = streamid == <stream>" on a freshly
 * opened configuration stream.
 *
 * \param stream - stream id to configure.
 * \param numa - NUMA node the stream's host buffers should use.
 *
 * \return true on success, false on any NTAPI/NTPL failure.
 */
bool NapatechSetupNuma(uint32_t stream, uint32_t numa)
{
    uint32_t status = 0;
    static NtConfigStream_t hconfig;

    char ntpl_cmd[64];
    snprintf(ntpl_cmd, 64, "setup[numanode=%d] = streamid == %d", numa, stream);

    NtNtplInfo_t ntpl_info;

    if ((status = NT_ConfigOpen(&hconfig, "ConfigStream")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_STREAMS_REGISTER_FAILED, status);
        return false;
    }

    if ((status = NT_NTPL(hconfig, ntpl_cmd, &ntpl_info, NT_NTPL_PARSER_VALIDATE_NORMAL)) != NT_SUCCESS) {
        NAPATECH_NTPL_ERROR(ntpl_cmd, ntpl_info, status);
        /* BUG FIX: close the config stream on the failure path too. */
        NT_ConfigClose(hconfig);
        return false;
    }

    /* BUG FIX: the config stream was never closed (leaked per call), and
     * the function returned the raw NTPL id as a bool — which reads as
     * false whenever the id happens to be 0. Return an explicit true. */
    NT_ConfigClose(hconfig);
    return true;
}
/**
 * \brief Applies the configured hash mode to the adapter via NTPL.
 *
 * Reads "napatech.hashmode" from the configuration and issues the
 * corresponding "hashmode = <mode>" NTPL command.
 *
 * \return the NTPL id of the hashmode command, or 0 on failure.
 */
static uint32_t NapatechSetHashmode(void)
{
    uint32_t status = 0;
    const char *hash_mode = NULL;
    static NtConfigStream_t hconfig;
    char ntpl_cmd[64];
    NtNtplInfo_t ntpl_info;

    uint32_t filter_id = 0;

    /* Get the hashmode from the conf file.
     * BUG FIX: hash_mode was used uninitialized when the key is absent;
     * fall back to a default (assumed default is "hash5tuplesorted" —
     * confirm against the shipped suricata.yaml). */
    if (ConfGetValue("napatech.hashmode", &hash_mode) == 0 || hash_mode == NULL) {
        hash_mode = "hash5tuplesorted";
    }

    snprintf(ntpl_cmd, 64, "hashmode = %s", hash_mode);

    /* Issue the NTPL command */
    if ((status = NT_ConfigOpen(&hconfig, "ConfigStream")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_STREAMS_REGISTER_FAILED, status);
        /* BUG FIX: was "return false" in a uint32_t function. */
        return 0;
    }

    if ((status = NT_NTPL(hconfig, ntpl_cmd, &ntpl_info,
            NT_NTPL_PARSER_VALIDATE_NORMAL)) == NT_SUCCESS) {
        filter_id = ntpl_info.ntplId;
        /* BUG FIX: log the filter id, not the NT_NTPL status code
         * (which is always NT_SUCCESS here). */
        SCLogConfig("Napatech hashmode: %s ID: %d", hash_mode, filter_id);
    } else {
        NAPATECH_NTPL_ERROR(ntpl_cmd, ntpl_info, status);
        filter_id = 0;
    }

    /* BUG FIX: close the config stream (was leaked on every call). */
    NT_ConfigClose(hconfig);
    return filter_id;
}
/**
 * \brief Looks up the NUMA node of each host buffer used by a stream.
 *
 * \param stream_id - stream to query.
 * \param stream_numas - output array (MAX_HOSTBUFFERS entries); each used
 *        slot receives the host buffer's NUMA node, unused slots are -1.
 *
 * \return the number of host buffers in use by the stream.
 *         Exits the engine on any NTAPI failure.
 */
static uint32_t GetStreamNUMAs(uint32_t stream_id, int stream_numas[])
{
    NtStatistics_t hStat; // Stat handle.
    NtStatStream_t hstat_stream;
    int status; // Status variable

    /* -1 marks "no host buffer in this slot". */
    for (int i = 0; i < MAX_HOSTBUFFERS; ++i)
        stream_numas[i] = -1;

    if ((status = NT_StatOpen(&hstat_stream, "StatsStream")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        exit(EXIT_FAILURE);
    }

    /* Read usage data for the chosen stream ID */
    hStat.cmd = NT_STATISTICS_READ_CMD_USAGE_DATA_V0;
    hStat.u.usageData_v0.streamid = (uint8_t) stream_id;

    if ((status = NT_StatRead(hstat_stream, &hStat)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        exit(EXIT_FAILURE);
    }

    for (uint32_t hb_id = 0; hb_id < hStat.u.usageData_v0.data.numHostBufferUsed; ++hb_id) {
        stream_numas[hb_id] = hStat.u.usageData_v0.data.hb[hb_id].numaNode;
    }

    /* BUG FIX: close the statistics stream — it was opened on every call
     * and never closed, leaking an NTAPI handle per invocation. */
    if ((status = NT_StatClose(hstat_stream)) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
        exit(EXIT_FAILURE);
    }

    return hStat.u.usageData_v0.data.numHostBufferUsed;
}
/**
 * \brief Submits an NTPL command on an open configuration stream.
 *
 * \param hconfig - open NTPL configuration stream.
 * \param ntpl_cmd - NTPL command string to execute.
 *
 * \return the NTPL id of the created filter.
 *         Exits the engine if the command is rejected.
 */
static int NapatechSetFilter(NtConfigStream_t hconfig, char *ntpl_cmd)
{
    int status = 0;
    int local_filter_id = 0;

    NtNtplInfo_t ntpl_info;
    if ((status = NT_NTPL(hconfig, ntpl_cmd, &ntpl_info,
            NT_NTPL_PARSER_VALIDATE_NORMAL)) == NT_SUCCESS) {
        /* BUG FIX: capture the id returned by NT_NTPL; previously
         * local_filter_id was never assigned, so the function always
         * logged and returned 0. */
        local_filter_id = ntpl_info.ntplId;
        SCLogConfig("NTPL filter assignment \"%s\" returned filter id %4d",
                ntpl_cmd, local_filter_id);
    } else {
        NAPATECH_NTPL_ERROR(ntpl_cmd, ntpl_info, status);
        exit(EXIT_FAILURE);
    }

    return local_filter_id;
}
/**
 * \brief Deletes all NTPL filters from the adapter ("delete = all").
 *
 * \return the NTPL id of the delete command, or 0 if the command failed.
 *         Exits the engine if the configuration stream cannot be opened.
 */
uint32_t NapatechDeleteFilters(void)
{
    static NtConfigStream_t hconfig;
    char ntpl_cmd[64];
    NtNtplInfo_t ntpl_info;
    uint32_t status = 0;
    uint32_t delete_cmd_id = 0;

    if ((status = NT_ConfigOpen(&hconfig, "ConfigStream")) != NT_SUCCESS) {
        NAPATECH_ERROR(SC_ERR_NAPATECH_STREAMS_REGISTER_FAILED, status);
        exit(EXIT_FAILURE);
    }

    snprintf(ntpl_cmd, 64, "delete = all");

    status = NT_NTPL(hconfig, ntpl_cmd, &ntpl_info,
            NT_NTPL_PARSER_VALIDATE_NORMAL);
    if (status == NT_SUCCESS) {
        delete_cmd_id = ntpl_info.ntplId;
    } else {
        NAPATECH_NTPL_ERROR(ntpl_cmd, ntpl_info, status);
    }

    NT_ConfigClose(hconfig);

    return delete_cmd_id;
}
uint32_t NapatechSetupTraffic(uint32_t first_stream, uint32_t last_stream)
{
#define PORTS_SPEC_SIZE 64
struct ports_spec_s {
uint8_t first[MAX_PORTS];
uint8_t second[MAX_PORTS];
bool all;
char str[PORTS_SPEC_SIZE];
} ports_spec;
ports_spec.all = false;
ConfNode *ntports;
int iteration = 0;
int status = 0;
NtConfigStream_t hconfig;
char ntpl_cmd[512];
int is_inline = 0;
#ifdef NAPATECH_ENABLE_BYPASS
int is_span_port[MAX_PORTS] = { 0 };
#endif
char span_ports[128];
memset(span_ports, 0, sizeof(span_ports));
if (ConfGetBool("napatech.inline", &is_inline) == 0) {
is_inline = 0;
}
NapatechSetHashmode();
if ((status = NT_ConfigOpen(&hconfig, "ConfigStream")) != NT_SUCCESS) {
NAPATECH_ERROR(SC_ERR_NAPATECH_INIT_FAILED, status);
exit(EXIT_FAILURE);
}
if (first_stream == last_stream) {
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"Setup[state=inactive] = StreamId == %d",
first_stream);
} else {
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"Setup[state=inactive] = StreamId == (%d..%d)",
first_stream, last_stream);
}
NapatechSetFilter(hconfig, ntpl_cmd);
#ifdef NAPATECH_ENABLE_BYPASS
if (NapatechUseHWBypass()) {
SCLogInfo("Napatech Hardware Bypass enabled.");
} else {
SCLogInfo("Napatech Hardware Bypass available but disabled.");
}
#else
if (NapatechUseHWBypass()) {
SCLogInfo("Napatech Hardware Bypass requested in conf but is not available.");
exit(EXIT_FAILURE);
} else {
SCLogInfo("Napatech Hardware Bypass disabled.");
}
if (is_inline) {
FatalError(SC_ERR_FATAL,
"Napatech inline mode not supported. (Only available when Hardware Bypass support is enabled.)");
}
#endif
if (is_inline) {
SCLogInfo("Napatech configured for inline mode.");
} else {
SCLogInfo("Napatech configured for passive (non-inline) mode.");
}
/* When not using the default streams we need to parse
* the array of streams from the conf
*/
if ((ntports = ConfGetNode("napatech.ports")) == NULL) {
FatalError(SC_ERR_FATAL, "Failed retrieving napatech.ports from Conf");
}
/* Loop through all ports in the array */
ConfNode *port;
enum CONFIG_SPECIFIER stream_spec = CONFIG_SPECIFIER_UNDEFINED;
if (NapatechUseHWBypass()) {
SCLogInfo("Listening on the following Napatech ports:");
}
/* Build the NTPL command using values in the config file. */
TAILQ_FOREACH(port, &ntports->head, next)
{
if (port == NULL) {
FatalError(SC_ERR_FATAL,
"Couldn't Parse Port Configuration");
}
if (NapatechUseHWBypass()) {
#ifdef NAPATECH_ENABLE_BYPASS
if (strchr(port->val, '-')) {
stream_spec = CONFIG_SPECIFIER_RANGE;
ByteExtractStringUint8(&ports_spec.first[iteration], 10, 0, port->val);
ByteExtractStringUint8(&ports_spec.second[iteration], 10, 0, strchr(port->val, '-')+1);
if (ports_spec.first[iteration] == ports_spec.second[iteration]) {
if (is_inline) {
FatalError(SC_ERR_FATAL,
"Error with napatec.ports in conf file. When running in inline mode the two ports specifying a segment must be different.");
} else {
/* SPAN port configuration */
is_span_port[ports_spec.first[iteration]] = 1;
if (strlen(span_ports) == 0) {
snprintf(span_ports, sizeof (span_ports), "%d", ports_spec.first[iteration]);
} else {
char temp[16];
snprintf(temp, sizeof(temp), ",%d", ports_spec.first[iteration]);
strlcat(span_ports, temp, sizeof(span_ports));
}
}
}
if (NapatechGetAdapter(ports_spec.first[iteration]) != NapatechGetAdapter(ports_spec.first[iteration])) {
SCLogError(SC_ERR_NAPATECH_PARSE_CONFIG,
"Invalid napatech.ports specification in conf file.");
SCLogError(SC_ERR_NAPATECH_PARSE_CONFIG,
"Two ports on a segment must reside on the same adapter. port %d is on adapter %d, port %d is on adapter %d.",
ports_spec.first[iteration],
NapatechGetAdapter(ports_spec.first[iteration]),
ports_spec.second[iteration],
NapatechGetAdapter(ports_spec.second[iteration])
);
exit(EXIT_FAILURE);
}
NapatechSetPortmap(ports_spec.first[iteration], ports_spec.second[iteration]);
if (ports_spec.first[iteration] == ports_spec.second[iteration]) {
SCLogInfo(" span_port: %d", ports_spec.first[iteration]);
} else {
SCLogInfo(" %s: %d - %d", is_inline ? "inline_ports" : "tap_ports", ports_spec.first[iteration], ports_spec.second[iteration]);
}
if (iteration == 0) {
if (ports_spec.first[iteration] == ports_spec.second[iteration]) {
snprintf(ports_spec.str, sizeof (ports_spec.str), "%d", ports_spec.first[iteration]);
} else {
snprintf(ports_spec.str, sizeof (ports_spec.str), "%d,%d", ports_spec.first[iteration], ports_spec.second[iteration]);
}
} else {
char temp[16];
if (ports_spec.first[iteration] == ports_spec.second[iteration]) {
snprintf(temp, sizeof(temp), ",%d", ports_spec.first[iteration]);
} else {
snprintf(temp, sizeof(temp), ",%d,%d", ports_spec.first[iteration], ports_spec.second[iteration]);
}
strlcat(ports_spec.str, temp, sizeof(ports_spec.str));
}
} else {
FatalError(SC_ERR_FATAL,
"When using hardware flow bypass ports must be specified as segments. E.g. ports: [0-1, 0-2]");
}
#endif
} else { // !NapatechUseHWBypass()
if (strncmp(port->val, "all", 3) == 0) {
/* check that the sting in the config file is correctly specified */
if (stream_spec != CONFIG_SPECIFIER_UNDEFINED) {
FatalError(SC_ERR_FATAL,
"Only one Napatech port specifier type is allowed.");
}
stream_spec = CONFIG_SPECIFIER_RANGE;
ports_spec.all = true;
snprintf(ports_spec.str, sizeof (ports_spec.str), "all");
} else if (strchr(port->val, '-')) {
/* check that the sting in the config file is correctly specified */
if (stream_spec != CONFIG_SPECIFIER_UNDEFINED) {
FatalError(SC_ERR_FATAL,
"Only one Napatech port specifier is allowed when hardware bypass is disabled. (E.g. ports: [0-4], NOT ports: [0-1,2-3])");
}
stream_spec = CONFIG_SPECIFIER_RANGE;
ByteExtractStringUint8(&ports_spec.first[iteration], 10, 0, port->val);
ByteExtractStringUint8(&ports_spec.second[iteration], 10, 0, strchr(port->val, '-') + 1);
snprintf(ports_spec.str, sizeof (ports_spec.str), "(%d..%d)", ports_spec.first[iteration], ports_spec.second[iteration]);
} else {
/* check that the sting in the config file is correctly specified */
if (stream_spec == CONFIG_SPECIFIER_RANGE) {
FatalError(SC_ERR_FATAL,
"Napatech port range specifiers cannot be combined with individual stream specifiers.");
}
stream_spec = CONFIG_SPECIFIER_INDIVIDUAL;
ByteExtractStringUint8(&ports_spec.first[iteration], 10, 0, port->val);
/* Determine the ports to use on the NTPL assign statement*/
if (iteration == 0) {
snprintf(ports_spec.str, sizeof (ports_spec.str), "%s", port->val);
} else {
strlcat(ports_spec.str, ",", sizeof(ports_spec.str));
strlcat(ports_spec.str, port->val, sizeof(ports_spec.str));
}
}
} // if !NapatechUseHWBypass()
++iteration;
} /* TAILQ_FOREACH */
#ifdef NAPATECH_ENABLE_BYPASS
if (bypass_supported) {
if (is_inline) {
char inline_setup_cmd[512];
if (first_stream == last_stream) {
snprintf(inline_setup_cmd, sizeof (ntpl_cmd),
"Setup[TxDescriptor=Dyn;TxPorts=%s;RxCRC=False;TxPortPos=112;UseWL=True] = StreamId == %d",
ports_spec.str, first_stream);
} else {
snprintf(inline_setup_cmd, sizeof (ntpl_cmd),
"Setup[TxDescriptor=Dyn;TxPorts=%s;RxCRC=False;TxPortPos=112;UseWL=True] = StreamId == (%d..%d)",
ports_spec.str, first_stream, last_stream);
}
NapatechSetFilter(hconfig, inline_setup_cmd);
}
/* Build the NTPL command */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=3;streamid=(%d..%d);colormask=0x10000000;"
"Descriptor=DYN3,length=24,colorbits=32,Offset0=Layer3Header[0],Offset1=Layer4Header[0]]= %s%s",
first_stream, last_stream, ports_spec.all ? "" : "port==", ports_spec.str);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=2;streamid=(%d..%d);colormask=0x11000000;"
"Descriptor=DYN3,length=24,colorbits=32,Offset0=Layer3Header[0],Offset1=Layer4Header[0]"
"]= %s%s and (Layer3Protocol==IPV4)",
first_stream, last_stream, ports_spec.all ? "" : "port==", ports_spec.str);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=2;streamid=(%d..%d);colormask=0x14000000;"
"Descriptor=DYN3,length=24,colorbits=32,Offset0=Layer3Header[0],Offset1=Layer4Header[0]]= %s%s and (Layer3Protocol==IPV6)",
first_stream, last_stream, ports_spec.all ? "" : "port==", ports_spec.str);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=2;streamid=(%d..%d);colormask=0x10100000;"
"Descriptor=DYN3,length=24,colorbits=32,Offset0=Layer3Header[0],Offset1=Layer4Header[0]]= %s%s and (Layer4Protocol==TCP)",
first_stream, last_stream, ports_spec.all ? "" : "port==", ports_spec.str);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=2;streamid=(%d..%d);colormask=0x10200000;"
"Descriptor=DYN3,length=24,colorbits=32,Offset0=Layer3Header[0],Offset1=Layer4Header[0]"
"]= %s%s and (Layer4Protocol==UDP)",
first_stream, last_stream, ports_spec.all ? "" : "port==", ports_spec.str);
NapatechSetFilter(hconfig, ntpl_cmd);
if (strlen(span_ports) > 0) {
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=2;streamid=(%d..%d);colormask=0x00001000;"
"Descriptor=DYN3,length=24,colorbits=32,Offset0=Layer3Header[0],Offset1=Layer4Header[0]"
"]= port==%s",
first_stream, last_stream, span_ports);
NapatechSetFilter(hconfig, ntpl_cmd);
}
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"KeyType[name=KT%u]={sw_32_32,sw_16_16}",
NAPATECH_KEYTYPE_IPV4);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"KeyDef[name=KDEF%u;KeyType=KT%u;ipprotocolfield=OUTER]=(Layer3Header[12]/32/32,Layer4Header[0]/16/16)",
NAPATECH_KEYTYPE_IPV4, NAPATECH_KEYTYPE_IPV4);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"KeyType[name=KT%u]={32,32,16,16}",
NAPATECH_KEYTYPE_IPV4_SPAN);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"KeyDef[name=KDEF%u;KeyType=KT%u;ipprotocolfield=OUTER;keysort=sorted]=(Layer3Header[12]/32,Layer3Header[16]/32,Layer4Header[0]/16,Layer4Header[2]/16)",
NAPATECH_KEYTYPE_IPV4_SPAN, NAPATECH_KEYTYPE_IPV4_SPAN);
NapatechSetFilter(hconfig, ntpl_cmd);
/* IPv6 5tuple for inline and tap ports */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"KeyType[name=KT%u]={sw_128_128,sw_16_16}",
NAPATECH_KEYTYPE_IPV6);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"KeyDef[name=KDEF%u;KeyType=KT%u;ipprotocolfield=OUTER]=(Layer3Header[8]/128/128,Layer4Header[0]/16/16)",
NAPATECH_KEYTYPE_IPV6, NAPATECH_KEYTYPE_IPV6);
NapatechSetFilter(hconfig, ntpl_cmd);
/* IPv6 5tuple for SPAN Ports */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"KeyType[name=KT%u]={128,128,16,16}",
NAPATECH_KEYTYPE_IPV6_SPAN);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"KeyDef[name=KDEF%u;KeyType=KT%u;ipprotocolfield=OUTER;keysort=sorted]=(Layer3Header[8]/128,Layer3Header[24]/128,Layer4Header[0]/16,Layer4Header[2]/16)",
NAPATECH_KEYTYPE_IPV6_SPAN, NAPATECH_KEYTYPE_IPV6_SPAN);
NapatechSetFilter(hconfig, ntpl_cmd);
int pair;
char ports_ntpl_a[64];
char ports_ntpl_b[64];
memset(ports_ntpl_a, 0, sizeof(ports_ntpl_a));
memset(ports_ntpl_b, 0, sizeof(ports_ntpl_b));
for (pair = 0; pair < iteration; ++pair) {
char port_str[8];
if (!is_span_port[ports_spec.first[pair]]) {
snprintf(port_str, sizeof(port_str), "%s%u ", strlen(ports_ntpl_a) == 0 ? "" : ",", ports_spec.first[pair]);
strlcat(ports_ntpl_a, port_str, sizeof(ports_ntpl_a));
snprintf(port_str, sizeof(port_str), "%s%u ", strlen(ports_ntpl_b) == 0 ? "" : ",", ports_spec.second[pair]);
strlcat(ports_ntpl_b, port_str, sizeof(ports_ntpl_b));
}
}
if (strlen(ports_ntpl_a) > 0) {
/* This is the assign for dropping upstream traffic */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;colormask=0x1]=(Layer3Protocol==IPV4)and(port == %s)and(Key(KDEF%u,KeyID=%u)==%u)",
ports_ntpl_a,
NAPATECH_KEYTYPE_IPV4,
NAPATECH_KEYTYPE_IPV4,
NAPATECH_FLOWTYPE_DROP);
NapatechSetFilter(hconfig, ntpl_cmd);
}
if (strlen(ports_ntpl_b) > 0) {
/* This is the assign for dropping downstream traffic */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;colormask=0x1]=(Layer3Protocol==IPV4)and(port == %s)and(Key(KDEF%u,KeyID=%u,fieldaction=swap)==%u)",
ports_ntpl_b, //ports_spec.str,
NAPATECH_KEYTYPE_IPV4,
NAPATECH_KEYTYPE_IPV4,
NAPATECH_FLOWTYPE_DROP);
NapatechSetFilter(hconfig, ntpl_cmd);
}
if (strlen(span_ports) > 0) {
/* This is the assign for dropping SPAN Port traffic */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;colormask=0x1]=(Layer3Protocol==IPV4)and(port == %s)and(Key(KDEF%u,KeyID=%u)==%u)",
span_ports,
NAPATECH_KEYTYPE_IPV4_SPAN,
NAPATECH_KEYTYPE_IPV4_SPAN,
NAPATECH_FLOWTYPE_DROP);
NapatechSetFilter(hconfig, ntpl_cmd);
}
if (is_inline) {
for (pair = 0; pair < iteration; ++pair) {
/* This is the assignment for forwarding traffic */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;DestinationPort=%d;colormask=0x2]=(Layer3Protocol==IPV4)and(port == %d)and(Key(KDEF%u,KeyID=%u)==%u)",
ports_spec.second[pair],
ports_spec.first[pair],
NAPATECH_KEYTYPE_IPV4,
NAPATECH_KEYTYPE_IPV4,
NAPATECH_FLOWTYPE_PASS);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;DestinationPort=%d;colormask=0x2]=(Layer3Protocol==IPV4)and(port == %d)and(Key(KDEF%u,KeyID=%u,fieldaction=swap)==%u)",
ports_spec.first[pair],
ports_spec.second[pair],
NAPATECH_KEYTYPE_IPV4,
NAPATECH_KEYTYPE_IPV4,
NAPATECH_FLOWTYPE_PASS);
NapatechSetFilter(hconfig, ntpl_cmd);
}
}
if (strlen(ports_ntpl_a) > 0) {
/* This is the assign for dropping upstream traffic */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;colormask=0x1]=(Layer3Protocol==IPV6)and(port == %s)and(Key(KDEF%u,KeyID=%u)==%u)",
ports_ntpl_a,
NAPATECH_KEYTYPE_IPV6,
NAPATECH_KEYTYPE_IPV6,
NAPATECH_FLOWTYPE_DROP);
NapatechSetFilter(hconfig, ntpl_cmd);
}
if (strlen(ports_ntpl_b) > 0) {
/* This is the assign for dropping downstream traffic */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;colormask=0x1]=(Layer3Protocol==IPV6)and(port == %s)and(Key(KDEF%u,KeyID=%u,fieldaction=swap)==%u)",
ports_ntpl_b, //ports_spec.str,
NAPATECH_KEYTYPE_IPV6,
NAPATECH_KEYTYPE_IPV6,
NAPATECH_FLOWTYPE_DROP);
NapatechSetFilter(hconfig, ntpl_cmd);
}
if (strlen(span_ports) > 0) {
/* This is the assign for dropping SPAN Port traffic */
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;colormask=0x1]=(Layer3Protocol==IPV6)and(port == %s)and(Key(KDEF%u,KeyID=%u)==%u)",
span_ports,
NAPATECH_KEYTYPE_IPV6_SPAN,
NAPATECH_KEYTYPE_IPV6_SPAN,
NAPATECH_FLOWTYPE_DROP);
NapatechSetFilter(hconfig, ntpl_cmd);
}
if (is_inline) {
for (pair = 0; pair < iteration; ++pair) {
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;DestinationPort=%d;colormask=0x4]=(Layer3Protocol==IPV6)and(port==%d)and(Key(KDEF%u,KeyID=%u)==%u)",
ports_spec.second[pair],
ports_spec.first[pair],
NAPATECH_KEYTYPE_IPV6,
NAPATECH_KEYTYPE_IPV6,
NAPATECH_FLOWTYPE_PASS);
NapatechSetFilter(hconfig, ntpl_cmd);
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"assign[priority=1;streamid=drop;DestinationPort=%d;colormask=0x4]=(Layer3Protocol==IPV6)and(port==%d)and(Key(KDEF%u,KeyID=%u,fieldaction=swap)==%u)",
ports_spec.first[pair],
ports_spec.second[pair],
NAPATECH_KEYTYPE_IPV6,
NAPATECH_KEYTYPE_IPV6,
NAPATECH_FLOWTYPE_PASS);
NapatechSetFilter(hconfig, ntpl_cmd);
}
}
} else {
if (is_inline) {
FatalError(SC_ERR_FATAL,
"Napatech Inline operation not supported by this FPGA version.");
}
if (NapatechIsAutoConfigEnabled()){
snprintf(ntpl_cmd, sizeof (ntpl_cmd), "assign[streamid=(%d..%d);colormask=0x0] = %s%s",
first_stream, last_stream, ports_spec.all ? "" : "port==", ports_spec.str);
NapatechSetFilter(hconfig, ntpl_cmd);
}
}
#else /* NAPATECH_ENABLE_BYPASS */
snprintf(ntpl_cmd, sizeof (ntpl_cmd), "assign[streamid=(%d..%d)] = %s%s",
first_stream, last_stream, ports_spec.all ? "" : "port==", ports_spec.str);
NapatechSetFilter(hconfig, ntpl_cmd);
#endif /* !NAPATECH_ENABLE_BYPASS */
SCLogConfig("Host-buffer NUMA assignments: ");
int numa_nodes[MAX_HOSTBUFFERS];
uint32_t stream_id;
for (stream_id = first_stream; stream_id < last_stream; ++stream_id) {
char temp1[256];
char temp2[256];
uint32_t num_host_buffers = GetStreamNUMAs(stream_id, numa_nodes);
snprintf(temp1, 256, " stream %d: ", stream_id);
for (uint32_t hb_id = 0; hb_id < num_host_buffers; ++hb_id) {
snprintf(temp2, 256, "%d ", numa_nodes[hb_id]);
strlcat(temp1, temp2, sizeof(temp1));
}
SCLogConfig("%s", temp1);
}
if (first_stream == last_stream) {
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"Setup[state=active] = StreamId == %d",
first_stream);
} else {
snprintf(ntpl_cmd, sizeof (ntpl_cmd),
"Setup[state=active] = StreamId == (%d..%d)",
first_stream, last_stream);
}
NapatechSetFilter(hconfig, ntpl_cmd);
NT_ConfigClose(hconfig);
return status;
}
#endif // HAVE_NAPATECH
|
{
"pile_set_name": "Github"
}
|
// Variables
// --------------------------
@FontAwesomePath: "../font";
//@FontAwesomePath: "//netdna.bootstrapcdn.com/font-awesome/3.2.1/font"; // for referencing Bootstrap CDN font files directly
@FontAwesomeVersion: "3.2.1";
@borderColor: #eee;
@iconMuted: #eee;
@iconLight: #fff;
@iconDark: #333;
@icons-li-width: 30/14em;
@glass: "\f000";
@music: "\f001";
@search: "\f002";
@envelope-alt: "\f003";
@heart: "\f004";
@star: "\f005";
@star-empty: "\f006";
@user: "\f007";
@film: "\f008";
@th-large: "\f009";
@th: "\f00a";
@th-list: "\f00b";
@ok: "\f00c";
@remove: "\f00d";
@zoom-in: "\f00e";
@zoom-out: "\f010";
@off: "\f011";
@signal: "\f012";
@cog: "\f013";
@trash: "\f014";
@home: "\f015";
@file-alt: "\f016";
@time: "\f017";
@road: "\f018";
@download-alt: "\f019";
@download: "\f01a";
@upload: "\f01b";
@inbox: "\f01c";
@play-circle: "\f01d";
@repeat: "\f01e";
@refresh: "\f021";
@list-alt: "\f022";
@lock: "\f023";
@flag: "\f024";
@headphones: "\f025";
@volume-off: "\f026";
@volume-down: "\f027";
@volume-up: "\f028";
@qrcode: "\f029";
@barcode: "\f02a";
@tag: "\f02b";
@tags: "\f02c";
@book: "\f02d";
@bookmark: "\f02e";
@print: "\f02f";
@camera: "\f030";
@font: "\f031";
@bold: "\f032";
@italic: "\f033";
@text-height: "\f034";
@text-width: "\f035";
@align-left: "\f036";
@align-center: "\f037";
@align-right: "\f038";
@align-justify: "\f039";
@list: "\f03a";
@indent-left: "\f03b";
@indent-right: "\f03c";
@facetime-video: "\f03d";
@picture: "\f03e";
@pencil: "\f040";
@map-marker: "\f041";
@adjust: "\f042";
@tint: "\f043";
@edit: "\f044";
@share: "\f045";
@check: "\f046";
@move: "\f047";
@step-backward: "\f048";
@fast-backward: "\f049";
@backward: "\f04a";
@play: "\f04b";
@pause: "\f04c";
@stop: "\f04d";
@forward: "\f04e";
@fast-forward: "\f050";
@step-forward: "\f051";
@eject: "\f052";
@chevron-left: "\f053";
@chevron-right: "\f054";
@plus-sign: "\f055";
@minus-sign: "\f056";
@remove-sign: "\f057";
@ok-sign: "\f058";
@question-sign: "\f059";
@info-sign: "\f05a";
@screenshot: "\f05b";
@remove-circle: "\f05c";
@ok-circle: "\f05d";
@ban-circle: "\f05e";
@arrow-left: "\f060";
@arrow-right: "\f061";
@arrow-up: "\f062";
@arrow-down: "\f063";
@share-alt: "\f064";
@resize-full: "\f065";
@resize-small: "\f066";
@plus: "\f067";
@minus: "\f068";
@asterisk: "\f069";
@exclamation-sign: "\f06a";
@gift: "\f06b";
@leaf: "\f06c";
@fire: "\f06d";
@eye-open: "\f06e";
@eye-close: "\f070";
@warning-sign: "\f071";
@plane: "\f072";
@calendar: "\f073";
@random: "\f074";
@comment: "\f075";
@magnet: "\f076";
@chevron-up: "\f077";
@chevron-down: "\f078";
@retweet: "\f079";
@shopping-cart: "\f07a";
@folder-close: "\f07b";
@folder-open: "\f07c";
@resize-vertical: "\f07d";
@resize-horizontal: "\f07e";
@bar-chart: "\f080";
@twitter-sign: "\f081";
@facebook-sign: "\f082";
@camera-retro: "\f083";
@key: "\f084";
@cogs: "\f085";
@comments: "\f086";
@thumbs-up-alt: "\f087";
@thumbs-down-alt: "\f088";
@star-half: "\f089";
@heart-empty: "\f08a";
@signout: "\f08b";
@linkedin-sign: "\f08c";
@pushpin: "\f08d";
@external-link: "\f08e";
@signin: "\f090";
@trophy: "\f091";
@github-sign: "\f092";
@upload-alt: "\f093";
@lemon: "\f094";
@phone: "\f095";
@check-empty: "\f096";
@bookmark-empty: "\f097";
@phone-sign: "\f098";
@twitter: "\f099";
@facebook: "\f09a";
@github: "\f09b";
@unlock: "\f09c";
@credit-card: "\f09d";
@rss: "\f09e";
@hdd: "\f0a0";
@bullhorn: "\f0a1";
@bell: "\f0a2";
@certificate: "\f0a3";
@hand-right: "\f0a4";
@hand-left: "\f0a5";
@hand-up: "\f0a6";
@hand-down: "\f0a7";
@circle-arrow-left: "\f0a8";
@circle-arrow-right: "\f0a9";
@circle-arrow-up: "\f0aa";
@circle-arrow-down: "\f0ab";
@globe: "\f0ac";
@wrench: "\f0ad";
@tasks: "\f0ae";
@filter: "\f0b0";
@briefcase: "\f0b1";
@fullscreen: "\f0b2";
@group: "\f0c0";
@link: "\f0c1";
@cloud: "\f0c2";
@beaker: "\f0c3";
@cut: "\f0c4";
@copy: "\f0c5";
@paper-clip: "\f0c6";
@save: "\f0c7";
@sign-blank: "\f0c8";
@reorder: "\f0c9";
@list-ul: "\f0ca";
@list-ol: "\f0cb";
@strikethrough: "\f0cc";
@underline: "\f0cd";
@table: "\f0ce";
@magic: "\f0d0";
@truck: "\f0d1";
@pinterest: "\f0d2";
@pinterest-sign: "\f0d3";
@google-plus-sign: "\f0d4";
@google-plus: "\f0d5";
@money: "\f0d6";
@caret-down: "\f0d7";
@caret-up: "\f0d8";
@caret-left: "\f0d9";
@caret-right: "\f0da";
@columns: "\f0db";
@sort: "\f0dc";
@sort-down: "\f0dd";
@sort-up: "\f0de";
@envelope: "\f0e0";
@linkedin: "\f0e1";
@undo: "\f0e2";
@legal: "\f0e3";
@dashboard: "\f0e4";
@comment-alt: "\f0e5";
@comments-alt: "\f0e6";
@bolt: "\f0e7";
@sitemap: "\f0e8";
@umbrella: "\f0e9";
@paste: "\f0ea";
@lightbulb: "\f0eb";
@exchange: "\f0ec";
@cloud-download: "\f0ed";
@cloud-upload: "\f0ee";
@user-md: "\f0f0";
@stethoscope: "\f0f1";
@suitcase: "\f0f2";
@bell-alt: "\f0f3";
@coffee: "\f0f4";
@food: "\f0f5";
@file-text-alt: "\f0f6";
@building: "\f0f7";
@hospital: "\f0f8";
@ambulance: "\f0f9";
@medkit: "\f0fa";
@fighter-jet: "\f0fb";
@beer: "\f0fc";
@h-sign: "\f0fd";
@plus-sign-alt: "\f0fe";
@double-angle-left: "\f100";
@double-angle-right: "\f101";
@double-angle-up: "\f102";
@double-angle-down: "\f103";
@angle-left: "\f104";
@angle-right: "\f105";
@angle-up: "\f106";
@angle-down: "\f107";
@desktop: "\f108";
@laptop: "\f109";
@tablet: "\f10a";
@mobile-phone: "\f10b";
@circle-blank: "\f10c";
@quote-left: "\f10d";
@quote-right: "\f10e";
@spinner: "\f110";
@circle: "\f111";
@reply: "\f112";
@github-alt: "\f113";
@folder-close-alt: "\f114";
@folder-open-alt: "\f115";
@expand-alt: "\f116";
@collapse-alt: "\f117";
@smile: "\f118";
@frown: "\f119";
@meh: "\f11a";
@gamepad: "\f11b";
@keyboard: "\f11c";
@flag-alt: "\f11d";
@flag-checkered: "\f11e";
@terminal: "\f120";
@code: "\f121";
@reply-all: "\f122";
@mail-reply-all: "\f122";
@star-half-empty: "\f123";
@location-arrow: "\f124";
@crop: "\f125";
@code-fork: "\f126";
@unlink: "\f127";
@question: "\f128";
@info: "\f129";
@exclamation: "\f12a";
@superscript: "\f12b";
@subscript: "\f12c";
@eraser: "\f12d";
@puzzle-piece: "\f12e";
@microphone: "\f130";
@microphone-off: "\f131";
@shield: "\f132";
@calendar-empty: "\f133";
@fire-extinguisher: "\f134";
@rocket: "\f135";
@maxcdn: "\f136";
@chevron-sign-left: "\f137";
@chevron-sign-right: "\f138";
@chevron-sign-up: "\f139";
@chevron-sign-down: "\f13a";
@html5: "\f13b";
@css3: "\f13c";
@anchor: "\f13d";
@unlock-alt: "\f13e";
@bullseye: "\f140";
@ellipsis-horizontal: "\f141";
@ellipsis-vertical: "\f142";
@rss-sign: "\f143";
@play-sign: "\f144";
@ticket: "\f145";
@minus-sign-alt: "\f146";
@check-minus: "\f147";
@level-up: "\f148";
@level-down: "\f149";
@check-sign: "\f14a";
@edit-sign: "\f14b";
@external-link-sign: "\f14c";
@share-sign: "\f14d";
@compass: "\f14e";
@collapse: "\f150";
@collapse-top: "\f151";
@expand: "\f152";
@eur: "\f153";
@gbp: "\f154";
@usd: "\f155";
@inr: "\f156";
@jpy: "\f157";
@cny: "\f158";
@krw: "\f159";
@btc: "\f15a";
@file: "\f15b";
@file-text: "\f15c";
@sort-by-alphabet: "\f15d";
@sort-by-alphabet-alt: "\f15e";
@sort-by-attributes: "\f160";
@sort-by-attributes-alt: "\f161";
@sort-by-order: "\f162";
@sort-by-order-alt: "\f163";
@thumbs-up: "\f164";
@thumbs-down: "\f165";
@youtube-sign: "\f166";
@youtube: "\f167";
@xing: "\f168";
@xing-sign: "\f169";
@youtube-play: "\f16a";
@dropbox: "\f16b";
@stackexchange: "\f16c";
@instagram: "\f16d";
@flickr: "\f16e";
@adn: "\f170";
@bitbucket: "\f171";
@bitbucket-sign: "\f172";
@tumblr: "\f173";
@tumblr-sign: "\f174";
@long-arrow-down: "\f175";
@long-arrow-up: "\f176";
@long-arrow-left: "\f177";
@long-arrow-right: "\f178";
@apple: "\f179";
@windows: "\f17a";
@android: "\f17b";
@linux: "\f17c";
@dribbble: "\f17d";
@skype: "\f17e";
@foursquare: "\f180";
@trello: "\f181";
@female: "\f182";
@male: "\f183";
@gittip: "\f184";
@sun: "\f185";
@moon: "\f186";
@archive: "\f187";
@bug: "\f188";
@vk: "\f189";
@weibo: "\f18a";
@renren: "\f18b";
|
{
"pile_set_name": "Github"
}
|
// RUN: %target-typecheck-verify-swift
// https://bugs.swift.org/browse/SR-4426
// '#if' in top-level code that contains only decls should not disturb forward reference.
typealias A = B
#if false
func foo() {}
#endif
struct B {}
typealias C = D
#if true
print("ok")
#endif
struct D {}
|
{
"pile_set_name": "Github"
}
|
#!/bin/bash
#xterm -e "roscore" &
#sleep 2
xterm -e "source ~/.bashrc && roscd roscpp_tutorials/bin; ./talker" &
xterm -e "source ~/.bashrc && roscd roscpp_tutorials/bin; ./listener chatter:=chatter_relay" &
./relay chatter
|
{
"pile_set_name": "Github"
}
|
name=Caller of Gales
image=https://magiccards.info/scans/en/cn2/103.jpg
image_updated=2017-08-29
value=2.761
rarity=C
type=Creature
subtype=Merfolk,Wizard
cost={U}
pt=1/1
ability={1}{U}, {T}: Target creature gains flying until end of turn.
timing=main
oracle={1}{U}, {T}: Target creature gains flying until end of turn.
|
{
"pile_set_name": "Github"
}
|
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!114 &11400000
MonoBehaviour:
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: 9d1de2691495b304e9843ac3335b626d, type: 3}
m_Name: suits_ablativecoat
m_EditorClassIdentifier:
Variance:
- Frames:
- sprite: {fileID: 3439455209659141063, guid: 3161f951d8d0dd3499fa569aa36abb47,
type: 3}
secondDelay: 0.5
- sprite: {fileID: -8495603921139501215, guid: 3161f951d8d0dd3499fa569aa36abb47,
type: 3}
secondDelay: 0.5
- Frames:
- sprite: {fileID: -8885038089760734150, guid: 3161f951d8d0dd3499fa569aa36abb47,
type: 3}
secondDelay: 0.5
- sprite: {fileID: 3373184668826348280, guid: 3161f951d8d0dd3499fa569aa36abb47,
type: 3}
secondDelay: 0.5
- Frames:
- sprite: {fileID: -6608304276669693891, guid: 3161f951d8d0dd3499fa569aa36abb47,
type: 3}
secondDelay: 0.5
- sprite: {fileID: 2785921676510788815, guid: 3161f951d8d0dd3499fa569aa36abb47,
type: 3}
secondDelay: 0.5
- Frames:
- sprite: {fileID: 7097116958859515021, guid: 3161f951d8d0dd3499fa569aa36abb47,
type: 3}
secondDelay: 0.5
- sprite: {fileID: -980370848950497714, guid: 3161f951d8d0dd3499fa569aa36abb47,
type: 3}
secondDelay: 0.5
IsPalette: 0
setID: 20782
|
{
"pile_set_name": "Github"
}
|
using SDKPackage.Facade;
using SDKPackage.Utils;
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
namespace SDKPackage.PJPackage
{
public partial class SelectGameVersionList : System.Web.UI.Page
{
protected string platform = GameRequest.GetQueryString("platform");
protected string gameId = GameRequest.GetQueryString("gameid");
protected string gameName = GameRequest.GetQueryString("gameName");
protected string gameDisplayName = GameRequest.GetQueryString("gameDisplayName");
protected string gamenamespell = GameRequest.GetQueryString("gamenamespell");
protected string taskid = GameRequest.GetQueryString("taskid");
protected int rstatus = 1;
protected bool isBack = false;
NativeWebFacade aideNativeWebFacade = new NativeWebFacade();
protected void Page_Load(object sender, EventArgs e)
{
if (!string.IsNullOrEmpty(taskid))
isBack = true;
if (Cache["Roleid"] == null || Cache["Roleid"].ToString() == "" || Cache["Roles"] == null)
{
BindingCache();
}
if (((HashSet<string>)Cache["Roles"]).Contains("1") ||
((HashSet<string>)Cache["Roles"]).Contains("2") ||
((HashSet<string>)Cache["Roles"]).Contains("3"))
SqlDataSource1.SelectParameters.Add("Status", DbType.Int32, "0");
else
SqlDataSource1.SelectParameters.Add("Status", DbType.Int32, "1");
}
private void BindingCache()
{
string sql = string.Format(@" select * from [AspNetUserRoles] r inner join AspNetUsers u on r.UserId=u.Id and u.UserName='{0}'", Context.User.Identity.Name);// and RoleId in (2,3)
DataSet ds = aideNativeWebFacade.GetDataSetBySql(sql);
DataView dv = ds.Tables[0].DefaultView;
dv.RowFilter = "RoleId IN (2,3)";
DataTable newTable = dv.ToTable();
var roles = new HashSet<string>();
foreach (DataRow r in ds.Tables[0].Rows)
{
//var c = r["RoleId"];
roles.Add(r["RoleId"].ToString());
}
Cache["Roles"] = roles;
if (newTable.Rows.Count > 0)
Cache["Roleid"] = "0";
else
Cache["Roleid"] = "1";
}
protected void GameVersionList_ItemCommand(object sender, ListViewCommandEventArgs e)
{
if (e.CommandName == "del")
{
string[] arr = e.CommandArgument.ToString().Split(',');
string id = arr[0];
string SDKPackageDir = "";//SDKAndroidPackageGameFile
if (platform == "Android")
{
SDKPackageDir = System.Configuration.ConfigurationManager.AppSettings["SDKAndroidPackageGameFile"] + gameName + "\\" + arr[1];
}
else
{
string[] split = new string[] { ".zip_" };
SDKPackageDir = System.Configuration.ConfigurationManager.AppSettings["SDKIOSPackageGameFile"] + gamenamespell + "\\" + arr[1].Split(split, StringSplitOptions.None)[1];
}
if (System.IO.Directory.Exists(SDKPackageDir))
{
System.IO.Directory.Delete(SDKPackageDir, true);
}
string sql = string.Format(@"delete from sdk_UploadPackageInfo where id={0}", id);
aideNativeWebFacade.ExecuteSql(sql);
this.GameVersionList.DataBind();
}
}
}
}
|
{
"pile_set_name": "Github"
}
|
#
# Copyright (c) 2020 Project nlbuild-autotools Authors. All Rights Reserved.
# Copyright (c) 2016 Nest Labs Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Description:
# This file is the GNU automake template for the @PACKAGE_DESCRIPTION@
# unit tests.
#
include $(abs_top_nlbuild_autotools_dir)/automake/pre.am
#
# Local headers to build against and distribute but not to install
# since they are not part of the package.
#
noinst_HEADERS = \
@PACKAGE_SHORT_LOWER@-test.h \
$(NULL)
#
# Other files we do want to distribute with the package.
#
EXTRA_DIST = \
$(NULL)
if @PACKAGE_SHORT_UPPER@_BUILD_TESTS
# C preprocessor option flags that will apply to all compiled objects in this
# makefile.
AM_CPPFLAGS = \
-I$(top_srcdir)/include \
$(NULL)
COMMON_LDADD = \
-L${top_builddir}/src -l@PACKAGE_SHORT_LOWER@
$(NULL)
# Test applications that should be run when the 'check' target is run.
check_PROGRAMS = \
@PACKAGE_SHORT_LOWER@-test \
@PACKAGE_SHORT_LOWER@-test-cxx \
$(NULL)
# Test applications and scripts that should be built and run when the
# 'check' target is run.
TESTS = \
$(check_PROGRAMS) \
$(NULL)
# The additional environment variables and their values that will be
# made available to all programs and scripts in TESTS.
TESTS_ENVIRONMENT = \
$(NULL)
# Source, compiler, and linker options for test programs.
@PACKAGE_SHORT_LOWER@_test_LDADD = $(COMMON_LDADD)
@PACKAGE_SHORT_LOWER@_test_SOURCES = @PACKAGE_SHORT_LOWER@-test.c
@PACKAGE_SHORT_LOWER@_test_cxx_LDADD = $(COMMON_LDADD)
@PACKAGE_SHORT_LOWER@_test_cxx_SOURCES = @PACKAGE_SHORT_LOWER@-test-cxx.cpp
# Foreign make dependencies
NLFOREIGN_FILE_DEPENDENCIES = \
$(NULL)
NLFOREIGN_SUBDIR_DEPENDENCIES = \
$(NLUNIT_TEST_FOREIGN_SUBDIR_DEPENDENCY) \
${top_builddir}/src \
$(NULL)
$(check_PROGRAMS): $(NLFOREIGN_FILE_DEPENDENCIES)
if @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE
CLEANFILES = $(wildcard *.gcda *.gcno)
if @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE_REPORTS
# The bundle should positively be qualified with the absolute build
# path. Otherwise, VPATH will get auto-prefixed to it if there is
# already such a directory in the non-colocated source tree.
@PACKAGE_SHORT_UPPER@_COVERAGE_BUNDLE = ${abs_builddir}/${PACKAGE}${NL_COVERAGE_BUNDLE_SUFFIX}
@PACKAGE_SHORT_UPPER@_COVERAGE_INFO = ${@PACKAGE_SHORT_UPPER@_COVERAGE_BUNDLE}/${PACKAGE}${NL_COVERAGE_INFO_SUFFIX}
$(@PACKAGE_SHORT_UPPER@_COVERAGE_BUNDLE):
$(call create-directory)
$(@PACKAGE_SHORT_UPPER@_COVERAGE_INFO): check-local | $(@PACKAGE_SHORT_UPPER@_COVERAGE_BUNDLE)
$(call generate-coverage-report,${top_builddir})
coverage-local: $(@PACKAGE_SHORT_UPPER@_COVERAGE_INFO)
clean-local: clean-local-coverage
.PHONY: clean-local-coverage
clean-local-coverage:
-$(AM_V_at)rm -rf $(@PACKAGE_SHORT_UPPER@_COVERAGE_BUNDLE)
endif # @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE_REPORTS
endif # @PACKAGE_SHORT_UPPER@_BUILD_COVERAGE
endif # @PACKAGE_SHORT_UPPER@_BUILD_TESTS
include $(abs_top_nlbuild_autotools_dir)/automake/post.am
|
{
"pile_set_name": "Github"
}
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluate Object Detection result on a single image.
Annotate each detected result as true positives or false positive according to
a predefined IOU ratio. Non Maximum Supression is used by default. Multi class
detection is supported by default.
"""
import numpy as np
from object_detection.utils import np_box_list
from object_detection.utils import np_box_list_ops
class PerImageEvaluation(object):
"""Evaluate detection result of a single image."""
def __init__(self,
num_groundtruth_classes,
matching_iou_threshold=0.5,
nms_iou_threshold=0.3,
nms_max_output_boxes=50):
"""Initialized PerImageEvaluation by evaluation parameters.
Args:
num_groundtruth_classes: Number of ground truth object classes
matching_iou_threshold: A ratio of area intersection to union, which is
the threshold to consider whether a detection is true positive or not
nms_iou_threshold: IOU threshold used in Non Maximum Suppression.
nms_max_output_boxes: Number of maximum output boxes in NMS.
"""
self.matching_iou_threshold = matching_iou_threshold
self.nms_iou_threshold = nms_iou_threshold
self.nms_max_output_boxes = nms_max_output_boxes
self.num_groundtruth_classes = num_groundtruth_classes
def compute_object_detection_metrics(
self, detected_boxes, detected_scores, detected_class_labels,
groundtruth_boxes, groundtruth_class_labels,
groundtruth_is_difficult_lists, groundtruth_is_group_of_list):
"""Evaluates detections as being tp, fp or ignored from a single image.
The evaluation is done in two stages:
1. All detections are matched to non group-of boxes; true positives are
determined and detections matched to difficult boxes are ignored.
2. Detections that are determined as false positives are matched against
group-of boxes and ignored if matched.
Args:
detected_boxes: A float numpy array of shape [N, 4], representing N
regions of detected object regions.
Each row is of the format [y_min, x_min, y_max, x_max]
detected_scores: A float numpy array of shape [N, 1], representing
the confidence scores of the detected N object instances.
detected_class_labels: A integer numpy array of shape [N, 1], repreneting
the class labels of the detected N object instances.
groundtruth_boxes: A float numpy array of shape [M, 4], representing M
regions of object instances in ground truth
groundtruth_class_labels: An integer numpy array of shape [M, 1],
representing M class labels of object instances in ground truth
groundtruth_is_difficult_lists: A boolean numpy array of length M denoting
whether a ground truth box is a difficult instance or not
groundtruth_is_group_of_list: A boolean numpy array of length M denoting
whether a ground truth box has group-of tag
Returns:
scores: A list of C float numpy arrays. Each numpy array is of
shape [K, 1], representing K scores detected with object class
label c
tp_fp_labels: A list of C boolean numpy arrays. Each numpy array
is of shape [K, 1], representing K True/False positive label of
object instances detected with class label c
is_class_correctly_detected_in_image: a numpy integer array of
shape [C, 1], indicating whether the correponding class has a least
one instance being correctly detected in the image
"""
detected_boxes, detected_scores, detected_class_labels = (
self._remove_invalid_boxes(detected_boxes, detected_scores,
detected_class_labels))
scores, tp_fp_labels = self._compute_tp_fp(
detected_boxes, detected_scores, detected_class_labels,
groundtruth_boxes, groundtruth_class_labels,
groundtruth_is_difficult_lists, groundtruth_is_group_of_list)
is_class_correctly_detected_in_image = self._compute_cor_loc(
detected_boxes, detected_scores, detected_class_labels,
groundtruth_boxes, groundtruth_class_labels)
return scores, tp_fp_labels, is_class_correctly_detected_in_image
def _compute_cor_loc(self, detected_boxes, detected_scores,
detected_class_labels, groundtruth_boxes,
groundtruth_class_labels):
"""Compute CorLoc score for object detection result.
Args:
detected_boxes: A float numpy array of shape [N, 4], representing N
regions of detected object regions.
Each row is of the format [y_min, x_min, y_max, x_max]
detected_scores: A float numpy array of shape [N, 1], representing
the confidence scores of the detected N object instances.
detected_class_labels: A integer numpy array of shape [N, 1], repreneting
the class labels of the detected N object instances.
groundtruth_boxes: A float numpy array of shape [M, 4], representing M
regions of object instances in ground truth
groundtruth_class_labels: An integer numpy array of shape [M, 1],
representing M class labels of object instances in ground truth
Returns:
is_class_correctly_detected_in_image: a numpy integer array of
shape [C, 1], indicating whether the correponding class has a least
one instance being correctly detected in the image
"""
is_class_correctly_detected_in_image = np.zeros(
self.num_groundtruth_classes, dtype=int)
for i in range(self.num_groundtruth_classes):
gt_boxes_at_ith_class = groundtruth_boxes[groundtruth_class_labels ==
i, :]
detected_boxes_at_ith_class = detected_boxes[detected_class_labels ==
i, :]
detected_scores_at_ith_class = detected_scores[detected_class_labels == i]
is_class_correctly_detected_in_image[i] = (
self._compute_is_aclass_correctly_detected_in_image(
detected_boxes_at_ith_class, detected_scores_at_ith_class,
gt_boxes_at_ith_class))
return is_class_correctly_detected_in_image
def _compute_is_aclass_correctly_detected_in_image(
self, detected_boxes, detected_scores, groundtruth_boxes):
"""Compute CorLoc score for a single class.
Args:
detected_boxes: A numpy array of shape [N, 4] representing detected box
coordinates
detected_scores: A 1-d numpy array of length N representing classification
score
groundtruth_boxes: A numpy array of shape [M, 4] representing ground truth
box coordinates
Returns:
is_class_correctly_detected_in_image: An integer 1 or 0 denoting whether a
class is correctly detected in the image or not
"""
if detected_boxes.size > 0:
if groundtruth_boxes.size > 0:
max_score_id = np.argmax(detected_scores)
detected_boxlist = np_box_list.BoxList(
np.expand_dims(detected_boxes[max_score_id, :], axis=0))
gt_boxlist = np_box_list.BoxList(groundtruth_boxes)
iou = np_box_list_ops.iou(detected_boxlist, gt_boxlist)
if np.max(iou) >= self.matching_iou_threshold:
return 1
return 0
def _compute_tp_fp(self, detected_boxes, detected_scores,
detected_class_labels, groundtruth_boxes,
groundtruth_class_labels, groundtruth_is_difficult_lists,
groundtruth_is_group_of_list):
"""Labels true/false positives of detections of an image across all classes.
Args:
detected_boxes: A float numpy array of shape [N, 4], representing N
regions of detected object regions.
Each row is of the format [y_min, x_min, y_max, x_max]
detected_scores: A float numpy array of shape [N, 1], representing
the confidence scores of the detected N object instances.
detected_class_labels: A integer numpy array of shape [N, 1], repreneting
the class labels of the detected N object instances.
groundtruth_boxes: A float numpy array of shape [M, 4], representing M
regions of object instances in ground truth
groundtruth_class_labels: An integer numpy array of shape [M, 1],
representing M class labels of object instances in ground truth
groundtruth_is_difficult_lists: A boolean numpy array of length M denoting
whether a ground truth box is a difficult instance or not
groundtruth_is_group_of_list: A boolean numpy array of length M denoting
whether a ground truth box has group-of tag
Returns:
result_scores: A list of float numpy arrays. Each numpy array is of
shape [K, 1], representing K scores detected with object class
label c
result_tp_fp_labels: A list of boolean numpy array. Each numpy array is of
shape [K, 1], representing K True/False positive label of object
instances detected with class label c
"""
result_scores = []
result_tp_fp_labels = []
for i in range(self.num_groundtruth_classes):
gt_boxes_at_ith_class = groundtruth_boxes[(groundtruth_class_labels == i
), :]
groundtruth_is_difficult_list_at_ith_class = (
groundtruth_is_difficult_lists[groundtruth_class_labels == i])
groundtruth_is_group_of_list_at_ith_class = (
groundtruth_is_group_of_list[groundtruth_class_labels == i])
detected_boxes_at_ith_class = detected_boxes[(detected_class_labels == i
), :]
detected_scores_at_ith_class = detected_scores[detected_class_labels == i]
scores, tp_fp_labels = self._compute_tp_fp_for_single_class(
detected_boxes_at_ith_class, detected_scores_at_ith_class,
gt_boxes_at_ith_class, groundtruth_is_difficult_list_at_ith_class,
groundtruth_is_group_of_list_at_ith_class)
result_scores.append(scores)
result_tp_fp_labels.append(tp_fp_labels)
return result_scores, result_tp_fp_labels
def _remove_invalid_boxes(self, detected_boxes, detected_scores,
detected_class_labels):
valid_indices = np.logical_and(detected_boxes[:, 0] < detected_boxes[:, 2],
detected_boxes[:, 1] < detected_boxes[:, 3])
return (detected_boxes[valid_indices, :], detected_scores[valid_indices],
detected_class_labels[valid_indices])
  def _compute_tp_fp_for_single_class(
      self, detected_boxes, detected_scores, groundtruth_boxes,
      groundtruth_is_difficult_list, groundtruth_is_group_of_list):
    """Labels boxes detected with the same class from the same image as tp/fp.

    Args:
      detected_boxes: A numpy array of shape [N, 4] representing detected box
          coordinates
      detected_scores: A 1-d numpy array of length N representing classification
          score
      groundtruth_boxes: A numpy array of shape [M, 4] representing ground truth
          box coordinates
      groundtruth_is_difficult_list: A boolean numpy array of length M denoting
          whether a ground truth box is a difficult instance or not. If a
          groundtruth box is difficult, every detection matching this box
          is ignored.
      groundtruth_is_group_of_list: A boolean numpy array of length M denoting
          whether a ground truth box has group-of tag. If a groundtruth box
          is group-of box, every detection matching this box is ignored.

    Returns:
      Two arrays of the same size, containing all boxes that were evaluated as
      being true positives or false positives; if a box matched to a difficult
      box or to a group-of box, it is ignored.

      scores: A numpy array representing the detection scores.
      tp_fp_labels: a boolean numpy array indicating whether a detection is a
          true positive.
    """
    # No detections: nothing to score.
    if detected_boxes.size == 0:
      return np.array([], dtype=float), np.array([], dtype=bool)
    # Run NMS first so duplicates/near-duplicates of the same object are
    # removed and at most self.nms_max_output_boxes detections remain.
    detected_boxlist = np_box_list.BoxList(detected_boxes)
    detected_boxlist.add_field('scores', detected_scores)
    detected_boxlist = np_box_list_ops.non_max_suppression(
        detected_boxlist, self.nms_max_output_boxes, self.nms_iou_threshold)
    scores = detected_boxlist.get_field('scores')
    # No groundtruth: every surviving detection is a false positive.
    if groundtruth_boxes.size == 0:
      return scores, np.zeros(detected_boxlist.num_boxes(), dtype=bool)
    tp_fp_labels = np.zeros(detected_boxlist.num_boxes(), dtype=bool)
    is_matched_to_difficult_box = np.zeros(
        detected_boxlist.num_boxes(), dtype=bool)
    is_matched_to_group_of_box = np.zeros(
        detected_boxlist.num_boxes(), dtype=bool)
    # The evaluation is done in two stages:
    # 1. All detections are matched to non group-of boxes; true positives are
    #    determined and detections matched to difficult boxes are ignored.
    # 2. Detections that are determined as false positives are matched against
    #    group-of boxes and ignored if matched.
    # Tp-fp evaluation for non-group of boxes (if any).
    gt_non_group_of_boxlist = np_box_list.BoxList(
        groundtruth_boxes[~groundtruth_is_group_of_list, :])
    if gt_non_group_of_boxlist.num_boxes() > 0:
      groundtruth_nongroup_of_is_difficult_list = groundtruth_is_difficult_list[
          ~groundtruth_is_group_of_list]
      iou = np_box_list_ops.iou(detected_boxlist, gt_non_group_of_boxlist)
      # Each detection is matched only to its single best-overlapping gt box.
      max_overlap_gt_ids = np.argmax(iou, axis=1)
      is_gt_box_detected = np.zeros(
          gt_non_group_of_boxlist.num_boxes(), dtype=bool)
      # Greedy matching in detection order: a gt box may claim at most one
      # detection (the first that reaches it); later matches to the same gt
      # box remain False, i.e. count as false positives.
      for i in range(detected_boxlist.num_boxes()):
        gt_id = max_overlap_gt_ids[i]
        if iou[i, gt_id] >= self.matching_iou_threshold:
          if not groundtruth_nongroup_of_is_difficult_list[gt_id]:
            if not is_gt_box_detected[gt_id]:
              tp_fp_labels[i] = True
              is_gt_box_detected[gt_id] = True
          else:
            # Matched a "difficult" gt box: excluded from scoring entirely.
            is_matched_to_difficult_box[i] = True
    # Tp-fp evaluation for group of boxes.
    gt_group_of_boxlist = np_box_list.BoxList(
        groundtruth_boxes[groundtruth_is_group_of_list, :])
    if gt_group_of_boxlist.num_boxes() > 0:
      # NOTE(review): group-of matching uses IOA (intersection over detection
      # area) but compares it against matching_iou_threshold — presumably
      # intentional per the Open Images protocol; confirm before changing.
      ioa = np_box_list_ops.ioa(gt_group_of_boxlist, detected_boxlist)
      max_overlap_group_of_gt = np.max(ioa, axis=0)
      for i in range(detected_boxlist.num_boxes()):
        # Only detections not already labeled TP and not ignored as
        # difficult-matched can be absorbed by a group-of box.
        if (not tp_fp_labels[i] and not is_matched_to_difficult_box[i] and
            max_overlap_group_of_gt[i] >= self.matching_iou_threshold):
          is_matched_to_group_of_box[i] = True
    # Drop every ignored detection (difficult- or group-of-matched) from both
    # outputs so the returned arrays stay aligned.
    return scores[~is_matched_to_difficult_box
                  & ~is_matched_to_group_of_box], tp_fp_labels[
                      ~is_matched_to_difficult_box
                      & ~is_matched_to_group_of_box]
|
{
"pile_set_name": "Github"
}
|
<?php
/**
 * Wolf CMS backup_restore plugin language file (Hungarian).
 *
 * Maps the English source strings (keys) to their Hungarian translations.
 * NOTE: entries whose value equals the key are still untranslated.
 * NOTE: the misspelled key 'Succesfully restored backup.' is intentionally
 * kept as-is — it must match the string used in the plugin source code.
 *
 * @package Translations
 */
return array(
'Are you sure you wish to restore?' => 'Biztosan szeretnéd visszaállítani a korábbi állapotot?',
'As such, the contents of your backup file will replace the contents of your Wolf CMS database.' => 'As such, the contents of your backup file will replace the contents of your Wolf CMS database.',
'Backup Restore' => 'Mentés & Visszaállítás',
'Backup file was not uploaded correctly/completely or is broken.' => 'Backup file was not uploaded correctly/completely or is broken.',
'Backup settings' => 'Mentés beállítások',
'Backup/Restore plugin' => 'Mentés/Visszaállítás kiegészítő',
'By default, the download is generated in a zip file. If you want to download the plain unzipped XML file, go to the settings for this plugin and change the option there.' => 'By default, the download is generated in a zip file. If you want to download the plain unzipped XML file, go to the settings for this plugin and change the option there.',
'Create a backup' => 'Mentés készítése',
'Creating the backup' => 'Mentés készítése folyamatban',
'Current style' => 'Current style',
'Designed for Wolf version' => 'Designed for Wolf version',
'Do NOT upload a zip file, only upload a plain text XML file!' => 'Do NOT upload a zip file, only upload a plain text XML file!',
'Do you want to download the backup as a zip file?' => 'Do you want to download the backup as a zip file?',
'Do you want to include passwords in the backup file? <br/> If you select no, all passwords will be reset upon restoring the backup.' => 'Do you want to include passwords in the backup file? <br/> If you select no, all passwords will be reset upon restoring the backup.',
'Do you want to include uploaded files in the backup file?' => 'Do you want to include uploaded files in the backup file?',
'Documentation' => 'Dokumentáció',
'Erase files' => 'Erase files',
'Erase uploaded files before restoring backup?' => 'Erase uploaded files before restoring backup?',
'Example:' => 'Példa:',
'Filename extension' => 'Fájlnév kitejesztés',
'Filename timestamp style' => 'Fájlnév dátum formátuma',
'If no password is provided in the backup file, reset all password fields to this default.' => 'If no password is provided in the backup file, reset all password fields to this default.',
'In addition, if enabled in the settings, the contents of your uploaded files directory will be erased and may be overwritten from any files contained in the backup.' => 'In addition, if enabled in the settings, the contents of your uploaded files directory will be erased and may be overwritten from any files contained in the backup.',
'Include files' => 'Fájlok mentése',
'Include passwords' => 'Jelszavak mentése',
'No' => 'Nem',
'Package as zip file' => 'Package as zip file',
'Please be aware that <strong>all</strong> the database tables will be truncated when performing a restore. Truncating a table means that all records in that table are deleted.' => 'Please be aware that <strong>all</strong> the database tables will be truncated when performing a restore. Truncating a table means that all records in that table are deleted.',
'Provides administrators with the option of backing up their pages and settings to an XML file.' => 'Provides administrators with the option of backing up their pages and settings to an XML file.',
'Reset passwords to' => 'Reset passwords to',
'Restore a backup' => 'Visszaállítás mentésből',
'Restore files' => 'Fájlok visszaállítása',
'Restore settings' => 'Visszaállítás beállításai',
'Restore uploaded files from backup?' => 'Restore uploaded files from backup?',
'Restoring a backup' => 'Visszaállítás mentésből folyamatban',
'Save' => 'Mentés',
'Settings' => 'Beállítások',
'Succesfully restored backup.' => 'Sikeres visszaállítás.',
'Successfully uninstalled plugin.' => 'Kiegészítő sikeresen eltávolítva.',
'The Backup/Restore plugin allows you to create complete backups of the Wolf CMS core database.' => 'The Backup/Restore plugin allows you to create complete backups of the Wolf CMS core database.',
'The Backup/Restore plugin allows you to create complete backups of the Wolf CMS database. It generates an XML file that contains all records for each of the Wolf CMS database tables, and optionally all uploaded files.' => 'The Backup/Restore plugin allows you to create complete backups of the Wolf CMS database. It generates an XML file that contains all records for each of the Wolf CMS database tables, and optionally all uploaded files.',
'The settings have been saved.' => 'Beállítások sikeresen elmentve.',
'This is an example of the filename that will be used for the generated XML file.' => 'This is an example of the filename that will be used for the generated XML file.',
'To create and download the backup, simply select the "Create a backup" option.' => 'To create and download the backup, simply select the "Create a backup" option.',
'To upload and restore a backup, simply select the "Restore a backup" option.' => 'To upload and restore a backup, simply select the "Restore a backup" option.',
'Unable to create directory :name.' => 'Unable to create directory :name.',
'Unable to delete directory :name.' => 'Unable to delete directory :name.',
'Unable to delete file :name.' => 'Unable to delete file :name.',
'Unable to reconstruct table :tablename.' => 'Unable to reconstruct table :tablename.',
'Unable to remove plugin settings.' => 'Unable to remove plugin settings.',
'Unable to restore attributes for :name.' => 'Unable to restore attributes for :name.',
'Unable to restore file :name.' => 'Unable to restore file :name.',
'Unable to restore modification date for :name.' => 'Unable to restore modification date for :name.',
'Unable to truncate current table :tablename.' => 'Unable to truncate current table :tablename.',
'Upload plain text XML file' => 'Upload plain text XML file',
'Version' => 'Version',
'Warning!' => 'Warning!',
'What extension should be used for the filename.' => 'What extension should be used for the filename.',
'What style of timestamp should be encorporated into the filename.' => 'What style of timestamp should be used in the filename.',
'When restoring a backup, please make sure that the backup file was generated from the same Wolf CMS <em>version</em> as you are restoring it to.' => 'When restoring a backup, please make sure that the backup file was generated from the same Wolf CMS <em>version</em> as you are restoring it to.',
'Yes' => 'Igen',
'You can set a default password to enter into any password fields if the backup file does not contain passwords. For this to function, the system expects there to be password fields in the backup file with no value.' => 'You can set a default password to enter into any password fields if the backup file does not contain passwords. For this to function, the system expects there to be password fields in the backup file with no value.',
'You do not have permission to access the requested page!' => 'You do not have permission to access the requested page!',
'You have modified this page. If you navigate away from this page without first saving your data, the changes will be lost.' => 'Az oldal módosításai elvesznek ha elnavigálsz!',
'and upwards.' => 'and upwards.'
);
|
{
"pile_set_name": "Github"
}
|
package com.code44.finance.ui.transactions.edit.autocomplete.adapters;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.code44.finance.R;
import com.code44.finance.data.model.Account;
import com.code44.finance.ui.transactions.edit.autocomplete.AutoCompleteAdapter;
import com.code44.finance.ui.transactions.edit.autocomplete.AutoCompleteResult;
import com.code44.finance.ui.transactions.edit.presenters.TransactionEditData;
import java.util.List;
/**
 * Auto-complete adapter offering "account from" suggestions while editing a
 * transaction. Each suggestion row shows the account title, indented to the
 * content keyline.
 */
public class AutoCompleteAccountsFromAdapter extends AutoCompleteAdapter<Account> {
    public AutoCompleteAccountsFromAdapter(ViewGroup containerView, AutoCompleteAdapterListener listener, OnAutoCompleteItemClickListener<Account> clickListener) {
        super(containerView, listener, clickListener);
    }

    /** Inflates a single account row and pushes its start edge to the content keyline. */
    @Override protected View newView(Context context, ViewGroup containerView) {
        final View itemView = LayoutInflater.from(context)
                .inflate(R.layout.li_account, containerView, false);
        final int startPadding = context.getResources()
                .getDimensionPixelSize(R.dimen.keyline_content);
        itemView.setPadding(startPadding, itemView.getPaddingTop(), itemView.getPaddingRight(), itemView.getPaddingBottom());
        return itemView;
    }

    /** Binds the account title into the row's title view. */
    @Override protected void bindView(View view, Account account) {
        final TextView titleView = (TextView) view.findViewById(R.id.titleTextView);
        titleView.setText(account.getTitle());
    }

    @Override protected boolean isSameAdapter(AutoCompleteAdapter<?> currentAdapter) {
        return currentAdapter instanceof AutoCompleteAccountsFromAdapter;
    }

    /** Suggests only non-null accounts different from the currently selected "from" account. */
    @Override protected boolean showItem(TransactionEditData transactionEditData, Account item) {
        if (item == null) {
            return false;
        }
        return !item.equals(transactionEditData.getAccountFrom());
    }

    @Override protected List<Account> getItems(AutoCompleteResult autoCompleteResult) {
        return autoCompleteResult.getAccountsFrom();
    }
}
|
{
"pile_set_name": "Github"
}
|
// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// *Preprocessed* version of the main "deque.hpp" header
// -- DO NOT modify by hand!
namespace boost { namespace mpl {
// mpl::deque is a thin alias layer over mpl::vector: the forward declaration
// below defaults every parameter to `na`, and each fixed-arity partial
// specialization that follows simply inherits from the matching vectorN<...>
// and re-exports its nested ::type.  The specializations are mechanically
// generated (see the "DO NOT modify by hand" header note); edit the primary
// "deque.hpp" source instead of any individual specialization.
template<
      typename T0 = na, typename T1 = na, typename T2 = na, typename T3 = na
    , typename T4 = na, typename T5 = na, typename T6 = na, typename T7 = na
    , typename T8 = na, typename T9 = na, typename T10 = na, typename T11 = na
    , typename T12 = na, typename T13 = na, typename T14 = na
    , typename T15 = na, typename T16 = na, typename T17 = na
    , typename T18 = na, typename T19 = na
    >
struct deque;
// Arity-K specialization pattern: deque< T0..T(K-1), na...na > -> vectorK.
template<
    >
struct deque<
      na, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector0< >
{
    typedef vector0< >::type type;
};
template<
      typename T0
    >
struct deque<
      T0, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector1<T0>
{
    typedef typename vector1<T0>::type type;
};
template<
      typename T0, typename T1
    >
struct deque<
      T0, T1, na, na, na, na, na, na, na, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector2< T0,T1 >
{
    typedef typename vector2< T0,T1 >::type type;
};
template<
      typename T0, typename T1, typename T2
    >
struct deque<
      T0, T1, T2, na, na, na, na, na, na, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector3< T0,T1,T2 >
{
    typedef typename vector3< T0,T1,T2 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3
    >
struct deque<
      T0, T1, T2, T3, na, na, na, na, na, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector4< T0,T1,T2,T3 >
{
    typedef typename vector4< T0,T1,T2,T3 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    >
struct deque<
      T0, T1, T2, T3, T4, na, na, na, na, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector5< T0,T1,T2,T3,T4 >
{
    typedef typename vector5< T0,T1,T2,T3,T4 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5
    >
struct deque<
      T0, T1, T2, T3, T4, T5, na, na, na, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector6< T0,T1,T2,T3,T4,T5 >
{
    typedef typename vector6< T0,T1,T2,T3,T4,T5 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, na, na, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector7< T0,T1,T2,T3,T4,T5,T6 >
{
    typedef typename vector7< T0,T1,T2,T3,T4,T5,T6 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, na, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector8< T0,T1,T2,T3,T4,T5,T6,T7 >
{
    typedef typename vector8< T0,T1,T2,T3,T4,T5,T6,T7 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, na, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector9< T0,T1,T2,T3,T4,T5,T6,T7,T8 >
{
    typedef typename vector9< T0,T1,T2,T3,T4,T5,T6,T7,T8 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, na, na, na, na, na, na, na
    , na, na, na
    >
    : vector10< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9 >
{
    typedef typename vector10< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, na, na, na, na, na, na
    , na, na, na
    >
    : vector11< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10 >
{
    typedef typename vector11< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10, typename T11
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, na, na, na, na
    , na, na, na, na
    >
    : vector12< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11 >
{
    typedef typename vector12< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10, typename T11, typename T12
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, na, na, na
    , na, na, na, na
    >
    : vector13< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12 >
{
    typedef typename vector13< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10, typename T11, typename T12, typename T13
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, na, na
    , na, na, na, na
    >
    : vector14< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13 >
{
    typedef typename vector14< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10, typename T11, typename T12, typename T13, typename T14
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, na
    , na, na, na, na
    >
    : vector15<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    >
{
    typedef typename vector15< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10, typename T11, typename T12, typename T13, typename T14
    , typename T15
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    , T15, na, na, na, na
    >
    : vector16<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    , T15
    >
{
    typedef typename vector16< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10, typename T11, typename T12, typename T13, typename T14
    , typename T15, typename T16
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    , T15, T16, na, na, na
    >
    : vector17<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    , T15, T16
    >
{
    typedef typename vector17< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10, typename T11, typename T12, typename T13, typename T14
    , typename T15, typename T16, typename T17
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    , T15, T16, T17, na, na
    >
    : vector18<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    , T15, T16, T17
    >
{
    typedef typename vector18< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17 >::type type;
};
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10, typename T11, typename T12, typename T13, typename T14
    , typename T15, typename T16, typename T17, typename T18
    >
struct deque<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    , T15, T16, T17, T18, na
    >
    : vector19<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    , T15, T16, T17, T18
    >
{
    typedef typename vector19< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18 >::type type;
};
/// primary template (not a specialization!)
template<
      typename T0, typename T1, typename T2, typename T3, typename T4
    , typename T5, typename T6, typename T7, typename T8, typename T9
    , typename T10, typename T11, typename T12, typename T13, typename T14
    , typename T15, typename T16, typename T17, typename T18, typename T19
    >
struct deque
    : vector20<
      T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14
    , T15, T16, T17, T18, T19
    >
{
    typedef typename vector20< T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12,T13,T14,T15,T16,T17,T18,T19 >::type type;
};
}}
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.juli.logging;
/**
 * <p>An exception that is thrown only if a suitable <code>LogFactory</code>
 * or <code>Log</code> instance cannot be created by the corresponding
 * factory methods.</p>
 *
 * <p>This is an unchecked exception: configuration failures are considered
 * unrecoverable at the call site, so callers are not forced to catch it.</p>
 *
 * @author Craig R. McClanahan
 */
public class LogConfigurationException extends RuntimeException {

    // Serialization identifier; required because RuntimeException is
    // Serializable. Fixed at 1L since this class adds no state of its own.
    private static final long serialVersionUID = 1L;

    /**
     * Construct a new exception with <code>null</code> as its detail message.
     */
    public LogConfigurationException() {
        super();
    }

    /**
     * Construct a new exception with the specified detail message.
     *
     * @param message The detail message
     */
    public LogConfigurationException(String message) {
        super(message);
    }

    /**
     * Construct a new exception with the specified cause and a derived
     * detail message.
     *
     * @param cause The underlying cause
     */
    public LogConfigurationException(Throwable cause) {
        super(cause);
    }

    /**
     * Construct a new exception with the specified detail message and cause.
     *
     * @param message The detail message
     * @param cause The underlying cause
     */
    public LogConfigurationException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
{
"pile_set_name": "Github"
}
|
fails:Rational#abs returns self's absolute value
|
{
"pile_set_name": "Github"
}
|
## <span id="title">MQCloud——集客户端SDK、运维、监控预警等功能于一体的[RocketMQ](https://github.com/apache/rocketmq)企业级一站式服务平台</span>
**它具备以下特性:**
* 跨集群:可以同时管理多个集群,对使用者透明。
* 预警功能:针对生产或消费堆积,失败,异常等情况预警。
* 简单明了:用户视图-拓扑、流量、消费状况等指标直接展示;管理员视图-集群运维、监控、流程审批等。
* 安全:用户隔离,操作审批,数据安全。
* 更多特性正在开发中。
* 下图简单描述了MQCloud大概的功能:

----------
## <span id="future">特性概览</span>
* 用户topic列表-不同用户看到不同的topic,管理员可以管理所有topic

* topic详情-分三块 基本信息,今日流程,拓扑

* 生产详情

* 消费详情

* 某个消费者具体的消费详情-可以查询重试消息和死消息

* 消息

* 消息消费情况

* 集群发现

* 集群管理

* 集群流量

* 创建broker

----------
## <span id="situation">目前运维的规模</span>
1. 服务器:40台+
2. 集群:5个+
3. topic:370个+
4. 生产消费消息量/日:10亿条+
5. 生产消费消息大小/日:1T+
----------
## <span id="contract">联系方式</span>
MQCloud QQ交流群:474960759
使用方式请参考[wiki](https://github.com/sohutv/sohu-tv-mq/wiki)。
|
{
"pile_set_name": "Github"
}
|
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Windows environment variables.
package windows
import "syscall"
// Getenv retrieves the value of the environment variable named by key.
// found reports whether the variable is present in the environment.
func Getenv(key string) (value string, found bool) {
	return syscall.Getenv(key)
}
// Setenv sets the environment variable named by key to value,
// returning any error reported by the underlying syscall.
func Setenv(key, value string) error {
	return syscall.Setenv(key, value)
}
// Clearenv deletes all environment variables for the current process.
func Clearenv() {
	syscall.Clearenv()
}
// Environ returns a copy of the environment as "key=value" strings.
func Environ() []string {
	return syscall.Environ()
}
// Unsetenv removes the environment variable named by key,
// returning any error reported by the underlying syscall.
func Unsetenv(key string) error {
	return syscall.Unsetenv(key)
}
|
{
"pile_set_name": "Github"
}
|
// Copyright 2017 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// -----------------------------------------------------------------------------
// optional.h
// -----------------------------------------------------------------------------
//
// This header file defines the `absl::optional` type for holding a value which
// may or may not be present. This type is useful for providing value semantics
// for operations that may either wish to return or hold "something-or-nothing".
//
// Example:
//
// // A common way to signal operation failure is to provide an output
// // parameter and a bool return type:
// bool AcquireResource(const Input&, Resource * out);
//
// // Providing an absl::optional return type provides a cleaner API:
// absl::optional<Resource> AcquireResource(const Input&);
//
// `absl::optional` is a C++11 compatible version of the C++17 `std::optional`
// abstraction and is designed to be a drop-in replacement for code compliant
// with C++17.
#ifndef ABSL_TYPES_OPTIONAL_H_
#define ABSL_TYPES_OPTIONAL_H_
#include "absl/base/config.h" // TODO(calabrese) IWYU removal?
#include "absl/utility/utility.h"
#ifdef ABSL_HAVE_STD_OPTIONAL
#include <optional> // IWYU pragma: export
namespace absl {
using std::bad_optional_access;
using std::optional;
using std::make_optional;
using std::nullopt_t;
using std::nullopt;
} // namespace absl
#else // ABSL_HAVE_STD_OPTIONAL
#include <cassert>
#include <functional>
#include <initializer_list>
#include <type_traits>
#include <utility>
#include "absl/base/attributes.h"
#include "absl/base/internal/inline_variable.h"
#include "absl/meta/type_traits.h"
#include "absl/types/bad_optional_access.h"
#include "absl/types/internal/optional.h"
namespace absl {
// nullopt_t
//
// Class type for `absl::nullopt` used to indicate an `absl::optional<T>` type
// that does not contain a value.
struct nullopt_t {
  // It must not be default-constructible to avoid ambiguity for opt = {}.
  //
  // The constructor takes an internal tag type (optional_internal::init_t) so
  // only this header can create instances; user code refers to the single
  // `absl::nullopt` constant declared below instead.
  explicit constexpr nullopt_t(optional_internal::init_t) noexcept {}
};

// nullopt
//
// A tag constant of type `absl::nullopt_t` used to indicate an empty
// `absl::optional` in certain functions, such as construction or assignment.
// The macro emulates a C++17 inline variable so the constant has external
// linkage and a single address even when compiled as C++11 (see the note on
// inline-variable support in the class commentary below).
ABSL_INTERNAL_INLINE_CONSTEXPR(nullopt_t, nullopt,
                               nullopt_t(optional_internal::init_t()));
// -----------------------------------------------------------------------------
// absl::optional
// -----------------------------------------------------------------------------
//
// A value of type `absl::optional<T>` holds either a value of `T` or an
// "empty" value. When it holds a value of `T`, it stores it as a direct
// sub-object, so `sizeof(optional<T>)` is approximately
// `sizeof(T) + sizeof(bool)`.
//
// This implementation is based on the specification in the latest draft of the
// C++17 `std::optional` specification as of May 2017, section 20.6.
//
// Differences between `absl::optional<T>` and `std::optional<T>` include:
//
// * `constexpr` is not used for non-const member functions.
// (dependency on some differences between C++11 and C++14.)
// * `absl::nullopt` and `absl::in_place` are not declared `constexpr`. We
// need the inline variable support in C++17 for external linkage.
// * Throws `absl::bad_optional_access` instead of
// `std::bad_optional_access`.
// * `make_optional()` cannot be declared `constexpr` due to the absence of
// guaranteed copy elision.
// * The move constructor's `noexcept` specification is stronger, i.e. if the
// default allocator is non-throwing (via setting
// `ABSL_ALLOCATOR_NOTHROW`), it evaluates to `noexcept(true)`, because
// we assume
// a) move constructors should only throw due to allocation failure and
// b) if T's move constructor allocates, it uses the same allocation
// function as the default allocator.
//
template <typename T>
class optional : private optional_internal::optional_data<T>,
private optional_internal::optional_ctor_base<
optional_internal::ctor_copy_traits<T>::traits>,
private optional_internal::optional_assign_base<
optional_internal::assign_copy_traits<T>::traits> {
using data_base = optional_internal::optional_data<T>;
public:
typedef T value_type;
// Constructors
// Constructs an `optional` holding an empty value, NOT a default constructed
// `T`.
constexpr optional() noexcept {}
// Constructs an `optional` initialized with `nullopt` to hold an empty value.
constexpr optional(nullopt_t) noexcept {} // NOLINT(runtime/explicit)
// Copy constructor, standard semantics
optional(const optional& src) = default;
// Move constructor, standard semantics
optional(optional&& src) = default;
// Constructs a non-empty `optional` direct-initialized value of type `T` from
// the arguments `std::forward<Args>(args)...` within the `optional`.
// (The `in_place_t` is a tag used to indicate that the contained object
// should be constructed in-place.)
template <typename InPlaceT, typename... Args,
absl::enable_if_t<absl::conjunction<
std::is_same<InPlaceT, in_place_t>,
std::is_constructible<T, Args&&...> >::value>* = nullptr>
constexpr explicit optional(InPlaceT, Args&&... args)
: data_base(in_place_t(), absl::forward<Args>(args)...) {}
// Constructs a non-empty `optional` direct-initialized value of type `T` from
// the arguments of an initializer_list and `std::forward<Args>(args)...`.
// (The `in_place_t` is a tag used to indicate that the contained object
// should be constructed in-place.)
template <typename U, typename... Args,
typename = typename std::enable_if<std::is_constructible<
T, std::initializer_list<U>&, Args&&...>::value>::type>
constexpr explicit optional(in_place_t, std::initializer_list<U> il,
Args&&... args)
: data_base(in_place_t(), il, absl::forward<Args>(args)...) {
}
// Value constructor (implicit)
template <
typename U = T,
typename std::enable_if<
absl::conjunction<absl::negation<std::is_same<
in_place_t, typename std::decay<U>::type> >,
absl::negation<std::is_same<
optional<T>, typename std::decay<U>::type> >,
std::is_convertible<U&&, T>,
std::is_constructible<T, U&&> >::value,
bool>::type = false>
constexpr optional(U&& v) : data_base(in_place_t(), absl::forward<U>(v)) {}
// Value constructor (explicit)
template <
typename U = T,
typename std::enable_if<
absl::conjunction<absl::negation<std::is_same<
in_place_t, typename std::decay<U>::type>>,
absl::negation<std::is_same<
optional<T>, typename std::decay<U>::type>>,
absl::negation<std::is_convertible<U&&, T>>,
std::is_constructible<T, U&&>>::value,
bool>::type = false>
explicit constexpr optional(U&& v)
: data_base(in_place_t(), absl::forward<U>(v)) {}
  // Converting copy constructor (implicit). The result is engaged iff `rhs`
  // is engaged. Disabled when `T` could already be constructed from or
  // converted to/from `optional<U>` itself, to avoid ambiguous conversions.
  template <typename U,
            typename std::enable_if<
                absl::conjunction<
                    absl::negation<std::is_same<T, U> >,
                    std::is_constructible<T, const U&>,
                    absl::negation<
                        optional_internal::
                            is_constructible_convertible_from_optional<T, U> >,
                    std::is_convertible<const U&, T> >::value,
                bool>::type = false>
  optional(const optional<U>& rhs) {
    if (rhs) {
      this->construct(*rhs);
    }
  }

  // Converting copy constructor (explicit). As above, but selected when
  // `const U&` is not implicitly convertible to `T`.
  template <typename U,
            typename std::enable_if<
                absl::conjunction<
                    absl::negation<std::is_same<T, U>>,
                    std::is_constructible<T, const U&>,
                    absl::negation<
                        optional_internal::
                            is_constructible_convertible_from_optional<T, U>>,
                    absl::negation<std::is_convertible<const U&, T>>>::value,
                bool>::type = false>
  explicit optional(const optional<U>& rhs) {
    if (rhs) {
      this->construct(*rhs);
    }
  }

  // Converting move constructor (implicit). Moves `*rhs` into the new
  // optional when `rhs` is engaged; `rhs` stays engaged but moved-from.
  template <typename U,
            typename std::enable_if<
                absl::conjunction<
                    absl::negation<std::is_same<T, U> >,
                    std::is_constructible<T, U&&>,
                    absl::negation<
                        optional_internal::
                            is_constructible_convertible_from_optional<T, U> >,
                    std::is_convertible<U&&, T> >::value,
                bool>::type = false>
  optional(optional<U>&& rhs) {
    if (rhs) {
      this->construct(std::move(*rhs));
    }
  }

  // Converting move constructor (explicit). Selected when `U&&` is not
  // implicitly convertible to `T`.
  template <
      typename U,
      typename std::enable_if<
          absl::conjunction<
              absl::negation<std::is_same<T, U>>, std::is_constructible<T, U&&>,
              absl::negation<
                  optional_internal::is_constructible_convertible_from_optional<
                      T, U>>,
              absl::negation<std::is_convertible<U&&, T>>>::value,
          bool>::type = false>
  explicit optional(optional<U>&& rhs) {
    if (rhs) {
      this->construct(std::move(*rhs));
    }
  }
  // Destructor. Trivial if `T` is trivially destructible.
  ~optional() = default;

  // Assignment Operators

  // Assignment from `nullopt`: disengages the optional. A no-op when the
  // optional is already empty.
  //
  // Example:
  //
  //   struct S { int value; };
  //   optional<S> opt = absl::nullopt;  // Could also use opt = { };
  optional& operator=(nullopt_t) noexcept {
    this->destruct();
    return *this;
  }

  // Copy assignment operator, standard semantics
  optional& operator=(const optional& src) = default;

  // Move assignment operator, standard semantics
  optional& operator=(optional&& src) = default;

  // Value assignment operators. The scalar/same-type exclusion keeps
  // `opt = {}` resolving to the nullopt/default path (disengage) rather
  // than value-assignment of a default-constructed scalar.
  template <
      typename U = T,
      typename = typename std::enable_if<absl::conjunction<
          absl::negation<
              std::is_same<optional<T>, typename std::decay<U>::type>>,
          absl::negation<
              absl::conjunction<std::is_scalar<T>,
                                std::is_same<T, typename std::decay<U>::type>>>,
          std::is_constructible<T, U>, std::is_assignable<T&, U>>::value>::type>
  optional& operator=(U&& v) {
    this->assign(std::forward<U>(v));
    return *this;
  }

  // Converting copy assignment: copies `*rhs` when `rhs` is engaged,
  // otherwise disengages `*this`.
  template <
      typename U,
      typename = typename std::enable_if<absl::conjunction<
          absl::negation<std::is_same<T, U>>,
          std::is_constructible<T, const U&>, std::is_assignable<T&, const U&>,
          absl::negation<
              optional_internal::
                  is_constructible_convertible_assignable_from_optional<
                      T, U>>>::value>::type>
  optional& operator=(const optional<U>& rhs) {
    if (rhs) {
      this->assign(*rhs);
    } else {
      this->destruct();
    }
    return *this;
  }

  // Converting move assignment: moves `*rhs` when `rhs` is engaged,
  // otherwise disengages `*this`.
  template <typename U,
            typename = typename std::enable_if<absl::conjunction<
                absl::negation<std::is_same<T, U>>, std::is_constructible<T, U>,
                std::is_assignable<T&, U>,
                absl::negation<
                    optional_internal::
                        is_constructible_convertible_assignable_from_optional<
                            T, U>>>::value>::type>
  optional& operator=(optional<U>&& rhs) {
    if (rhs) {
      this->assign(std::move(*rhs));
    } else {
      this->destruct();
    }
    return *this;
  }
  // Modifiers

  // optional::reset()
  //
  // Destroys the inner `T` value of an `absl::optional` if one is present.
  // The optional is empty afterwards; calling reset() on an empty optional
  // is a no-op.
  ABSL_ATTRIBUTE_REINITIALIZES void reset() noexcept { this->destruct(); }

  // optional::emplace()
  //
  // (Re)constructs the underlying `T` in-place with the given forwarded
  // arguments, destroying any previously contained value first. Returns a
  // reference to the newly constructed value.
  //
  // Example:
  //
  //   optional<Foo> opt;
  //   opt.emplace(arg1,arg2,arg3);  // Constructs Foo(arg1,arg2,arg3)
  //
  // If the optional is non-empty, and the `args` refer to subobjects of the
  // current object, then behaviour is undefined, because the current object
  // will be destructed before the new object is constructed with `args`.
  template <typename... Args,
            typename = typename std::enable_if<
                std::is_constructible<T, Args&&...>::value>::type>
  T& emplace(Args&&... args) {
    this->destruct();
    this->construct(std::forward<Args>(args)...);
    return reference();
  }

  // Emplace reconstruction overload for an initializer list and the given
  // forwarded arguments. Same destroy-then-construct semantics as above.
  //
  // Example:
  //
  //   struct Foo {
  //     Foo(std::initializer_list<int>);
  //   };
  //
  //   optional<Foo> opt;
  //   opt.emplace({1,2,3});  // Constructs Foo({1,2,3})
  template <typename U, typename... Args,
            typename = typename std::enable_if<std::is_constructible<
                T, std::initializer_list<U>&, Args&&...>::value>::type>
  T& emplace(std::initializer_list<U> il, Args&&... args) {
    this->destruct();
    this->construct(il, std::forward<Args>(args)...);
    return reference();
  }
  // Swaps

  // Swap, standard semantics: exchanges the states of `*this` and `rhs`.
  // All four engaged/empty combinations are handled; when exactly one side
  // is engaged, its value is move-constructed into the other side and the
  // source side is disengaged.
  void swap(optional& rhs) noexcept(
      std::is_nothrow_move_constructible<T>::value&&
          type_traits_internal::IsNothrowSwappable<T>::value) {
    if (*this) {
      if (rhs) {
        // Both engaged: swap the contained values directly.
        type_traits_internal::Swap(**this, *rhs);
      } else {
        rhs.construct(std::move(**this));
        this->destruct();
      }
    } else {
      if (rhs) {
        this->construct(std::move(*rhs));
        rhs.destruct();
      } else {
        // No effect (swap(disengaged, disengaged)).
      }
    }
  }
  // Observers

  // optional::operator->()
  //
  // Accesses the underlying `T` value's member `m` of an `optional`. If the
  // `optional` is empty, behavior is undefined.
  //
  // If you need myOpt->foo in constexpr, use (*myOpt).foo instead.
  const T* operator->() const {
    assert(this->engaged_);
    return std::addressof(this->data_);
  }
  T* operator->() {
    assert(this->engaged_);
    return std::addressof(this->data_);
  }

  // optional::operator*()
  //
  // Accesses the underlying `T` value of an `optional`. If the `optional` is
  // empty, behavior is undefined. The `&&` overloads return `T&&` so the
  // contained value can be moved out of an rvalue optional. (The constexpr
  // const overloads cannot `assert` under C++11 constexpr rules.)
  constexpr const T& operator*() const & { return reference(); }
  T& operator*() & {
    assert(this->engaged_);
    return reference();
  }
  constexpr const T&& operator*() const && {
    return absl::move(reference());
  }
  T&& operator*() && {
    assert(this->engaged_);
    return std::move(reference());
  }

  // optional::operator bool()
  //
  // Returns false if and only if the `optional` is empty.
  //
  //   if (opt) {
  //     // do something with opt.value();
  //   } else {
  //     // opt is empty.
  //   }
  //
  constexpr explicit operator bool() const noexcept { return this->engaged_; }

  // optional::has_value()
  //
  // Determines whether the `optional` contains a value. Returns `false` if and
  // only if `*this` is empty.
  constexpr bool has_value() const noexcept { return this->engaged_; }
  // Suppress bogus warning on MSVC: MSVC complains call to reference() after
  // throw_bad_optional_access() is unreachable.
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable : 4702)
#endif  // _MSC_VER
  // optional::value()
  //
  // Returns a reference to an `optional`s underlying value. The constness
  // and lvalue/rvalue-ness of the `optional` is preserved to the view of
  // the `T` sub-object. Throws `absl::bad_optional_access` when the `optional`
  // is empty.
  //
  // In the empty case `throw_bad_optional_access()` throws, so the trailing
  // `reference()` in the comma expression is never reached; it exists only
  // to give both arms of the ternary the same type, keeping these functions
  // valid single-return C++11 constexpr.
  constexpr const T& value() const & {
    return static_cast<bool>(*this)
               ? reference()
               : (optional_internal::throw_bad_optional_access(), reference());
  }
  T& value() & {
    return static_cast<bool>(*this)
               ? reference()
               : (optional_internal::throw_bad_optional_access(), reference());
  }
  T&& value() && {  // NOLINT(build/c++11)
    return std::move(
        static_cast<bool>(*this)
            ? reference()
            : (optional_internal::throw_bad_optional_access(), reference()));
  }
  constexpr const T&& value() const && {  // NOLINT(build/c++11)
    return absl::move(
        static_cast<bool>(*this)
            ? reference()
            : (optional_internal::throw_bad_optional_access(), reference()));
  }
#ifdef _MSC_VER
#pragma warning(pop)
#endif  // _MSC_VER
// optional::value_or()
//
// Returns either the value of `T` or a passed default `v` if the `optional`
// is empty.
template <typename U>
constexpr T value_or(U&& v) const& {
static_assert(std::is_copy_constructible<value_type>::value,
"optional<T>::value_or: T must by copy constructible");
static_assert(std::is_convertible<U&&, value_type>::value,
"optional<T>::value_or: U must be convertible to T");
return static_cast<bool>(*this)
? **this
: static_cast<T>(absl::forward<U>(v));
}
template <typename U>
T value_or(U&& v) && { // NOLINT(build/c++11)
static_assert(std::is_move_constructible<value_type>::value,
"optional<T>::value_or: T must by copy constructible");
static_assert(std::is_convertible<U&&, value_type>::value,
"optional<T>::value_or: U must be convertible to T");
return static_cast<bool>(*this) ? std::move(**this)
: static_cast<T>(std::forward<U>(v));
}
 private:
  // Private accessors for internal storage viewed as reference to T.
  constexpr const T& reference() const { return this->data_; }
  T& reference() { return this->data_; }

  // T constraint checks. You can't have an optional of nullopt_t, in_place_t
  // or a reference; `remove_cv` also rejects cv-qualified variants of the
  // tag types.
  static_assert(
      !std::is_same<nullopt_t, typename std::remove_cv<T>::type>::value,
      "optional<nullopt_t> is not allowed.");
  static_assert(
      !std::is_same<in_place_t, typename std::remove_cv<T>::type>::value,
      "optional<in_place_t> is not allowed.");
  static_assert(!std::is_reference<T>::value,
                "optional<reference> is not allowed.");
};
// Non-member functions

// swap()
//
// Performs a swap between two `absl::optional` objects, using standard
// semantics. Participates in overload resolution only when `T` is both
// move constructible and swappable; otherwise this overload is removed,
// matching the std::optional non-member swap.
template <typename T, typename std::enable_if<
                          std::is_move_constructible<T>::value &&
                              type_traits_internal::IsSwappable<T>::value,
                          bool>::type = false>
void swap(optional<T>& a, optional<T>& b) noexcept(noexcept(a.swap(b))) {
  a.swap(b);
}
// make_optional()
//
// Creates a non-empty `optional<T>` where the type of `T` is deduced. An
// `absl::optional` can also be explicitly instantiated with
// `make_optional<T>(v)`.
//
// Note: `make_optional()` constructions may be declared `constexpr` for
// trivially copyable types `T`. Non-trivial types require copy elision
// support in C++17 for `make_optional` to support `constexpr` on such
// non-trivial types.
//
// Example:
//
//   constexpr absl::optional<int> opt = absl::make_optional(1);
//   static_assert(opt.value() == 1, "");

// Deduces the contained type as `std::decay<T>::type` of the argument.
template <typename T>
constexpr optional<typename std::decay<T>::type> make_optional(T&& v) {
  return optional<typename std::decay<T>::type>(absl::forward<T>(v));
}

// In-place construction from `args...`; `T` must be given explicitly.
template <typename T, typename... Args>
constexpr optional<T> make_optional(Args&&... args) {
  return optional<T>(in_place_t(), absl::forward<Args>(args)...);
}

// In-place construction from an initializer list plus `args...`.
template <typename T, typename U, typename... Args>
constexpr optional<T> make_optional(std::initializer_list<U> il,
                                    Args&&... args) {
  return optional<T>(in_place_t(), il,
                     absl::forward<Args>(args)...);
}
// Relational operators [optional.relops]
// Empty optionals are considered equal to each other and less than non-empty
// optionals. Supports relations between optional<T> and optional<U>, between
// optional<T> and U, and between optional<T> and nullopt.
//
// Note: We're careful to support T having non-bool relationals.
// Requires: The expression, e.g. "*x == *y" shall be well-formed and its result
// shall be convertible to bool.
// The C++17 (N4606) "Returns:" statements are translated into
// code in an obvious way here, and the original text retained as function docs.
// Each body is a single return of nested ternaries so the operators stay
// valid C++11 constexpr functions.

// Returns: If bool(x) != bool(y), false; otherwise if bool(x) == false, true;
// otherwise *x == *y.
template <typename T, typename U>
constexpr auto operator==(const optional<T>& x, const optional<U>& y)
    -> decltype(optional_internal::convertible_to_bool(*x == *y)) {
  return static_cast<bool>(x) != static_cast<bool>(y)
             ? false
             : static_cast<bool>(x) == false ? true
                                             : static_cast<bool>(*x == *y);
}

// Returns: If bool(x) != bool(y), true; otherwise, if bool(x) == false, false;
// otherwise *x != *y.
template <typename T, typename U>
constexpr auto operator!=(const optional<T>& x, const optional<U>& y)
    -> decltype(optional_internal::convertible_to_bool(*x != *y)) {
  return static_cast<bool>(x) != static_cast<bool>(y)
             ? true
             : static_cast<bool>(x) == false ? false
                                             : static_cast<bool>(*x != *y);
}

// Returns: If !y, false; otherwise, if !x, true; otherwise *x < *y.
template <typename T, typename U>
constexpr auto operator<(const optional<T>& x, const optional<U>& y)
    -> decltype(optional_internal::convertible_to_bool(*x < *y)) {
  return !y ? false : !x ? true : static_cast<bool>(*x < *y);
}

// Returns: If !x, false; otherwise, if !y, true; otherwise *x > *y.
template <typename T, typename U>
constexpr auto operator>(const optional<T>& x, const optional<U>& y)
    -> decltype(optional_internal::convertible_to_bool(*x > *y)) {
  return !x ? false : !y ? true : static_cast<bool>(*x > *y);
}

// Returns: If !x, true; otherwise, if !y, false; otherwise *x <= *y.
template <typename T, typename U>
constexpr auto operator<=(const optional<T>& x, const optional<U>& y)
    -> decltype(optional_internal::convertible_to_bool(*x <= *y)) {
  return !x ? true : !y ? false : static_cast<bool>(*x <= *y);
}

// Returns: If !y, true; otherwise, if !x, false; otherwise *x >= *y.
template <typename T, typename U>
constexpr auto operator>=(const optional<T>& x, const optional<U>& y)
    -> decltype(optional_internal::convertible_to_bool(*x >= *y)) {
  return !y ? true : !x ? false : static_cast<bool>(*x >= *y);
}
// Comparison with nullopt [optional.nullops]
// The C++17 (N4606) "Returns:" statements are used directly here.
// `nullopt` compares like an empty optional: equal to an empty optional,
// and ordered before any engaged one.
template <typename T>
constexpr bool operator==(const optional<T>& x, nullopt_t) noexcept {
  return !x;
}
template <typename T>
constexpr bool operator==(nullopt_t, const optional<T>& x) noexcept {
  return !x;
}
template <typename T>
constexpr bool operator!=(const optional<T>& x, nullopt_t) noexcept {
  return static_cast<bool>(x);
}
template <typename T>
constexpr bool operator!=(nullopt_t, const optional<T>& x) noexcept {
  return static_cast<bool>(x);
}
// No optional is ever less than nullopt.
template <typename T>
constexpr bool operator<(const optional<T>&, nullopt_t) noexcept {
  return false;
}
template <typename T>
constexpr bool operator<(nullopt_t, const optional<T>& x) noexcept {
  return static_cast<bool>(x);
}
template <typename T>
constexpr bool operator<=(const optional<T>& x, nullopt_t) noexcept {
  return !x;
}
// nullopt is less than or equal to every optional.
template <typename T>
constexpr bool operator<=(nullopt_t, const optional<T>&) noexcept {
  return true;
}
template <typename T>
constexpr bool operator>(const optional<T>& x, nullopt_t) noexcept {
  return static_cast<bool>(x);
}
template <typename T>
constexpr bool operator>(nullopt_t, const optional<T>&) noexcept {
  return false;
}
template <typename T>
constexpr bool operator>=(const optional<T>&, nullopt_t) noexcept {
  return true;
}
template <typename T>
constexpr bool operator>=(nullopt_t, const optional<T>& x) noexcept {
  return !x;
}
// Comparison with T [optional.comp_with_t]
// A raw value `v` participates as if it were an engaged optional holding `v`,
// so an empty optional never compares equal to a value and always orders
// before one.
// Requires: The expression, e.g. "*x == v" shall be well-formed and its result
// shall be convertible to bool.
// The C++17 (N4606) "Equivalent to:" statements are used directly here.
template <typename T, typename U>
constexpr auto operator==(const optional<T>& x, const U& v)
    -> decltype(optional_internal::convertible_to_bool(*x == v)) {
  return static_cast<bool>(x) ? static_cast<bool>(*x == v) : false;
}
template <typename T, typename U>
constexpr auto operator==(const U& v, const optional<T>& x)
    -> decltype(optional_internal::convertible_to_bool(v == *x)) {
  return static_cast<bool>(x) ? static_cast<bool>(v == *x) : false;
}
template <typename T, typename U>
constexpr auto operator!=(const optional<T>& x, const U& v)
    -> decltype(optional_internal::convertible_to_bool(*x != v)) {
  return static_cast<bool>(x) ? static_cast<bool>(*x != v) : true;
}
template <typename T, typename U>
constexpr auto operator!=(const U& v, const optional<T>& x)
    -> decltype(optional_internal::convertible_to_bool(v != *x)) {
  return static_cast<bool>(x) ? static_cast<bool>(v != *x) : true;
}
template <typename T, typename U>
constexpr auto operator<(const optional<T>& x, const U& v)
    -> decltype(optional_internal::convertible_to_bool(*x < v)) {
  return static_cast<bool>(x) ? static_cast<bool>(*x < v) : true;
}
template <typename T, typename U>
constexpr auto operator<(const U& v, const optional<T>& x)
    -> decltype(optional_internal::convertible_to_bool(v < *x)) {
  return static_cast<bool>(x) ? static_cast<bool>(v < *x) : false;
}
template <typename T, typename U>
constexpr auto operator<=(const optional<T>& x, const U& v)
    -> decltype(optional_internal::convertible_to_bool(*x <= v)) {
  return static_cast<bool>(x) ? static_cast<bool>(*x <= v) : true;
}
template <typename T, typename U>
constexpr auto operator<=(const U& v, const optional<T>& x)
    -> decltype(optional_internal::convertible_to_bool(v <= *x)) {
  return static_cast<bool>(x) ? static_cast<bool>(v <= *x) : false;
}
template <typename T, typename U>
constexpr auto operator>(const optional<T>& x, const U& v)
    -> decltype(optional_internal::convertible_to_bool(*x > v)) {
  return static_cast<bool>(x) ? static_cast<bool>(*x > v) : false;
}
template <typename T, typename U>
constexpr auto operator>(const U& v, const optional<T>& x)
    -> decltype(optional_internal::convertible_to_bool(v > *x)) {
  return static_cast<bool>(x) ? static_cast<bool>(v > *x) : true;
}
template <typename T, typename U>
constexpr auto operator>=(const optional<T>& x, const U& v)
    -> decltype(optional_internal::convertible_to_bool(*x >= v)) {
  return static_cast<bool>(x) ? static_cast<bool>(*x >= v) : false;
}
template <typename T, typename U>
constexpr auto operator>=(const U& v, const optional<T>& x)
    -> decltype(optional_internal::convertible_to_bool(v >= *x)) {
  return static_cast<bool>(x) ? static_cast<bool>(v >= *x) : true;
}
} // namespace absl
namespace std {

// std::hash specialization for absl::optional.
// NOTE(review): the actual hashing (including how an empty optional is
// hashed, and whether the specialization is disabled for non-hashable `T`)
// is implemented by `optional_hash_base`, declared elsewhere — confirm there.
template <typename T>
struct hash<absl::optional<T> >
    : absl::optional_internal::optional_hash_base<T> {};

}  // namespace std
#undef ABSL_MSVC_CONSTEXPR_BUG_IN_UNION_LIKE_CLASS
#endif // ABSL_HAVE_STD_OPTIONAL
#endif // ABSL_TYPES_OPTIONAL_H_
|
{
"pile_set_name": "Github"
}
|
# Frontend CI: runs gulp lint and unit tests on every pull request, then
# gates the PR on diff coverage of the unit-test run.
name: frontend
on: pull_request
jobs:
  # One job per gulp command, fanned out via the matrix.
  frontend:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        gulp_cmd:
          - lint --ci
          - test:unit --ci --headless
    steps:
      - uses: actions/checkout@v2
      - name: Set up Node
        uses: actions/setup-node@v1
        with:
          node-version: '12.x'
      - name: Set up Python
        uses: actions/setup-python@v1
        with:
          python-version: 3.6
      - name: Install dependencies
        run: |
          ./frontend.sh development
          python -m pip install --upgrade pip
          pip install -r requirements/ci.txt
      - name: Run ${{ matrix.gulp_cmd }}
        run: yarn run gulp ${{ matrix.gulp_cmd }}
      - name: Store test coverage
        # Submit coverage from the unit test run only
        if: matrix.gulp_cmd == 'test:unit --ci --headless'
        uses: actions/upload-artifact@v1
        with:
          name: frontend_coverage
          path: ./test/unit_test_coverage/clover.xml
  # Downloads the coverage artifact produced above and fails the build if
  # the lines changed relative to origin/main are not 100% covered.
  coverage:
    runs-on: ubuntu-latest
    needs:
      - frontend
    steps:
      # fetch-depth: 0 gives diff-cover the full history it needs to
      # compare against origin/main.
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v1
        with:
          python-version: 3.6
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements/ci.txt
      - name: Retrieve frontend coverage
        uses: actions/download-artifact@v1
        with:
          name: frontend_coverage
      - name: Check frontend test coverage
        run: |
          diff-cover frontend_coverage/clover.xml --compare-branch=origin/main --fail-under=100
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_55) on Thu Jan 01 15:37:47 PST 2015 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class com.fasterxml.jackson.core.JsonGenerationException (Jackson-core 2.5.0 API)</title>
<meta name="date" content="2015-01-01">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class com.fasterxml.jackson.core.JsonGenerationException (Jackson-core 2.5.0 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/fasterxml/jackson/core/class-use/JsonGenerationException.html" target="_top">Frames</a></li>
<li><a href="JsonGenerationException.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class com.fasterxml.jackson.core.JsonGenerationException" class="title">Uses of Class<br>com.fasterxml.jackson.core.JsonGenerationException</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core">JsonGenerationException</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#com.fasterxml.jackson.core">com.fasterxml.jackson.core</a></td>
<td class="colLast">
<div class="block">Main public API classes of the core streaming JSON
processor: most importantly <a href="../../../../../com/fasterxml/jackson/core/JsonFactory.html" title="class in com.fasterxml.jackson.core"><code>JsonFactory</code></a>
used for constructing
JSON parser (<a href="../../../../../com/fasterxml/jackson/core/JsonParser.html" title="class in com.fasterxml.jackson.core"><code>JsonParser</code></a>)
and generator
(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core"><code>JsonGenerator</code></a>)
instances.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#com.fasterxml.jackson.core.json">com.fasterxml.jackson.core.json</a></td>
<td class="colLast">
<div class="block">JSON-specific parser and generator implementation classes that
Jackson defines and uses.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="#com.fasterxml.jackson.core.util">com.fasterxml.jackson.core.util</a></td>
<td class="colLast">
<div class="block">Utility classes used by Jackson Core functionality.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="com.fasterxml.jackson.core">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core">JsonGenerationException</a> in <a href="../../../../../com/fasterxml/jackson/core/package-summary.html">com.fasterxml.jackson.core</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../com/fasterxml/jackson/core/package-summary.html">com.fasterxml.jackson.core</a> that throw <a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core">JsonGenerationException</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>protected void</code></td>
<td class="colLast"><span class="strong">JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html#_reportError(java.lang.String)">_reportError</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> msg)</code>
<div class="block">Helper method used for constructing and throwing
<a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core"><code>JsonGenerationException</code></a> with given base message.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#beforeArrayValues(com.fasterxml.jackson.core.JsonGenerator)">beforeArrayValues</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after array start marker has been output,
and right before the first value is to be output.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#beforeObjectEntries(com.fasterxml.jackson.core.JsonGenerator)">beforeObjectEntries</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after object start marker has been output,
and right before the field name of the first entry is
to be output.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#writeArrayValueSeparator(com.fasterxml.jackson.core.JsonGenerator)">writeArrayValueSeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after an array value has been completely
output, and before another value is to be output.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#writeEndArray(com.fasterxml.jackson.core.JsonGenerator, int)">writeEndArray</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg,
int nrOfValues)</code>
<div class="block">Method called after an Array value has been completely output
(minus closing bracket).</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#writeEndObject(com.fasterxml.jackson.core.JsonGenerator, int)">writeEndObject</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg,
int nrOfEntries)</code>
<div class="block">Method called after an Object value has been completely output
(minus closing curly bracket).</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#writeObjectEntrySeparator(com.fasterxml.jackson.core.JsonGenerator)">writeObjectEntrySeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after an object entry (field:value) has been completely
output, and before another value is to be output.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#writeObjectFieldValueSeparator(com.fasterxml.jackson.core.JsonGenerator)">writeObjectFieldValueSeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after an object field has been output, but
before the value is output.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#writeRootValueSeparator(com.fasterxml.jackson.core.JsonGenerator)">writeRootValueSeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after a root-level value has been completely
output, and before another value is to be output.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#writeStartArray(com.fasterxml.jackson.core.JsonGenerator)">writeStartArray</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called when an Array value is to be output, before
any member/child values are output.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">PrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/PrettyPrinter.html#writeStartObject(com.fasterxml.jackson.core.JsonGenerator)">writeStartObject</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called when an Object value is to be output, before
any fields are output.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="com.fasterxml.jackson.core.json">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core">JsonGenerationException</a> in <a href="../../../../../com/fasterxml/jackson/core/json/package-summary.html">com.fasterxml.jackson.core.json</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../com/fasterxml/jackson/core/json/package-summary.html">com.fasterxml.jackson.core.json</a> that throw <a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core">JsonGenerationException</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>protected void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#_verifyPrettyValueWrite(java.lang.String, int)">_verifyPrettyValueWrite</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> typeMsg,
int status)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>protected void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#_verifyValueWrite(java.lang.String)">_verifyValueWrite</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> typeMsg)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>protected void</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#_writeBinary(com.fasterxml.jackson.core.Base64Variant, byte[], int, int)">_writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
byte[] input,
int inputPtr,
int inputEnd)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>protected void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#_writeBinary(com.fasterxml.jackson.core.Base64Variant, byte[], int, int)">_writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
byte[] input,
int inputPtr,
int inputEnd)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>protected int</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#_writeBinary(com.fasterxml.jackson.core.Base64Variant, java.io.InputStream, byte[])">_writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> data,
byte[] readBuffer)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>protected int</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#_writeBinary(com.fasterxml.jackson.core.Base64Variant, java.io.InputStream, byte[])">_writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> data,
byte[] readBuffer)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>protected int</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#_writeBinary(com.fasterxml.jackson.core.Base64Variant, java.io.InputStream, byte[], int)">_writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> data,
byte[] readBuffer,
int bytesLeft)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>protected int</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#_writeBinary(com.fasterxml.jackson.core.Base64Variant, java.io.InputStream, byte[], int)">_writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> data,
byte[] readBuffer,
int bytesLeft)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>protected void</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#_writePPFieldName(com.fasterxml.jackson.core.SerializableString, boolean)">_writePPFieldName</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/SerializableString.html" title="interface in com.fasterxml.jackson.core">SerializableString</a> name,
boolean commaBefore)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>protected void</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#_writePPFieldName(java.lang.String, boolean)">_writePPFieldName</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> name,
boolean commaBefore)</code>
<div class="block">Specialized version of <code>_writeFieldName</code>, off-lined
to keep the "fast path" as simple (and hopefully fast) as possible.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#writeBinary(com.fasterxml.jackson.core.Base64Variant, byte[], int, int)">writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
byte[] data,
int offset,
int len)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeBinary(com.fasterxml.jackson.core.Base64Variant, byte[], int, int)">writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
byte[] data,
int offset,
int len)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#writeBinary(com.fasterxml.jackson.core.Base64Variant, java.io.InputStream, int)">writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> data,
int dataLength)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>int</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeBinary(com.fasterxml.jackson.core.Base64Variant, java.io.InputStream, int)">writeBinary</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/Base64Variant.html" title="class in com.fasterxml.jackson.core">Base64Variant</a> b64variant,
<a href="http://docs.oracle.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io">InputStream</a> data,
int dataLength)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeBoolean(boolean)">writeBoolean</a></strong>(boolean state)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#writeEndArray()">writeEndArray</a></strong>()</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#writeEndObject()">writeEndObject</a></strong>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeNull()">writeNull</a></strong>()</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeNumber(java.math.BigDecimal)">writeNumber</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/math/BigDecimal.html?is-external=true" title="class or interface in java.math">BigDecimal</a> value)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeNumber(java.math.BigInteger)">writeNumber</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/math/BigInteger.html?is-external=true" title="class or interface in java.math">BigInteger</a> value)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeNumber(double)">writeNumber</a></strong>(double d)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeNumber(float)">writeNumber</a></strong>(float f)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeNumber(int)">writeNumber</a></strong>(int i)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeNumber(long)">writeNumber</a></strong>(long l)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeNumber(short)">writeNumber</a></strong>(short s)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeNumber(java.lang.String)">writeNumber</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> encodedValue)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeRaw(char)">writeRaw</a></strong>(char ch)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeRaw(char[], int, int)">writeRaw</a></strong>(char[] cbuf,
int offset,
int len)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeRaw(com.fasterxml.jackson.core.SerializableString)">writeRaw</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/SerializableString.html" title="interface in com.fasterxml.jackson.core">SerializableString</a> text)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeRaw(java.lang.String)">writeRaw</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> text)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">UTF8JsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/UTF8JsonGenerator.html#writeRaw(java.lang.String, int, int)">writeRaw</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> text,
int offset,
int len)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#writeStartArray()">writeStartArray</a></strong>()</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">WriterBasedJsonGenerator.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/WriterBasedJsonGenerator.html#writeStartObject()">writeStartObject</a></strong>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">JsonGeneratorImpl.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/json/JsonGeneratorImpl.html#writeStringField(java.lang.String, java.lang.String)">writeStringField</a></strong>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> fieldName,
<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a> value)</code> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="com.fasterxml.jackson.core.util">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core">JsonGenerationException</a> in <a href="../../../../../com/fasterxml/jackson/core/util/package-summary.html">com.fasterxml.jackson.core.util</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../com/fasterxml/jackson/core/util/package-summary.html">com.fasterxml.jackson.core.util</a> that throw <a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core">JsonGenerationException</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#beforeArrayValues(com.fasterxml.jackson.core.JsonGenerator)">beforeArrayValues</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#beforeArrayValues(com.fasterxml.jackson.core.JsonGenerator)">beforeArrayValues</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#beforeObjectEntries(com.fasterxml.jackson.core.JsonGenerator)">beforeObjectEntries</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#beforeObjectEntries(com.fasterxml.jackson.core.JsonGenerator)">beforeObjectEntries</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code><a href="http://docs.oracle.com/javase/6/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></td>
<td class="colLast"><span class="strong">JsonParserDelegate.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/JsonParserDelegate.html#getObjectId()">getObjectId</a></strong>()</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="http://docs.oracle.com/javase/6/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></code></td>
<td class="colLast"><span class="strong">JsonParserDelegate.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/JsonParserDelegate.html#getTypeId()">getTypeId</a></strong>()</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#writeArrayValueSeparator(com.fasterxml.jackson.core.JsonGenerator)">writeArrayValueSeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after an array value has been completely
output, and before another value is to be output.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#writeArrayValueSeparator(com.fasterxml.jackson.core.JsonGenerator)">writeArrayValueSeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after an array value has been completely
output, and before another value is to be output.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#writeEndArray(com.fasterxml.jackson.core.JsonGenerator, int)">writeEndArray</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg,
int nrOfValues)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#writeEndArray(com.fasterxml.jackson.core.JsonGenerator, int)">writeEndArray</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg,
int nrOfValues)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#writeEndObject(com.fasterxml.jackson.core.JsonGenerator, int)">writeEndObject</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg,
int nrOfEntries)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#writeEndObject(com.fasterxml.jackson.core.JsonGenerator, int)">writeEndObject</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg,
int nrOfEntries)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.FixedSpaceIndenter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.FixedSpaceIndenter.html#writeIndentation(com.fasterxml.jackson.core.JsonGenerator, int)">writeIndentation</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg,
int level)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#writeObjectEntrySeparator(com.fasterxml.jackson.core.JsonGenerator)">writeObjectEntrySeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after an object entry (field:value) has been completely
output, and before another value is to be output.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#writeObjectEntrySeparator(com.fasterxml.jackson.core.JsonGenerator)">writeObjectEntrySeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after an object entry (field:value) has been completely
output, and before another value is to be output.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#writeObjectFieldValueSeparator(com.fasterxml.jackson.core.JsonGenerator)">writeObjectFieldValueSeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after an object field has been output, but
before the value is output.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#writeObjectFieldValueSeparator(com.fasterxml.jackson.core.JsonGenerator)">writeObjectFieldValueSeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code>
<div class="block">Method called after an object field has been output, but
before the value is output.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#writeRootValueSeparator(com.fasterxml.jackson.core.JsonGenerator)">writeRootValueSeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#writeRootValueSeparator(com.fasterxml.jackson.core.JsonGenerator)">writeRootValueSeparator</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#writeStartArray(com.fasterxml.jackson.core.JsonGenerator)">writeStartArray</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#writeStartArray(com.fasterxml.jackson.core.JsonGenerator)">writeStartArray</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">MinimalPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/MinimalPrettyPrinter.html#writeStartObject(com.fasterxml.jackson.core.JsonGenerator)">writeStartObject</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><code>void</code></td>
<td class="colLast"><span class="strong">DefaultPrettyPrinter.</span><code><strong><a href="../../../../../com/fasterxml/jackson/core/util/DefaultPrettyPrinter.html#writeStartObject(com.fasterxml.jackson.core.JsonGenerator)">writeStartObject</a></strong>(<a href="../../../../../com/fasterxml/jackson/core/JsonGenerator.html" title="class in com.fasterxml.jackson.core">JsonGenerator</a> jg)</code> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../com/fasterxml/jackson/core/JsonGenerationException.html" title="class in com.fasterxml.jackson.core">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?com/fasterxml/jackson/core/class-use/JsonGenerationException.html" target="_top">Frames</a></li>
<li><a href="JsonGenerationException.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2014-2015 <a href="http://fasterxml.com/">FasterXML</a>. All Rights Reserved.</small></p>
</body>
</html>
|
{
"pile_set_name": "Github"
}
|
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading.Tasks;
using Pulumi.Serialization;
namespace Pulumi.Aws.WafV2.Inputs
{
    /// <summary>
    /// Input args for a deeply nested WAFv2 WebACL rule statement
    /// (and -> or -> and nesting level). Auto-generated by the Pulumi
    /// Terraform Bridge (tfgen); each optional property maps to one
    /// WAFv2 match-statement type. At most one statement type is
    /// expected to be set per instance — TODO confirm against the
    /// AWS WAFv2 API, which enforces this server-side.
    /// </summary>
    public sealed class WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementGetArgs : Pulumi.ResourceArgs
    {
        /// <summary>
        /// A rule statement that defines a string match search for AWS WAF to apply to web requests. See Byte Match Statement below for details.
        /// </summary>
        [Input("byteMatchStatement")]
        public Input<Inputs.WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementByteMatchStatementGetArgs>? ByteMatchStatement { get; set; }
        /// <summary>
        /// A rule statement used to identify web requests based on country of origin. See GEO Match Statement below for details.
        /// </summary>
        [Input("geoMatchStatement")]
        public Input<Inputs.WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementGeoMatchStatementGetArgs>? GeoMatchStatement { get; set; }
        /// <summary>
        /// A rule statement used to detect web requests coming from particular IP addresses or address ranges. See IP Set Reference Statement below for details.
        /// </summary>
        [Input("ipSetReferenceStatement")]
        public Input<Inputs.WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementIpSetReferenceStatementGetArgs>? IpSetReferenceStatement { get; set; }
        /// <summary>
        /// A rule statement used to search web request components for matches with regular expressions. See Regex Pattern Set Reference Statement below for details.
        /// </summary>
        [Input("regexPatternSetReferenceStatement")]
        public Input<Inputs.WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementRegexPatternSetReferenceStatementGetArgs>? RegexPatternSetReferenceStatement { get; set; }
        /// <summary>
        /// A rule statement that compares a number of bytes against the size of a request component, using a comparison operator, such as greater than (&gt;) or less than (&lt;). See Size Constraint Statement below for more details.
        /// </summary>
        [Input("sizeConstraintStatement")]
        public Input<Inputs.WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementSizeConstraintStatementGetArgs>? SizeConstraintStatement { get; set; }
        /// <summary>
        /// An SQL injection match condition identifies the part of web requests, such as the URI or the query string, that you want AWS WAF to inspect. See SQL Injection Match Statement below for details.
        /// </summary>
        [Input("sqliMatchStatement")]
        public Input<Inputs.WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementSqliMatchStatementGetArgs>? SqliMatchStatement { get; set; }
        /// <summary>
        /// A rule statement that defines a cross-site scripting (XSS) match search for AWS WAF to apply to web requests. See XSS Match Statement below for details.
        /// </summary>
        [Input("xssMatchStatement")]
        public Input<Inputs.WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementXssMatchStatementGetArgs>? XssMatchStatement { get; set; }
        // Parameterless constructor required by the Pulumi serialization layer;
        // all properties are set via object-initializer syntax.
        public WebAclRuleStatementAndStatementStatementOrStatementStatementAndStatementStatementGetArgs()
        {
        }
    }
}
|
{
"pile_set_name": "Github"
}
|
package com.mossle.core.query;
/**
 * MatchType enumerates the comparison operators available when building
 * dynamic query conditions.
 *
 * @author Lingo
 */
public enum MatchType {
    /** equals. */
    EQ,
    /** like (SQL LIKE pattern match). */
    LIKE,
    /** less than. */
    LT,
    /** greater than. */
    GT,
    /** less equals (less than or equal). */
    LE,
    /** greater equals (greater than or equal). */
    GE,
    /** in (membership in a set of values). */
    IN,
    /** NOT (negation). */
    NOT,
    /** IS NULL. */
    INL,
    /** NOT NULL (IS NOT NULL). */
    NNL,
    /** unknown (unrecognized operator; fallback value). */
    UNKNOWN;
}
|
{
"pile_set_name": "Github"
}
|
package de.agilecoders.wicket.samples.components.base;
import org.apache.wicket.markup.html.panel.GenericPanel;
import org.apache.wicket.model.IModel;
/**
 * Base panel for page sections.  Every section renders its markup id and
 * forces that id to equal the wicket component id, so the section can be
 * addressed predictably from CSS or JavaScript.
 *
 * @author miha
 * @param <T> type of the model object
 */
public class Section<T> extends GenericPanel<T> {

    /**
     * Creates a section without a model.
     *
     * @param id component id
     */
    public Section(String id) {
        this(id, null);
    }

    /**
     * Creates a section bound to the given model.
     *
     * @param id    component id
     * @param model the data model (may be {@code null})
     */
    public Section(String id, IModel<T> model) {
        super(id, model);

        // render the markup id and pin it to the component id so the
        // generated HTML id attribute is stable
        setOutputMarkupId(true);
        setMarkupId(id);
    }
}
|
{
"pile_set_name": "Github"
}
|
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
// Babel-generated runtime helper: implements array destructuring
// ([a, b] = value) for ES5 targets.  Supports plain arrays directly and
// falls back to consuming any iterable up to `i` elements; throws a
// TypeError for non-iterable values.
var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();
var _utilities = require('./../utilities');
// JSON schema for the rule's options: a single string option, either
// 'always' (require exactly one space inside the angle brackets) or
// 'never' (forbid spaces; the default when no option is given).
var schema = [{
  enum: ['always', 'never'],
  type: 'string'
}];
/**
 * Rule factory for enforcing spacing inside generic type annotation
 * brackets, e.g. `Promise< string >` vs `Promise<string>`.
 *
 * Option 'never' (default): no space is allowed after `<` or before `>`.
 * Option 'always': exactly one space is required on each side.
 *
 * @param {Object} context - ESLint rule context (options, source code, report).
 * @returns {Object} AST visitor handling GenericTypeAnnotation nodes.
 */
var create = function create(context) {
  var sourceCode = context.getSourceCode();
  var never = (context.options[0] || 'never') === 'never';

  return {
    GenericTypeAnnotation: function GenericTypeAnnotation(node) {
      var types = node.typeParameters;

      // Promise<foo>
      //        ^^^^^ only annotations that actually carry type parameters
      // are relevant; a bare identifier annotation has nothing to space.
      if (!types) {
        return;
      }

      // First two tokens: the opening `<` and the first token inside it.
      var headTokens = sourceCode.getFirstTokens(types, 2);
      // Last two tokens: the last token inside and the closing `>`.
      var tailTokens = sourceCode.getLastTokens(types, 2);
      var opener = headTokens[0];
      var firstInnerToken = headTokens[1];
      var lastInnerToken = tailTokens[0];
      var closer = tailTokens[1];

      var spacesBefore = firstInnerToken.start - opener.end;
      var spacesAfter = closer.start - lastInnerToken.end;

      // Shared reporter: every violation targets the same node and name.
      var report = function report(message, fix) {
        context.report({
          data: { name: node.id.name },
          fix: fix,
          message: message,
          node: types
        });
      };

      if (never) {
        if (spacesBefore) {
          report('There must be no space at start of "{{name}}" generic type annotation', _utilities.spacingFixers.stripSpacesAfter(opener, spacesBefore));
        }
        if (spacesAfter) {
          report('There must be no space at end of "{{name}}" generic type annotation', _utilities.spacingFixers.stripSpacesAfter(lastInnerToken, spacesAfter));
        }
        return;
      }

      // 'always' mode: exactly one space on each side.
      if (spacesBefore > 1) {
        report('There must be one space at start of "{{name}}" generic type annotation', _utilities.spacingFixers.stripSpacesAfter(opener, spacesBefore - 1));
      } else if (spacesBefore === 0) {
        report('There must be a space at start of "{{name}}" generic type annotation', _utilities.spacingFixers.addSpaceAfter(opener));
      }
      if (spacesAfter > 1) {
        report('There must be one space at end of "{{name}}" generic type annotation', _utilities.spacingFixers.stripSpacesAfter(lastInnerToken, spacesAfter - 1));
      } else if (spacesAfter === 0) {
        report('There must be a space at end of "{{name}}" generic type annotation', _utilities.spacingFixers.addSpaceAfter(lastInnerToken));
      }
    }
  };
};
// Standard ESLint rule module shape: expose the rule factory and its
// options schema; the final line re-exports the default object for
// CommonJS consumers (Babel interop).
exports.default = {
  create,
  schema
};
module.exports = exports['default'];
|
{
"pile_set_name": "Github"
}
|
{
"name": "SCRIBENDI INC.",
"displayName": "SCRIBENDI",
"properties": [
"scribendi.com"
]
}
|
{
"pile_set_name": "Github"
}
|
package com.planet_ink.coffee_mud.Common;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.lang.ref.WeakReference;
import java.util.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
/*
Copyright 2015-2020 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Default {@link AuctionPolicy} implementation.  Baseline rates and day
 * limits are parsed from the AUCTIONRATES properties entry; an individual
 * {@link Auctioneer} may later override the timed-auction values through
 * {@link #mergeAuctioneerPolicy(Auctioneer)}.
 */
public class DefaultAuctionPolicy implements AuctionPolicy, Cloneable
{
	// live-auction listing price (first AUCTIONRATES entry)
	private double liveListPrice = 0.0;
	// timed-auction listing price (second AUCTIONRATES entry)
	private double timeListPrice = 0.0;
	// timed-auction listing percentage (third AUCTIONRATES entry)
	private double timeListPct = 0.0;
	// live-auction final cut percentage (fourth AUCTIONRATES entry)
	private double liveCutPct = 0.0;
	// timed-auction final cut percentage (fifth AUCTIONRATES entry)
	private double timeCutPct = 0.0;
	// maximum timed-auction duration in days (seventh AUCTIONRATES entry)
	private int maxDays = Integer.MAX_VALUE;
	// minimum timed-auction duration in days (sixth AUCTIONRATES entry)
	private int minDays = 0;

	@Override
	public String ID()
	{
		return "DefaultAuctionPolicy";
	}

	@Override
	public String name()
	{
		return ID();
	}

	/**
	 * Builds a policy from the AUCTIONRATES property, a comma-delimited
	 * list of up to seven values; any missing trailing entry defaults to
	 * "0".  The minimum day count is clamped to never exceed the maximum.
	 */
	public DefaultAuctionPolicy()
	{
		final List<String> rates = CMParms.parseCommas(CMProps.getVar(CMProps.Str.AUCTIONRATES), true);
		for (int i = rates.size(); i < 7; i++)
			rates.add("0");
		liveListPrice = CMath.s_double(rates.get(0));
		timeListPrice = CMath.s_double(rates.get(1));
		timeListPct = CMath.s_pct(rates.get(2));
		liveCutPct = CMath.s_pct(rates.get(3));
		timeCutPct = CMath.s_pct(rates.get(4));
		minDays = CMath.s_int(rates.get(5));
		maxDays = CMath.s_int(rates.get(6));
		if (minDays > maxDays)
			minDays = maxDays;
	}

	@Override
	public int compareTo(final CMObject o)
	{
		return CMClass.classID(this).compareToIgnoreCase(CMClass.classID(o));
	}

	@Override
	public CMObject newInstance()
	{
		return new DefaultAuctionPolicy();
	}

	@Override
	public CMObject copyOf()
	{
		// shallow clone is sufficient: all state is primitive
		try
		{
			return (CMObject) clone();
		}
		catch (final Exception e)
		{
			return newInstance();
		}
	}

	@Override
	public void initializeClass()
	{
		// no class-level initialization required
	}

	/** @return the auctioneer's value when it is non-negative, otherwise the baseline */
	private static double pick(final double candidate, final double fallback)
	{
		return (candidate < 0.0) ? fallback : candidate;
	}

	/** @return the auctioneer's value when it is non-negative, otherwise the baseline */
	private static int pick(final int candidate, final int fallback)
	{
		return (candidate < 0) ? fallback : candidate;
	}

	/**
	 * Resets this policy to the baseline property values and then applies
	 * the given auctioneer's timed-auction overrides (an override of a
	 * negative value means "use the baseline").  Live-auction values are
	 * always taken from the baseline.  Does nothing when A is null.
	 */
	@Override
	public void mergeAuctioneerPolicy(final Auctioneer A)
	{
		if (A == null)
			return;
		final DefaultAuctionPolicy base = new DefaultAuctionPolicy();
		liveListPrice = base.liveListPrice;
		liveCutPct = base.liveCutPct;
		timeListPrice = pick(A.timedListingPrice(), base.timeListPrice);
		timeListPct = pick(A.timedListingPct(), base.timeListPct);
		timeCutPct = pick(A.timedFinalCutPct(), base.timeCutPct);
		maxDays = pick(A.maxTimedAuctionDays(), base.maxDays);
		minDays = pick(A.minTimedAuctionDays(), base.minDays);
		if (minDays > maxDays)
			minDays = maxDays;
	}

	@Override
	public double timedListingPrice()
	{
		return timeListPrice;
	}

	@Override
	public void setTimedListingPrice(final double d)
	{
		timeListPrice = d;
	}

	@Override
	public double timedListingPct()
	{
		return timeListPct;
	}

	@Override
	public void setTimedListingPct(final double d)
	{
		timeListPct = d;
	}

	@Override
	public double timedFinalCutPct()
	{
		return timeCutPct;
	}

	@Override
	public void setTimedFinalCutPct(final double d)
	{
		timeCutPct = d;
	}

	@Override
	public int maxTimedAuctionDays()
	{
		return maxDays;
	}

	@Override
	public void setMaxTimedAuctionDays(final int d)
	{
		maxDays = d;
	}

	@Override
	public int minTimedAuctionDays()
	{
		return minDays;
	}

	@Override
	public void setMinTimedAuctionDays(final int d)
	{
		minDays = d;
	}

	@Override
	public double liveListingPrice()
	{
		return liveListPrice;
	}

	@Override
	public void setLiveListingPrice(final double d)
	{
		liveListPrice = d;
	}

	@Override
	public double liveFinalCutPct()
	{
		return liveCutPct;
	}

	@Override
	public void setLiveFinalCutPct(final double d)
	{
		liveCutPct = d;
	}
}
|
{
"pile_set_name": "Github"
}
|
; Open
; Compile with: nasm -f elf open.asm
; Link with (64 bit systems require elf_i386 option): ld -m elf_i386 open.o -o open
; Run with: ./open
;
; Creates readme.txt with mode 0777, writes "Hello world!" into it, then
; re-opens it read-only and prints the resulting file descriptor.

%include 'functions.asm'

SECTION .data
filename db 'readme.txt', 0h    ; the filename to create (NUL-terminated)
contents db 'Hello world!', 0h  ; the contents to write

SECTION .text
global _start

_start:
    mov ecx, 0q777      ; permission bits rwxrwxrwx -- BUGFIX: NASM parses a bare
                        ; 0777 as DECIMAL 777; octal needs the 0q/0o prefix
    mov ebx, filename   ; pointer to the filename
    mov eax, 8          ; invoke SYS_CREAT (kernel opcode 8); fd returned in eax
    int 80h
    mov edx, 12         ; bytes to write (length of contents, excluding NUL)
    mov ecx, contents   ; pointer to the buffer to write
    mov ebx, eax        ; file descriptor returned by SYS_CREAT above
    mov eax, 4          ; invoke SYS_WRITE (kernel opcode 4)
    int 80h
    ; NOTE(review): the SYS_CREAT descriptor is never closed; harmless for a
    ; demo since the kernel closes it on exit, but worth knowing.
    mov ecx, 0          ; flag for readonly access mode (O_RDONLY)
    mov ebx, filename   ; filename we created above
    mov eax, 5          ; invoke SYS_OPEN (kernel opcode 5); fd returned in eax
    int 80h             ; call the kernel
    call iprintLF       ; print the new file descriptor (integer in eax)
    call quit           ; exit via helper in functions.asm
{
"pile_set_name": "Github"
}
|
//
// Jumbotron
// --------------------------------------------------
// Lightweight hero unit: padded, full-width callout box.  Spacing, colors
// and font sizes all derive from the @jumbotron-* variables.
.jumbotron {
  padding: @jumbotron-padding;
  margin-bottom: @jumbotron-padding;
  color: @jumbotron-color;
  background-color: @jumbotron-bg;

  h1,
  .h1 {
    color: @jumbotron-heading-color;
  }

  p {
    margin-bottom: (@jumbotron-padding / 2);
    font-size: @jumbotron-font-size;
    font-weight: 200;
  }

  .container & {
    border-radius: @border-radius-large; // Only round corners at higher resolutions if contained in a container
  }

  // Prevent an inner .container from overflowing the jumbotron's padding box.
  .container {
    max-width: 100%;
  }

  // Scale up padding and heading size from the small breakpoint upward.
  @media screen and (min-width: @screen-sm-min) {
    padding-top: (@jumbotron-padding * 1.6);
    padding-bottom: (@jumbotron-padding * 1.6);

    .container & {
      padding-left: (@jumbotron-padding * 2);
      padding-right: (@jumbotron-padding * 2);
    }

    h1,
    .h1 {
      font-size: (@font-size-base * 4.5);
    }
  }
}
|
{
"pile_set_name": "Github"
}
|
{
"name" : "1611.09573.pdf",
"metadata" : {
"source" : "CRF",
"title" : null,
"authors" : [ ],
"emails" : [ "anoop.res15@iiitmk.ac.in", "asharaf.s@iiitmk.ac.in", "deepaksp@acm.org" ],
"sections" : [ {
"heading" : null,
"text" : "ar X\niv :1\n61 1.\n09 57\n3v 1\n[ cs\n.A I]\n2 9\nN ov\n2 01\n6 Learning Concept Hierarchies through Probabilistic Topic Modeling\nV S Anoopa, S Asharafb, Deepak Pc\naData Engineering Lab, Indian Institute of Information Technology and Management - Kerala (IIITM-K), Thiruvananthapuram 695 581, India, Contact: anoop.res15@iiitmk.ac.in\nbIndian Institute of Information Technology and Management - Kerala (IIITM-K), Thiruvananthapuram 695 581, India, Contact: asharaf.s@iiitmk.ac.in\ncQueen’s University, Belfast, UK, Contact: deepaksp@acm.org\nWith the advent of semantic web, various tools and techniques have been introduced for presenting and organizing knowledge. Concept hierarchies are one such technique which gained significant attention due to its usefulness in creating domain ontologies that are considered as an integral part of semantic web. Automated concept hierarchy learning algorithms focus on extracting relevant concepts from unstructured text corpus and connect them together by identifying some potential relations exist between them. In this paper, we propose a novel approach for identifying relevant concepts from plain text and then learns hierarchy of concepts by exploiting subsumption relation between them. To start with, we model topics using a probabilistic topic model and then make use of some lightweight linguistic process to extract semantically rich concepts. Then we connect concepts by identifying an ”is-a” relationship between pair of concepts. The proposed method is completely unsupervised and there is no need for a domain specific training corpus for concept extraction and learning. 
Experiments on large and real-world text corpora such as BBC News dataset and Reuters News corpus shows that the proposed method outperforms some of the existing methods for concept extraction and efficient concept hierarchy learning is possible if the overall task is guided by a probabilistic topic modeling algorithm.\nKeywords : Probabilistic Topic Models, Concept Extraction, Subsumption Hierarchy Learning, Natural Language Processing, Semantic Web, Text Mining."
}, {
"heading" : "1. INTRODUCTION",
"text" : "Due to rapid growth of text producing and consuming applications, numerous tools and techniques were introduced in the recent past for extracting useful patterns from unstructured text. These patterns are crucial for organizations to discover knowledge out of it and aid in making intelligent decisions. As the amount of such data grows exponentially, already available algorithms performs poor on the scalability and performance aspects. But there are still a lot of avenues where text data is yet to be exploited fully and thus we need new and efficient algorithms to tackle this situation. Platforms such as social networks, e-commerce websites, blogs and research journals generate such data in the form of unstructured\ntext and it is essential to analyze, synthesis and process such data for efficient retrieval of useful information.\nIn text mining, concepts are defined as a sequence of words that constitute real or imaginary entities. Extraction of such entities are non-trivial for applications such as automated ontology generation [1], document summarization [2] and aspect oriented sentiment analysis [3] to name a few. This is the era of data explosion thus it is very difficult to store, process, manage and most importantly to extract knowledge out of it. To overcome this shortfall, a significant amount of research has been carried out in the recent past for leveraging underlying thematic and semantic structure from text archives. As a\n1\nresult a good number of algorithmic techniques were introduced which are proved to be efficient for the discovery of themes and semantics underlying high dimensional data.\nTopic Models are suite of text understanding algorithms which statistically generate latent themes pervade a large collection of unstructured text. Since its inception, text mining researchers and practitioners are using it extensively to analyze and organize large document collections. 
They are unsupervised learning algorithms thus it does not require user tagged corpus to work with. A large number of topic modeling algorithms have been reported in the past with the difference in the assumption they make for modeling topics. Models such as Probabilistic topic models [4] and Latent Dirichlet Allocation (LDA) are some such flavors of topic modeling that attained significant attention.\nContributions: This work proposes a novel unsupervised approach for learning concept hierarchies from large unstructured text corpus which is guided by a probabilistic topic modeling approach. To begin with, we model topics from the corpus using Latent Dirichlet Allocation (LDA) algorithm and then uses a lightweight linguistic process to identify concepts which are close to the real world understanding. Then we make use of a subsumption relation [5] (”is-a”) to connect concepts which are related thus forms a hierarchy of concepts.\nOrganization: The rest of this paper is organized as follows. We briefly review related works in Section 2. Section 3 introduces the novel approach we have proposed. Detailed explanation of the implementation details is presented in Section 4, and the evaluation of the proposed method is discussed in Section 5. and finally we draw a conclusion and discuss future work in Section 6."
}, {
"heading" : "2. PROBLEM DEFINITION AND RELATED WORK",
"text" : ""
}, {
"heading" : "2.1. Problem Definition",
"text" : "Here, we define the problem formally. Given a large corpus containing unstructured text documents, our problem is to automatically generate concept hierarchies which are close to human understanding. In a nutshell, this paper aims to answer the following research questions :\n1. Is it possible to automatically extract human interpretable concepts from statistically generated topics using a lightweight linguistic process ?\n2. Can our proposed method learn a hierarchy of such concepts incorporating a subsumption relation between them, which are important in automated ontology generation ?\n3. Given a large but unstructured text corpus, can our topic modeling guided method better extracts and learns concept hierarchies compared to existing algorithms ?\nMany recent works have been reported in this direction which proposed many algorithms to extract semantically rich concepts from plain text. In the following section, we due acknowledge some past literatures that discusses methods which are close to our proposed algorithm. Notations used in this paper: To help narrative, some commonly used notations are shown in Table 1 that are used in the rest of this paper."
}, {
"heading" : "2.2. Related Work",
"text" : "Concept extraction is the process of extracting real or imaginary entities from plain text that has got wider recognition in the recent past. This is due to the wide variety of applications which are mainly dealing with text data such as e-commerce websites, research articles etc. Thus a significant number of research literatures are available in the field of concept extraction and mining which proposes many algorithms with varying degrees of success. In this section, we give emphasis on past literatures in automated concept extraction and hierarchy learning algorithms and briefly discuss works closely related to our proposed framework.\nPhrase discovering topic model [6] that uses pitman-yor process and TopMine [7] were two notable works that proposed algorithms for mining topical phrases from text documents. The former constructs a topic-word matrix before modeling topics but disadvantage of the approach was that creating such a matrix for large volume of text is often difficult. The latter approach makes use of a two stage process for modeling topics and mainly works with clinical documents. First it identifies phrases using some off-the-shelf tools and then trains a topic model with the identified phrases. Another work which uses topic models for generating multi-word phrases was the topical n-gram [8]. This makes use of some switching variable for identifying a new n-gram. The assumption of this method was that the words within an n-gram usually won’t share same topic, which may not be true all the time.\nAutomatic Concept Extractor (ACE), a system specifically designed for extracting concepts from HTML pages and making use of the text body and some visual clues on HTML tags for identifying potential concepts was proposed by Ramirez and Mattmann [9]. Even though this method could outperform some state of the art methods, dependency with HTML was a major drawback. 
Turney[10] proposed another system named GenEx, which employed a genetic algorithm supported rule learning mechanism for concept extraction.\nA system which extracts concepts from user tag and query log dataset is proposed by Parameswaran et.al.[11] which uses techniques similar to association rule mining. This method uses features such as frequency of occurrences and the popularity among users for extracting core concepts and attempts to build a web of concepts. Even though this algorithm can be applied to any large dataset, a lot of additional processing is required when dealing with web pages. A bag-of-word approach was proposed by Gelfand et.al.[12] for concept extraction from plain text and used these to form a closely tied semantic relations graph for representing relationships between them. They have applied this technique specifically for some classification tasks and found that their method produces better concepts than the Naive Bayes text classifier.\nDheeraj Rajagopal et.al.[13] introduced another graph based approach for commonsense concept extraction and detection of semantic similarity among those concepts. They used a manually labeled dataset of 200 multi-word concept pairs for evaluating their parser capable of detecting semantic similarity and showed that their method was capable of effectively finding syntactically and semantically related concepts. The main disadvantage of that method is the use of manually labeled dataset and the creation of such dataset is time consuming and requires human effort. Another work reported in this domain is the method proposed by Krulwich and Burkey [14] which uses a simple heuristics rule based approach to extract key phrases from document by considering visual clues such as the usage of bold and italic characters as features. 
They have shown that this technique can be extended for automatic document classification experiments.\nA key phrase extraction system called Automatic Keyphrase Extraction (KEA) developed by Witten et.al[15] was reported in the concept extraction literatures which creates a Naive Bayes learning model with known key phrases extracted from training documents and uses this model for inferring key phrases from new set of documents.\nAs an extension to this KEA framework, Song et. al.[16] proposed a method which uses the information gain measure for ranking candidate key phrases based on some distance and tf-idf features which was first introduced in [15]. Another impressive and widely used method was introduced by Frantzi et. al.[17] which extracts multi-word terms from medical documents and named as C/NC method. The algorithm uses a POS tagger POS patten filter for collecting noun phrases and then uses some statistical measures for determining the termhood of candidate multi-words.\nThe proposed method in this paper is a hybrid approach incorporating statistical methods such as topic modeling and tf-itf weighting and some lightweight linguistic processes such as POS tagging and analysis for leveraging concepts from text. We expect the learnt concept hierarchy to be close to the real world understanding of concepts which we will quantify using evaluation measures such as precision, recall and f-measure."
}, {
"heading" : "3. BACKGROUND : LATENT DIRICHLET ALLOCATION (LDA)",
"text" : "A good number of topic modeling algorithms are introduced in the recent past which varies in their method of working mainly with the assumptions they adopt for the statistical processing. An automated document indexing method based on a latent class model for factor analysis of count data in the latent semantic space has been introduced by Thomas Hofman [18]. This generative data model called Probabilistic Latent Semantic Indexing (PLSI), considered as an alternative to the basic Latent Semantic Indexing has a strong statistical foundation. The basic assumption of PLSI is that each word in a document corresponds to only one topic.\nLater, Blei et. al.[19] introduced a new topic modeling algorithm known as Latent Dirichlet Allocation (LDA) which is more efficient and attractive than PLSI. This model assumes that a document contain multiple topics and such topics are leveraged using a Dirichlet Prior process. In the following section, we will briefly describe the un-\nderlying principle of LDA. Even though a LDA works well on broad ranges of discrete datasets, the text is considered to be a typical example to which the model can be best applied. The process of generating a document with n words by LDA can be described as follows[19]:\n1. Choose the number of words, n, according to Poisson Distribution;\n2. Choose the distribution over topics, θ, for this document by Dirichlet Distribution;\n(a) Choose a topic T (i) ∼ Multinomial(θ)\n(b) Choose a word W (i) from P ( W (i)|T (i), β )\nThus the marginal distribution of the document can be obtained from the above process as :\nP (d) =\n∫\nθ\n(\nn ∏\ni=1\n∑\nT (i)\nP (W (i)|T (i), β)P (T (i)|θ)\n)\n+P (θ|α)dθ\nwhere, P (θ|α) is derived by Dirichlet Distribution parameterized by α, and P (W (i))|T (i), β) is the probability of W (i) under topic T (i) parameterized by β. 
The parameter α can be viewed as a prior observation counting on the number of times each topic is sampled in a document, before we actually seen any word from that document. The parameter β is a hyper-parameter determining the number of times words are sampled from a topic [19], before any word of the corpus is observed. At the end, the probability of the whole corpus D can be derived by taking the product of all documents’ marginal probability as given below:\nP (D) =\nM ∏\ni=1\nP (di) (1)"
}, {
"heading" : "4. PROPOSED APPROACH",
"text" : "In the area of text mining, topic models or specifically probabilistic topic models are suite of algorithms which got wider recognition for its ability to leverage hidden thematic information from huge archives of text data. Text mining\nresearchers are making use of topic modeling algorithms such as Latent Semantic Analysis (LSA) [20], Probabilistic Latent Semantic Indexing (pLSI) [21], Latent Dirichlet Allocation (LDA) [22] etc extensively for bringing out the themes or so called ”topics” from high dimensional unstructured data.\nAmong all these algorithms, LDA has got lot of attention in the recent past and is widely using because of its easiness of implementation and potential applications. Even though the power of LDA algorithm has been extensively used for leveraging topics, very few studies have been reported for mapping these statistically outputted topics to semantically rich concepts. Our proposed framework is an attempt to address this issue by making use of LDA algorithm to generate topics and we leverage concepts from such topics by using a new statistical weighting scheme and some lightweight linguistic processes. The overall work flow of the proposed approach is depicted in Fig.1.\nOur framework can be divided into 2 modules (i) concept extraction and (ii) concept hierarchy learning. The concept extraction module extract concepts from topics generated by LDA algorithm and the concept hierarchy learning module learns a hierarchy of extracted concepts by inducing a subsumption hierarchy learning algorithm. Detailed explanation of these modules are given below."
}, {
"heading" : "4.1. Concept Extraction",
"text" : "In this module, we introduce a topic to concept mapping procedure for leveraging potential concepts from statistically computed topics which are generated by the LDA algorithm. The first step of the proposed framework deals with the preprocessing of data which is meant for removing unwanted and irrelevant data and noises. Latent Dirichlet Allocation algorithm is executed on top of this preprocessed data which in turn generate topics through the statistical process. A total of 50 topics have been extracted by tuning the parameters of LDA algorithm. Once we got the sufficient topics for the experiment, for each\ntopic, we have created a topic - document cluster by grouping the documents which generated such a topic and the same process has been executed for all topics under consideration.\nNow, we introduce a new weighting scheme called tf − itf (term frequency - inverse topic frequency) which is used for finding out highly contributing topic word in each topic. We bring this weighting scheme to filter out the relevant candidate topic words. Term frequency (tf) is the total number of times that particular topic word comes in the topic - document clusters. Normalized term frequency, Ntf of a topic word Tw can be calculated as:\nNtf = count(Tw) in Ctd\ncount(total terms in Ctd) (2)\nInverse topic frequency Itf is calculated as:\nItf = count(total terms in Ctd)\ncount(documents with Tw) (3)\ntf−itf is calculated using the following equation:\ntf − itf = Ntf ∗ Itf (4)\nThis step is followed by a sentence extraction process in which all the sentences which contain the topic words which have high tf-itf weight are extracted. Next, we apply a parts of speech tagging on these sentences and extract only noun and adjective tags as we are only concentrating on the extraction of concepts. In linguistic preprocessing step, we take Noun + Noun, Noun + Adjective and (Adjective / Noun) + Noun combinations of words from the tagged collection. 
Concept identification is the last step in the process flow in which we find out the term count of all the combinations of Noun + Noun, Noun + Adjective and (Adjective / Noun) + Noun. A positive term count implies that the current multi word can be a potential ”concept” and if we get a zero term count, then that multi word can be ignored. The newly proposed algorithm for extracting the concepts is shown in Algorithm 1."
}, {
"heading" : "4.2. Concept Hierarchy Learning",
"text" : "In this module we derive hierarchical organization of leveraged concepts using a type of co-\nAlgorithm 1 Concept Extraction\n1: procedure ExtractConcepts(tc) 2: ∀ t, create Ctd 3: ∀Ctd, compute tf − itf weight 4: ∀ t, choose n words with highest tf − itf 5: S[ ] = sentences with top tf − itf words 6: POS tag(S) 7: W [ ] = (NNP,NNS,NN, JJ) 8: MWc[ ] = noun+ noun|adj + noun 9: while |MWc| 6= 0 do\n10: termCount(MW ) ∀ MW in MWc 11: if Tc > 0 then 12: Add MW into C 13: Remove MW from MWc 14: Fetch next MW from MWc 15: else 16: Remove MW from MWc 17: Fetch next MW from MWc 18: end if 19: end while 20: end procedure\noccurrence called ”subsumption” relation. Subsumption relation is found to be simple but very effective way of inferring relationships between words and phrases without using any training data or clustering methods. The basic idea behind subsumption relation is very simple : for any two concepts Ca and Cb, Ca is said to be subsume Cb if 2 conditions hold. P (Ca|Cb) = 1 and P (Cb|Ca) < 1. To be more specific, Ca subsumes Cb if the documents which Cb occurs in are a subset of the documents which Ca occurs in. Because Ca subsumes Cb and because it is more frequent, in the hierarchy, Ca is the parent of Cb.\nAlgorithm 2 Concept Hierarchy Learning\n1: procedure LearnHierarchy(C) 2: Choose pair of concepts, say Ca and Cb 3: Compute P (Ca|Cb) and P (Cb|Ca) 4: if P (Ca|Cb) = 1 and P (Cb|Ca) < 1 then 5: Assign Ca as the parent of Cb 6: else 7: Fetch next concept pairs 8: end if 9: Goto step 2, repeat ∀ Ca, Cb\n10: end procedure"
}, {
"heading" : "5. EXPERIMENTAL SETUP",
"text" : "This section concentrates on the implementation details of our proposed framework and concept extraction and hierarchy learning procedures are discussed in detail."
}, {
"heading" : "5.1. Concept Extraction",
"text" : "Here, concept extraction module of the framework is discussed. This module concentrates on tasks such as data collection and pre-processing, topic modeling, topic-document clustering, tf-itf weighting, sentence extraction and POS tagging, linguistic pre-processing etc for identifying concepts and a detailed explanation of each step is given below."
}, {
"heading" : "5.1.1. Dataset Collection and Preprocessing",
"text" : "We are using publicly available datasets such as Reuters Corpus Volume 1 dataset[24] and BBC News Dataset[25] for the experiment. Reuters is the world’s biggest international news agency and cater different news and related information through their website, video, interactive television and mobile platforms. Reuters Corpus Volume 1 is in XML format and is freely available for research purpose. Text messages are extracted by a thorough pre-processing such as removing XML tags, URLs and other special symbols and then created a new dataset exclusively for our experiment. BBC provides two benchmarked news article datasets which is freely available for machine learning research. The general BBC dataset consist of 2225 text documents directly from their website corresponding to stories in five areas such as business, entertainment, politics, sports and technology, from 2004 to 2005. A thorough preprocessing such as stemming, and removal of stop-word, URLs and special characters on this dataset and made an experiment ready copy of the original dataset."
}, {
"heading" : "5.1.2. Topic Modeling",
"text" : "Latent Dirichlet Allocation (LDA) algorithm has been applied on the pre-processed dataset to leverage topics for this experiment. The number of iterations is set to 300 as Gibbs sampling method usually approaches the target distribu-\ntion after 300 iterations. The number of topics is set to 50 and a snapshot of 5 topics we have randomly chosen is shown in Table 2."
}, {
"heading" : "5.1.3. Topic - Document Clustering",
"text" : "In this step, we consider each topic and then grouped and clustered top 50 documents which contributed the creation of that specific topic. This has been done for all the 50 topics of our choice. As an outcome, we have got 50 such clusters that contain documents which generated the topics."
}, {
"heading" : "5.1.4. TF-ITF Weighting",
"text" : "Here, we compute the tf − itf(term frequency − inverse topic frequency) weight of each word in every topic using Eq.(3),\nEq.(4) and Eq.(5) to find out highly used topic words in the collection. Table 2 also shows topic words along with their tf-itf weight."
}, {
"heading" : "5.1.5. Sentence Extraction & POS Tagging",
"text" : "In sentence extraction step, we consider topic words having highest tf-itf weight and then extract sentences containing these topic words from the topic - document clusters. Then a parts of speech tagging has been done to identify words tagged as nouns and adjectives from these sentences as our aim is to extract potential ”concepts” from the repository. For this experiment, Natural Language Toolkit (NLTK) [23] has been used which contains libraries for Natural Language Processing for Python programming language."
}, {
"heading" : "5.1.6. Linguistic Processing & Concept Identification",
"text" : "All words which are tagged as Nouns(NN/NNP/NNS) and Adjectives (JJ) are filtered out and all possible combinations of Noun + Noun,Adjective + Noun and (Noun/Adjective) + Noun. The results are shown in Table 3. The term count for each of these multi word term is then calculated against the original corpus and a positive term count implies that the corresponding multi-word term can be a potential concept and we eliminate the term if we get a zero term count. This process\nhas been repeated for all the multi-words we have filtered out."
}, {
"heading" : "5.2. Concept Hierarchy Learning",
"text" : "Concept hierarchy learning module concentrates on leveraging a subsumption hierarchy[5] depicting an ”is-a” relation between the concepts identified by the proposed algorithm. Subsumption relation is simple but considered as an important relationship type in any ontological structure and we calculate two probability conditions for the same. For any given two concepts, we first calculate P (C1|C2) and then P (C2|C1), in order to establish a subsumption relation, the former probability must be 1 and the latter should be less than 1. In other words, C1 subsumes C2 if the documents in which C2 occurs is a subset of the documents which C1 occurs in.\nFor instance, consider two concepts dial-up internet and network connection, the proposed method computes P (dialup internet|network connection) and P (network connection|dial − up internet) and found that the number of documents in which dialup internet occurs is a subset of number of documents in which network connection occurs. That means there exists a subsumption relation between these two concepts and dialup internet concept may be subsumed by network connection concept. This process has been repeated for all concepts in the collection, and a part of such a hierarchy generated using our proposed algorithm is shown in Fig. 2."
}, {
"heading" : "6. EVALUATION OF RESULTS",
"text" : "Here we evaluate the results produced by our proposed method and precision and recall measures are used for evaluating the quality of concepts leveraged. We have first created a human generated concept repository and kept for verifying against the machine generated concepts. Precision computes the fraction of machine extracted concepts that are also human generated, and recall measures concepts which are extracted by proposed algorithm that are also human authored. In information retrieval, it is estimated\nthat achieving high precision and recall at same time is difficult and using a measure called F1, we can balance these two. Here, true positive is defined as the number of overlapped concepts between human generated concepts and concepts extracted by our proposed algorithm, false positive is the number of extracted concepts that are not truly human authored concepts and false negative is the human authored concepts that are missed by the concept extraction method. Using these measures, we have compared our proposed method against some of the existing concept extraction algorithms and the result is shown in Table 4.\nFrom the performance graph shown in Figure 4, it is clear that our proposed algorithm extracts more concepts as the number of topics are increasing. The other baseline algorithms such as ACE and ICE performs poor when the number of topics are increased randomly. This shows that the proposed algorithm outperforms the baseline algorithms when extracting real-world concepts from large number of statistically generated top-\nics."
}, {
"heading" : "7. CONCLUSIONS AND FUTURE WORK",
"text" : "This paper proposed a novel framework for extracting close to real world concepts from large collection of unstructured text documents which is guided by a probabilistic topic modeling algorithm. Proposed method also deals with learning a subsumption hierarchy which exploits ”is-a” relationships among identified concepts which is extensively used in ontology generation. Experiments conducted on large datasets such as Reuters and BBC news corpus shows that the proposed method outperforms some of the already available algorithms and better concept\nidentification is possible with this framework.\nBecause of the promising end results, we are interested to work mainly on the directions of measuring the scalability of proposed framework by using more large datasets. Apart from the basic subsumption hierarchy which depicts ”is-a” relation, our future work will be on leveraging other relations that exist between concepts we would like to so that a this framework can automate the complete ontology generation process."
}, {
"heading" : "12 V S Anoop, S Asharaf and Deepak P",
"text" : "Anoop V S is a full time Ph.D Research Scholar at Data Engineering Lab, Indian Institute of Information Technology and Management - Kerala (IIITM-K), Thiruvananthapuram, India. He received his Masters in Computer Applications (MCA) -\nfrom IGNOU and Master of Philosophy in Computer Science from Cochin University of Science and Technology (CUSAT), Kerala in 2014. He has several publications in international journals, conference proceedings and book chapters. His research interests include Information Retrieval, Text Mining and NLP.\nAsharaf S is an Associate Professor at Indian Institute of Information Technology and Management - Kerala (IIITM-K), Thiruvananthapuram, India. He received his Ph.D and Master of Engineering degrees in Computer Science and Engineering -\nfrom Indian Institute of Science, Bangalore. His areas of interest include algorithms, business models and software systems related to data mining, data analytics, information retrieval, computational advertising, soft computing and machine learning.\nDeepak Padmanabhan is a Lecturer (Asst. Professor) in Computer Science at Queens University Belfast, UK. He completed his M.Tech and PhD from Indian Institute of\nTechnology Madras, all in Computer Science. His current research interests include data analytics, similarity search, information retrieval and natural language processing. He has published over 40 research papers across major venues in Information and Knowledge Management. He is a Senior Member of the IEEE and ACM."
} ],
"references" : [ {
"title" : "Semi-automated Ontology Creation for Semantic Search in Business Process Exploration",
"author" : [ "Pospiech", "Sebastian", "Martin Pelke", "Robert Mertens" ],
"venue" : "IEEE Tenth International Conference on Semantic Computing (ICSC).,",
"citeRegEx" : "1",
"shortCiteRegEx" : "1",
"year" : 2016
}, {
"title" : "Exploring events and distributed representations of text in multidocument summarization",
"author" : [ "Marujo", "Lus" ],
"venue" : "Knowledge-Based Systems,",
"citeRegEx" : "2",
"shortCiteRegEx" : "2",
"year" : 2016
}, {
"title" : "Aspect term extraction for sentiment analysis in large movie reviews us ing Gini Index feature selection method and SVM classifier",
"author" : [ "AS Manek", "PD Shenoy", "MC Mohan", "KR. Venugopal" ],
"venue" : "World Wide Web,",
"citeRegEx" : "3",
"shortCiteRegEx" : "3",
"year" : 2016
}, {
"title" : "Probabilistic topic models",
"author" : [ "M Steyvers", "T. Griffiths" ],
"venue" : "Handbook of latent semantic analysis.,",
"citeRegEx" : "4",
"shortCiteRegEx" : "4",
"year" : 2007
}, {
"title" : "Deriving concept hierarchies from text",
"author" : [ "M Sanderson", "B. Croft" ],
"venue" : "InProceedings of the 22nd annual international ACM SIGIR conference on Research and development in information retrieval,",
"citeRegEx" : "5",
"shortCiteRegEx" : "5",
"year" : 1999
}, {
"title" : "A phrase-discovering topic model using hierarchical pitman-yor processes",
"author" : [ "RV Lindsey", "WP Headden III", "MJ. Stipicevic" ],
"venue" : "InProceedings of the 2012 Joint Conference on Empirical Methods in Natural Language Processing and Computational Natural Language Learning, (pp. 214-222),",
"citeRegEx" : "6",
"shortCiteRegEx" : "6",
"year" : 2012
}, {
"title" : "Scalable topical phrase mining from text corpora",
"author" : [ "A El-Kishky", "Y Song", "C Wang", "CR Voss", "J. Han" ],
"venue" : "Proceedings of the VLDB Endowment.,",
"citeRegEx" : "7",
"shortCiteRegEx" : "7",
"year" : 2014
}, {
"title" : "Topical n-grams: Phrase and topic discovery, with an application to information retrieval",
"author" : [ "X Wang", "A McCallum", "X. Wei" ],
"venue" : "InSeventh IEEE International Conference on Data Mining (ICDM",
"citeRegEx" : "8",
"shortCiteRegEx" : "8",
"year" : 2007
}, {
"title" : "ACE: improving search engines via Automatic Concept Extraction",
"author" : [ "Ramirez PM", "Mattmann CA" ],
"venue" : "InInformation Reuse and Integration,",
"citeRegEx" : "9",
"shortCiteRegEx" : "9",
"year" : 2004
}, {
"title" : "Learning algorithms for keyphrase extraction",
"author" : [ "Turney PD" ],
"venue" : "Information Retrieval.,",
"citeRegEx" : "10",
"shortCiteRegEx" : "10",
"year" : 2000
}, {
"title" : "Towards the web of concepts: Extracting concepts from large datasets",
"author" : [ "A Parameswaran", "H Garcia-Molina", "A. Rajaraman" ],
"venue" : "Proceedings of the VLDB Endowment.,",
"citeRegEx" : "11",
"shortCiteRegEx" : "11",
"year" : 2010
}, {
"title" : "Automated concept extraction from plain text",
"author" : [ "B Gelfand", "M Wulfekuler", "PunchWF" ],
"venue" : "AAAI",
"citeRegEx" : "12",
"shortCiteRegEx" : "12",
"year" : 1998
}, {
"title" : "A graph-based approach to commonsense concept extraction and semantic similarity detection",
"author" : [ "D Rajagopal", "E Cambria", "D Olsher", "K. Kwok" ],
"venue" : "In Proceedings of the 22nd interna Learning Concept Hierarchies through Probabilistic Topic Modeling 11 tional conference on World Wide Web companion,",
"citeRegEx" : "13",
"shortCiteRegEx" : "13",
"year" : 2013
}, {
"title" : "Learning user information interests through extraction of semantically significant phrases",
"author" : [ "B Krulwich", "C. Burkey" ],
"venue" : "InProceedings of the AAAI spring symposium on machine learning in information access,",
"citeRegEx" : "14",
"shortCiteRegEx" : "14",
"year" : 1996
}, {
"title" : "KEA: Practical automatic keyphrase extraction",
"author" : [ "IH Witten", "GW Paynter", "E Frank", "C Gutwin", "CG. Nevill-Manning" ],
"venue" : "InProceedings of the fourth ACM conference on Digital libraries.,",
"citeRegEx" : "15",
"shortCiteRegEx" : "15",
"year" : 1999
}, {
"title" : "KPSpotter: a flexible information gain-based keyphrase extraction system",
"author" : [ "M Song", "IY Song", "X. Hu" ],
"venue" : "InProceedings of the 5th ACM international workshop on Web information and data management.,",
"citeRegEx" : "16",
"shortCiteRegEx" : "16",
"year" : 2003
}, {
"title" : "Automatic recognition of multi-word terms",
"author" : [ "K Frantzi", "S Ananiadou", "H. Mima" ],
"venue" : "the c-value/nc-value method. International Journal on Digital Libraries.,",
"citeRegEx" : "17",
"shortCiteRegEx" : "17",
"year" : 2000
}, {
"title" : "Probabilistic latent semantic indexing",
"author" : [ "T. Hofmann" ],
"venue" : "InProceedings of the 22nd annual international ACM SIGIR conference on Research and development in information retrieval,",
"citeRegEx" : "18",
"shortCiteRegEx" : "18",
"year" : 1999
}, {
"title" : "Latent dirichlet allocation",
"author" : [ "DM Blei", "AY Ng", "MI. Jordan" ],
"venue" : "Journal of machine Learning research.,",
"citeRegEx" : "19",
"shortCiteRegEx" : "19",
"year" : 2003
}, {
"title" : "Latent semantic analysis",
"author" : [ "Dumais ST" ],
"venue" : "Annual review of information science and technology.,",
"citeRegEx" : "20",
"shortCiteRegEx" : "20",
"year" : 2004
}, {
"title" : "Probabilistic latent semantic indexing",
"author" : [ "T. Hofmann" ],
"venue" : "InProceedings of the 22nd annual international ACM SIGIR conference on Research and development in information retrieval,",
"citeRegEx" : "21",
"shortCiteRegEx" : "21",
"year" : 1999
}, {
"title" : "Latent dirichlet allocation",
"author" : [ "DM Blei", "AY Ng", "MI. Jordan" ],
"venue" : "Journal of machine Learning research.,",
"citeRegEx" : "22",
"shortCiteRegEx" : "22",
"year" : 2003
}, {
"title" : "NLTK: the natural language toolkit",
"author" : [ "S. Bird" ],
"venue" : "InProceedings of the COLING/ACL on Interactive presentation sessions,",
"citeRegEx" : "23",
"shortCiteRegEx" : "23",
"year" : 2006
}, {
"title" : "RCV1: A New Benchmark Collection for Text Categorization Research",
"author" : [ "Lewis D", "Y Yang", "T Rose", "F. Li" ],
"venue" : "Journal of Machine Learn ing Research,",
"citeRegEx" : "24",
"shortCiteRegEx" : "24",
"year" : 2004
}, {
"title" : "Practical solutions to the problem of diagonal dominance in kernel document clustering",
"author" : [ "D Greene", "P. Cunningham" ],
"venue" : "InProceedings of the 23rd international conference on Machine learning,",
"citeRegEx" : "25",
"shortCiteRegEx" : "25",
"year" : 2006
} ],
"referenceMentions" : [ {
"referenceID" : 0,
"context" : "Extraction of such entities are non-trivial for applications such as automated ontology generation [1], document summarization [2] and aspect oriented sentiment analysis [3] to name a few.",
"startOffset" : 99,
"endOffset" : 102
}, {
"referenceID" : 1,
"context" : "Extraction of such entities are non-trivial for applications such as automated ontology generation [1], document summarization [2] and aspect oriented sentiment analysis [3] to name a few.",
"startOffset" : 127,
"endOffset" : 130
}, {
"referenceID" : 2,
"context" : "Extraction of such entities are non-trivial for applications such as automated ontology generation [1], document summarization [2] and aspect oriented sentiment analysis [3] to name a few.",
"startOffset" : 170,
"endOffset" : 173
}, {
"referenceID" : 3,
"context" : "Models such as Probabilistic topic models [4] and Latent Dirichlet Allocation (LDA) are some such flavors of topic modeling that attained significant attention.",
"startOffset" : 42,
"endOffset" : 45
}, {
"referenceID" : 4,
"context" : "Then we make use of a subsumption relation [5] (”is-a”) to connect concepts which are related thus forms a hierarchy of concepts.",
"startOffset" : 43,
"endOffset" : 46
}, {
"referenceID" : 5,
"context" : "Phrase discovering topic model [6] that uses pitman-yor process and TopMine [7] were two notable works that proposed algorithms for mining topical phrases from text documents.",
"startOffset" : 31,
"endOffset" : 34
}, {
"referenceID" : 6,
"context" : "Phrase discovering topic model [6] that uses pitman-yor process and TopMine [7] were two notable works that proposed algorithms for mining topical phrases from text documents.",
"startOffset" : 76,
"endOffset" : 79
}, {
"referenceID" : 7,
"context" : "Another work which uses topic models for generating multi-word phrases was the topical n-gram [8].",
"startOffset" : 94,
"endOffset" : 97
}, {
"referenceID" : 8,
"context" : "Automatic Concept Extractor (ACE), a system specifically designed for extracting concepts from HTML pages and making use of the text body and some visual clues on HTML tags for identifying potential concepts was proposed by Ramirez and Mattmann [9].",
"startOffset" : 245,
"endOffset" : 248
}, {
"referenceID" : 9,
"context" : "Turney[10] proposed another system named GenEx, which employed a genetic algorithm supported rule learning mechanism for concept extraction.",
"startOffset" : 6,
"endOffset" : 10
}, {
"referenceID" : 10,
"context" : "[11] which uses techniques similar to association rule mining.",
"startOffset" : 0,
"endOffset" : 4
}, {
"referenceID" : 11,
"context" : "[12] for concept extraction from plain text and used these to form a closely tied semantic relations graph for representing relationships between them.",
"startOffset" : 0,
"endOffset" : 4
}, {
"referenceID" : 12,
"context" : "[13] introduced another graph based approach for commonsense concept extraction and detection of semantic similarity among those concepts.",
"startOffset" : 0,
"endOffset" : 4
}, {
"referenceID" : 13,
"context" : "Another work reported in this domain is the method proposed by Krulwich and Burkey [14] which uses a simple heuristics rule based approach to extract key phrases from document by considering visual clues such as the usage of bold and italic characters as features.",
"startOffset" : 83,
"endOffset" : 87
}, {
"referenceID" : 14,
"context" : "al[15] was reported in the concept extraction literatures which creates a Naive Bayes learning model with known key phrases extracted from training documents and uses this model for inferring key phrases from new set of documents.",
"startOffset" : 2,
"endOffset" : 6
}, {
"referenceID" : 15,
"context" : "[16] proposed a method which uses the information gain measure for ranking candidate key phrases based on some distance and tf-idf features which was first introduced in [15].",
"startOffset" : 0,
"endOffset" : 4
}, {
"referenceID" : 14,
"context" : "[16] proposed a method which uses the information gain measure for ranking candidate key phrases based on some distance and tf-idf features which was first introduced in [15].",
"startOffset" : 170,
"endOffset" : 174
}, {
"referenceID" : 16,
"context" : "[17] which extracts multi-word terms from medical documents and named as C/NC method.",
"startOffset" : 0,
"endOffset" : 4
}, {
"referenceID" : 17,
"context" : "An automated document indexing method based on a latent class model for factor analysis of count data in the latent semantic space has been introduced by Thomas Hofman [18].",
"startOffset" : 168,
"endOffset" : 172
}, {
"referenceID" : 18,
"context" : "[19] introduced a new topic modeling algorithm known as Latent Dirichlet Allocation (LDA) which is more efficient and attractive than PLSI.",
"startOffset" : 0,
"endOffset" : 4
}, {
"referenceID" : 18,
"context" : "The process of generating a document with n words by LDA can be described as follows[19]:",
"startOffset" : 84,
"endOffset" : 88
}, {
"referenceID" : 18,
"context" : "The parameter β is a hyper-parameter determining the number of times words are sampled from a topic [19], before any word of the corpus is observed.",
"startOffset" : 100,
"endOffset" : 104
}, {
"referenceID" : 19,
"context" : "researchers are making use of topic modeling algorithms such as Latent Semantic Analysis (LSA) [20], Probabilistic Latent Semantic Indexing (pLSI) [21], Latent Dirichlet Allocation (LDA) [22] etc extensively for bringing out the themes or so called ”topics” from high dimensional unstructured data.",
"startOffset" : 95,
"endOffset" : 99
}, {
"referenceID" : 20,
"context" : "researchers are making use of topic modeling algorithms such as Latent Semantic Analysis (LSA) [20], Probabilistic Latent Semantic Indexing (pLSI) [21], Latent Dirichlet Allocation (LDA) [22] etc extensively for bringing out the themes or so called ”topics” from high dimensional unstructured data.",
"startOffset" : 147,
"endOffset" : 151
}, {
"referenceID" : 21,
"context" : "researchers are making use of topic modeling algorithms such as Latent Semantic Analysis (LSA) [20], Probabilistic Latent Semantic Indexing (pLSI) [21], Latent Dirichlet Allocation (LDA) [22] etc extensively for bringing out the themes or so called ”topics” from high dimensional unstructured data.",
"startOffset" : 187,
"endOffset" : 191
}, {
"referenceID" : 23,
"context" : "Dataset Collection and Preprocessing We are using publicly available datasets such as Reuters Corpus Volume 1 dataset[24] and BBC News Dataset[25] for the experiment.",
"startOffset" : 117,
"endOffset" : 121
}, {
"referenceID" : 24,
"context" : "Dataset Collection and Preprocessing We are using publicly available datasets such as Reuters Corpus Volume 1 dataset[24] and BBC News Dataset[25] for the experiment.",
"startOffset" : 142,
"endOffset" : 146
}, {
"referenceID" : 22,
"context" : "For this experiment, Natural Language Toolkit (NLTK) [23] has been used which contains libraries for Natural Language Processing for Python programming language.",
"startOffset" : 53,
"endOffset" : 57
}, {
"referenceID" : 4,
"context" : "Concept Hierarchy Learning Concept hierarchy learning module concentrates on leveraging a subsumption hierarchy[5] depicting an ”is-a” relation between the concepts identified by the proposed algorithm.",
"startOffset" : 111,
"endOffset" : 114
} ],
"year" : 2016,
"abstractText" : "With the advent of semantic web, various tools and techniques have been introduced for presenting and organizing knowledge. Concept hierarchies are one such technique which gained significant attention due to its usefulness in creating domain ontologies that are considered as an integral part of semantic web. Automated concept hierarchy learning algorithms focus on extracting relevant concepts from unstructured text corpus and connect them together by identifying some potential relations exist between them. In this paper, we propose a novel approach for identifying relevant concepts from plain text and then learns hierarchy of concepts by exploiting subsumption relation between them. To start with, we model topics using a probabilistic topic model and then make use of some lightweight linguistic process to extract semantically rich concepts. Then we connect concepts by identifying an ”is-a” relationship between pair of concepts. The proposed method is completely unsupervised and there is no need for a domain specific training corpus for concept extraction and learning. Experiments on large and real-world text corpora such as BBC News dataset and Reuters News corpus shows that the proposed method outperforms some of the existing methods for concept extraction and efficient concept hierarchy learning is possible if the overall task is guided by a probabilistic topic modeling algorithm.",
"creator" : "LaTeX with hyperref package"
}
}
|
{
"pile_set_name": "Github"
}
|
// Project OldRod - A KoiVM devirtualisation utility.
// Copyright (C) 2019 Washi
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
using System;
using AsmResolver;
using AsmResolver.PE.DotNet.Metadata;
using OldRod.Core;
using OldRod.Core.Architecture;
namespace OldRod.Pipeline
{
public class KoiVmAwareStreamReader : IMetadataStreamReader
{
private readonly IMetadataStreamReader _reader;
public KoiVmAwareStreamReader(ISegmentReferenceResolver referenceResolver, ILogger logger)
: this(referenceResolver, "#Koi", logger)
{
}
public KoiVmAwareStreamReader(ISegmentReferenceResolver referenceResolver, string koiStreamName, ILogger logger)
{
KoiStreamName = koiStreamName ?? throw new ArgumentNullException(nameof(koiStreamName));
Logger = logger;
_reader = new DefaultMetadataStreamReader(referenceResolver);
}
public string KoiStreamName
{
get;
}
public ILogger Logger
{
get;
}
public IMetadataStream ReadStream(MetadataStreamHeader header, IBinaryStreamReader reader)
{
return header.Name == KoiStreamName
? new KoiStream(KoiStreamName, new DataSegment(reader.ReadToEnd()), Logger)
: _reader.ReadStream(header, reader);
}
}
}
|
{
"pile_set_name": "Github"
}
|
goog.provide('os.metrics.ServersMetrics');
goog.require('os.ui.metrics.MetricNode');
goog.require('os.ui.metrics.MetricsPlugin');
goog.require('os.ui.slick.SlickTreeNode');
/**
 * Metrics plugin describing user interactions with the Servers section of the
 * settings window.
 * @extends {os.ui.metrics.MetricsPlugin}
 * @constructor
 */
os.metrics.ServersMetrics = function() {
  os.metrics.ServersMetrics.base(this, 'constructor');

  this.setLabel('Servers');
  this.setDescription('The servers window can be accessed through the settings window. It shows all of the ' +
      'servers that are currently configured. It also allows you to add, edit, refresh and remove servers.');
  // this.setTags(['TODO']);
  this.setIcon('fa fa-database');
  this.setCollapsed(true);

  // manually build the tree
  var leaf = this.getLeafNode();

  this.addChild(leaf, {
    label: 'Add Server',
    description: 'Add a new server in the Servers section in settings.',
    key: os.metrics.Servers.ADD_SERVER
  });
  this.addChild(leaf, {
    label: 'View Server',
    description: 'View details about a server in the Servers section in settings.',
    key: os.metrics.Servers.VIEW
  });
  // FIX: the three entries below previously placed the description text in the
  // label and omitted the description entirely; they now follow the same
  // label/description pattern as the entries above.
  this.addChild(leaf, {
    label: 'Refresh Server',
    description: 'Refresh a server in the Servers section in settings.',
    key: os.metrics.Servers.REFRESH
  });
  this.addChild(leaf, {
    label: 'Edit Server',
    description: 'Edit a server in the Servers section in settings.',
    key: os.metrics.Servers.EDIT
  });
  this.addChild(leaf, {
    label: 'Remove Server',
    description: 'Delete a server in the Servers section in settings.',
    key: os.metrics.Servers.REMOVE
  });
};
goog.inherits(os.metrics.ServersMetrics, os.ui.metrics.MetricsPlugin);
|
{
"pile_set_name": "Github"
}
|
package com.dzw.controller;
import com.dzw.entity.UserMoodWord;
import com.dzw.service.MoodWordService;
import com.dzw.service.UserMoodService;
import com.dzw.util.UserUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.Map;
/**
 * REST controller exposing the mood-word endpoints under the "api" path.
 */
@RestController
@RequestMapping("api")
public class UserMoodCtrl {

    /** Service that performs the actual mood-word persistence and lookups. */
    @Autowired
    private MoodWordService moodWordService;

    /**
     * Publishes a new mood word.
     *
     * @param context the text content of the mood word
     * @param img     an optional image reference (may be null)
     * @return the result map produced by the service
     */
    @GetMapping("moodWord")
    public Object getMoodWord(@RequestParam("context") String context, String img) {
        return moodWordService.publishMoodWord(context, img);
    }

    /**
     * Lists the mood words belonging to the currently logged-in user.
     *
     * @return the current user's mood words
     */
    @GetMapping("personalMood")
    public Object getpersonalMood() {
        return moodWordService.selectMoodWordByUserId(UserUtil.getLoginUser().getId());
    }

    /**
     * Lists every mood word in the system.
     *
     * @return all mood words
     */
    @GetMapping("xingqiuMood")
    public Object getxingqiuMood() {
        return moodWordService.selectAllMoodWord();
    }
}
|
{
"pile_set_name": "Github"
}
|
# Server deployment configuration (WSO2 Carbon-style deployment.toml).
# Placeholder syntax used below: $ref{...} resolves another key in this file,
# ${...} resolves a runtime server property, $env{...} reads an environment variable.
[server]
hostname = "localhost"
node_ip = "127.0.0.1"
base_path = "https://$ref{server.hostname}:${carbon.management.port}"
# Bootstrap administrator account.
[super_admin]
username = "admin"
password = "admin"
create_admin_account = true
# Identity database; all connection details are injected from environment variables.
[database.identity_db]
driver = "$env{IDENTITY_DATABASE_DRIVER}"
url = "$env{IDENTITY_DATABASE_URL}"
username = "$env{IDENTITY_DATABASE_USERNAME}"
password = "$env{IDENTITY_DATABASE_PASSWORD}"
# Shared database, likewise injected from environment variables.
[database.shared_db]
driver = "$env{SHARED_DATABASE_DRIVER}"
url = "$env{SHARED_DATABASE_URL}"
username = "$env{SHARED_DATABASE_USERNAME}"
password = "$env{SHARED_DATABASE_PASSWORD}"
# LDAP-backed user store; the embedded LDAP port is resolved at runtime.
[user_store]
type = "database_unique_id"
connection_url = "ldap://localhost:${Ports.EmbeddedLDAP.LDAPServerPort}"
connection_name = "uid=admin,ou=system"
connection_password = "admin"
base_dn = "dc=wso2,dc=org" # Base DN from which the user and group search bases are generated.
# Primary keystore.
[keystore.primary]
file_name = "wso2carbon.jks"
password = "wso2carbon"
|
{
"pile_set_name": "Github"
}
|
[
{
"date": "2017-05-01",
"commits": 1,
"month": 4,
"day": 1,
"week": "0"
},
{
"date": "2017-05-02",
"commits": 1,
"month": 4,
"day": 2,
"week": "0"
},
{
"date": "2017-05-03",
"commits": 1,
"month": 4,
"day": 3,
"week": "0"
},
{
"date": "2017-05-04",
"commits": 1,
"month": 4,
"day": 4,
"week": "0"
},
{
"date": "2017-05-05",
"commits": 1,
"month": 4,
"day": 5,
"week": "0"
},
{
"date": "2017-05-06",
"commits": 1,
"month": 4,
"day": 6,
"week": "0"
},
{
"date": "2017-05-07",
"commits": 1,
"month": 4,
"day": 0,
"week": "1"
},
{
"date": "2017-05-08",
"commits": 1,
"month": 4,
"day": 1,
"week": "1"
},
{
"date": "2017-05-09",
"commits": 1,
"month": 4,
"day": 2,
"week": "1"
},
{
"date": "2017-05-10",
"commits": 1,
"month": 4,
"day": 3,
"week": "1"
},
{
"date": "2017-05-11",
"commits": 1,
"month": 4,
"day": 4,
"week": "1"
},
{
"date": "2017-05-12",
"commits": 1,
"month": 4,
"day": 5,
"week": "1"
},
{
"date": "2017-05-13",
"commits": 1,
"month": 4,
"day": 6,
"week": "1"
},
{
"date": "2017-05-14",
"commits": 1,
"month": 4,
"day": 0,
"week": "2"
},
{
"date": "2017-05-15",
"commits": 1,
"month": 4,
"day": 1,
"week": "2"
},
{
"date": "2017-05-16",
"commits": 1,
"month": 4,
"day": 2,
"week": "2"
},
{
"date": "2017-05-17",
"commits": 1,
"month": 4,
"day": 3,
"week": "2"
},
{
"date": "2017-05-18",
"commits": 1,
"month": 4,
"day": 4,
"week": "2"
},
{
"date": "2017-05-19",
"commits": 1,
"month": 4,
"day": 5,
"week": "2"
},
{
"date": "2017-05-20",
"commits": 1,
"month": 4,
"day": 6,
"week": "2"
},
{
"date": "2017-05-21",
"commits": 1,
"month": 4,
"day": 0,
"week": "3"
},
{
"date": "2017-05-22",
"commits": 1,
"month": 4,
"day": 1,
"week": "3"
},
{
"date": "2017-05-23",
"commits": 1,
"month": 4,
"day": 2,
"week": "3"
},
{
"date": "2017-05-24",
"commits": 1,
"month": 4,
"day": 3,
"week": "3"
},
{
"date": "2017-05-25",
"commits": 1,
"month": 4,
"day": 4,
"week": "3",
"lastWeek": true
},
{
"date": "2017-05-26",
"commits": 1,
"month": 4,
"day": 5,
"week": "3",
"lastWeek": true
},
{
"date": "2017-05-27",
"commits": 1,
"month": 4,
"day": 6,
"week": "3",
"lastWeek": true
},
{
"date": "2017-05-28",
"commits": 1,
"month": 4,
"day": 0,
"week": "4",
"lastWeek": true
},
{
"date": "2017-05-29",
"commits": 1,
"month": 4,
"day": 1,
"week": "4",
"lastWeek": true
},
{
"date": "2017-05-30",
"commits": 1,
"month": 4,
"day": 2,
"week": "4",
"lastWeek": true
},
{
"date": "2017-05-31",
"commits": 1,
"month": 4,
"day": 3,
"week": "4",
"lastWeek": true,
"lastDay": true
},
{
"date": "2017-06-01",
"commits": 1,
"month": 5,
"day": 4,
"week": "4"
},
{
"date": "2017-06-02",
"commits": 1,
"month": 5,
"day": 5,
"week": "4"
},
{
"date": "2017-06-03",
"commits": 1,
"month": 5,
"day": 6,
"week": "4"
},
{
"date": "2017-06-04",
"commits": 1,
"month": 5,
"day": 0,
"week": "5"
},
{
"date": "2017-06-05",
"commits": 1,
"month": 5,
"day": 1,
"week": "5"
},
{
"date": "2017-06-06",
"commits": 1,
"month": 5,
"day": 2,
"week": "5"
},
{
"date": "2017-06-07",
"commits": 1,
"month": 5,
"day": 3,
"week": "5"
},
{
"date": "2017-06-08",
"commits": 1,
"month": 5,
"day": 4,
"week": "5"
},
{
"date": "2017-06-09",
"commits": 1,
"month": 5,
"day": 5,
"week": "5"
},
{
"date": "2017-06-10",
"commits": 1,
"month": 5,
"day": 6,
"week": "5"
},
{
"date": "2017-06-11",
"commits": 1,
"month": 5,
"day": 0,
"week": "6"
},
{
"date": "2017-06-12",
"commits": 1,
"month": 5,
"day": 1,
"week": "6"
},
{
"date": "2017-06-13",
"commits": 1,
"month": 5,
"day": 2,
"week": "6"
},
{
"date": "2017-06-14",
"commits": 1,
"month": 5,
"day": 3,
"week": "6"
},
{
"date": "2017-06-15",
"commits": 1,
"month": 5,
"day": 4,
"week": "6"
},
{
"date": "2017-06-16",
"commits": 1,
"month": 5,
"day": 5,
"week": "6"
},
{
"date": "2017-06-17",
"commits": 1,
"month": 5,
"day": 6,
"week": "6"
},
{
"date": "2017-06-18",
"commits": 1,
"month": 5,
"day": 0,
"week": "7"
},
{
"date": "2017-06-19",
"commits": 1,
"month": 5,
"day": 1,
"week": "7"
},
{
"date": "2017-06-20",
"commits": 1,
"month": 5,
"day": 2,
"week": "7"
},
{
"date": "2017-06-21",
"commits": 2,
"month": 5,
"day": 3,
"week": "7"
},
{
"date": "2017-06-22",
"commits": 6,
"month": 5,
"day": 4,
"week": "7"
},
{
"date": "2017-06-23",
"commits": 9,
"month": 5,
"day": 5,
"week": "7"
},
{
"date": "2017-06-24",
"commits": 3,
"month": 5,
"day": 6,
"week": "7",
"lastWeek": true
},
{
"date": "2017-06-25",
"commits": 3,
"month": 5,
"day": 0,
"week": "8",
"lastWeek": true
},
{
"date": "2017-06-26",
"commits": 1,
"month": 5,
"day": 1,
"week": "8",
"lastWeek": true
},
{
"date": "2017-06-27",
"commits": 7,
"month": 5,
"day": 2,
"week": "8",
"lastWeek": true
},
{
"date": "2017-06-28",
"commits": 7,
"month": 5,
"day": 3,
"week": "8",
"lastWeek": true
},
{
"date": "2017-06-29",
"commits": 12,
"month": 5,
"day": 4,
"week": "8",
"lastWeek": true
},
{
"date": "2017-06-30",
"commits": 9,
"month": 5,
"day": 5,
"week": "8",
"lastWeek": true,
"lastDay": true
},
{
"date": "2017-07-01",
"commits": 4,
"month": 6,
"day": 6,
"week": "8"
},
{
"date": "2017-07-02",
"commits": 1,
"month": 6,
"day": 0,
"week": "9"
},
{
"date": "2017-07-03",
"commits": 2,
"month": 6,
"day": 1,
"week": "9"
},
{
"date": "2017-07-04",
"commits": 7,
"month": 6,
"day": 2,
"week": "9"
},
{
"date": "2017-07-05",
"commits": 6,
"month": 6,
"day": 3,
"week": "9"
},
{
"date": "2017-07-06",
"commits": 1,
"month": 6,
"day": 4,
"week": "9"
},
{
"date": "2017-07-07",
"commits": 6,
"month": 6,
"day": 5,
"week": "9"
},
{
"date": "2017-07-08",
"commits": 1,
"month": 6,
"day": 6,
"week": "9"
},
{
"date": "2017-07-09",
"commits": 1,
"month": 6,
"day": 0,
"week": "10"
},
{
"date": "2017-07-10",
"commits": 1,
"month": 6,
"day": 1,
"week": "10"
},
{
"date": "2017-07-11",
"commits": 8,
"month": 6,
"day": 2,
"week": "10"
},
{
"date": "2017-07-12",
"commits": 5,
"month": 6,
"day": 3,
"week": "10"
},
{
"date": "2017-07-13",
"commits": 7,
"month": 6,
"day": 4,
"week": "10"
},
{
"date": "2017-07-14",
"commits": 4,
"month": 6,
"day": 5,
"week": "10"
},
{
"date": "2017-07-15",
"commits": 4,
"month": 6,
"day": 6,
"week": "10"
},
{
"date": "2017-07-16",
"commits": 2,
"month": 6,
"day": 0,
"week": "11"
},
{
"date": "2017-07-17",
"commits": 1,
"month": 6,
"day": 1,
"week": "11"
},
{
"date": "2017-07-18",
"commits": 10,
"month": 6,
"day": 2,
"week": "11"
},
{
"date": "2017-07-19",
"commits": 14,
"month": 6,
"day": 3,
"week": "11"
},
{
"date": "2017-07-20",
"commits": 6,
"month": 6,
"day": 4,
"week": "11"
},
{
"date": "2017-07-21",
"commits": 1,
"month": 6,
"day": 5,
"week": "11"
},
{
"date": "2017-07-22",
"commits": 6,
"month": 6,
"day": 6,
"week": "11"
},
{
"date": "2017-07-23",
"commits": 1,
"month": 6,
"day": 0,
"week": "12"
},
{
"date": "2017-07-24",
"commits": 1,
"month": 6,
"day": 1,
"week": "12"
},
{
"date": "2017-07-25",
"commits": 1,
"month": 6,
"day": 2,
"week": "12",
"lastWeek": true
},
{
"date": "2017-07-26",
"commits": 1,
"month": 6,
"day": 3,
"week": "12",
"lastWeek": true
},
{
"date": "2017-07-27",
"commits": 9,
"month": 6,
"day": 4,
"week": "12",
"lastWeek": true
},
{
"date": "2017-07-28",
"commits": 1,
"month": 6,
"day": 5,
"week": "12",
"lastWeek": true
},
{
"date": "2017-07-29",
"commits": 15,
"month": 6,
"day": 6,
"week": "12",
"lastWeek": true
},
{
"date": "2017-07-30",
"commits": 1,
"month": 6,
"day": 0,
"week": "13",
"lastWeek": true
},
{
"date": "2017-07-31",
"commits": 1,
"month": 6,
"day": 1,
"week": "13",
"lastWeek": true,
"lastDay": true
},
{
"date": "2017-08-01",
"commits": 15,
"month": 7,
"day": 2,
"week": "13"
},
{
"date": "2017-08-02",
"commits": 11,
"month": 7,
"day": 3,
"week": "13"
},
{
"date": "2017-08-03",
"commits": 3,
"month": 7,
"day": 4,
"week": "13"
},
{
"date": "2017-08-04",
"commits": 6,
"month": 7,
"day": 5,
"week": "13"
},
{
"date": "2017-08-05",
"commits": 1,
"month": 7,
"day": 6,
"week": "13"
},
{
"date": "2017-08-06",
"commits": 4,
"month": 7,
"day": 0,
"week": "14"
},
{
"date": "2017-08-07",
"commits": 1,
"month": 7,
"day": 1,
"week": "14"
},
{
"date": "2017-08-08",
"commits": 10,
"month": 7,
"day": 2,
"week": "14"
},
{
"date": "2017-08-09",
"commits": 11,
"month": 7,
"day": 3,
"week": "14"
},
{
"date": "2017-08-10",
"commits": 8,
"month": 7,
"day": 4,
"week": "14"
},
{
"date": "2017-08-11",
"commits": 12,
"month": 7,
"day": 5,
"week": "14"
},
{
"date": "2017-08-12",
"commits": 8,
"month": 7,
"day": 6,
"week": "14"
},
{
"date": "2017-08-13",
"commits": 2,
"month": 7,
"day": 0,
"week": "15"
},
{
"date": "2017-08-14",
"commits": 1,
"month": 7,
"day": 1,
"week": "15"
},
{
"date": "2017-08-15",
"commits": 18,
"month": 7,
"day": 2,
"week": "15"
},
{
"date": "2017-08-16",
"commits": 21,
"month": 7,
"day": 3,
"week": "15"
},
{
"date": "2017-08-17",
"commits": 7,
"month": 7,
"day": 4,
"week": "15"
},
{
"date": "2017-08-18",
"commits": 10,
"month": 7,
"day": 5,
"week": "15"
},
{
"date": "2017-08-19",
"commits": 8,
"month": 7,
"day": 6,
"week": "15"
},
{
"date": "2017-08-20",
"commits": 1,
"month": 7,
"day": 0,
"week": "16"
},
{
"date": "2017-08-21",
"commits": 1,
"month": 7,
"day": 1,
"week": "16"
},
{
"date": "2017-08-22",
"commits": 1,
"month": 7,
"day": 2,
"week": "16"
},
{
"date": "2017-08-23",
"commits": 7,
"month": 7,
"day": 3,
"week": "16"
},
{
"date": "2017-08-24",
"commits": 11,
"month": 7,
"day": 4,
"week": "16"
},
{
"date": "2017-08-25",
"commits": 3,
"month": 7,
"day": 5,
"week": "16",
"lastWeek": true
},
{
"date": "2017-08-26",
"commits": 3,
"month": 7,
"day": 6,
"week": "16",
"lastWeek": true
},
{
"date": "2017-08-27",
"commits": 1,
"month": 7,
"day": 0,
"week": "17",
"lastWeek": true
},
{
"date": "2017-08-28",
"commits": 2,
"month": 7,
"day": 1,
"week": "17",
"lastWeek": true
},
{
"date": "2017-08-29",
"commits": 7,
"month": 7,
"day": 2,
"week": "17",
"lastWeek": true
},
{
"date": "2017-08-30",
"commits": 20,
"month": 7,
"day": 3,
"week": "17",
"lastWeek": true
},
{
"date": "2017-08-31",
"commits": 15,
"month": 7,
"day": 4,
"week": "17",
"lastWeek": true,
"lastDay": true
},
{
"date": "2017-09-01",
"commits": 1,
"month": 8,
"day": 5,
"week": "17"
},
{
"date": "2017-09-02",
"commits": 4,
"month": 8,
"day": 6,
"week": "17"
},
{
"date": "2017-09-03",
"commits": 1,
"month": 8,
"day": 0,
"week": "18"
},
{
"date": "2017-09-04",
"commits": 1,
"month": 8,
"day": 1,
"week": "18"
},
{
"date": "2017-09-05",
"commits": 8,
"month": 8,
"day": 2,
"week": "18"
},
{
"date": "2017-09-06",
"commits": 6,
"month": 8,
"day": 3,
"week": "18"
},
{
"date": "2017-09-07",
"commits": 5,
"month": 8,
"day": 4,
"week": "18"
},
{
"date": "2017-09-08",
"commits": 1,
"month": 8,
"day": 5,
"week": "18"
},
{
"date": "2017-09-09",
"commits": 25,
"month": 8,
"day": 6,
"week": "18"
},
{
"date": "2017-09-10",
"commits": 1,
"month": 8,
"day": 0,
"week": "19"
},
{
"date": "2017-09-11",
"commits": 1,
"month": 8,
"day": 1,
"week": "19"
},
{
"date": "2017-09-12",
"commits": 1,
"month": 8,
"day": 2,
"week": "19"
},
{
"date": "2017-09-13",
"commits": 9,
"month": 8,
"day": 3,
"week": "19"
},
{
"date": "2017-09-14",
"commits": 1,
"month": 8,
"day": 4,
"week": "19"
},
{
"date": "2017-09-15",
"commits": 1,
"month": 8,
"day": 5,
"week": "19"
},
{
"date": "2017-09-16",
"commits": 4,
"month": 8,
"day": 6,
"week": "19"
},
{
"date": "2017-09-17",
"commits": 1,
"month": 8,
"day": 0,
"week": "20"
},
{
"date": "2017-09-18",
"commits": 1,
"month": 8,
"day": 1,
"week": "20"
},
{
"date": "2017-09-19",
"commits": 4,
"month": 8,
"day": 2,
"week": "20"
},
{
"date": "2017-09-20",
"commits": 8,
"month": 8,
"day": 3,
"week": "20"
},
{
"date": "2017-09-21",
"commits": 10,
"month": 8,
"day": 4,
"week": "20"
},
{
"date": "2017-09-22",
"commits": 9,
"month": 8,
"day": 5,
"week": "20"
},
{
"date": "2017-09-23",
"commits": 5,
"month": 8,
"day": 6,
"week": "20"
},
{
"date": "2017-09-24",
"commits": 1,
"month": 8,
"day": 0,
"week": "21",
"lastWeek": true
},
{
"date": "2017-09-25",
"commits": 1,
"month": 8,
"day": 1,
"week": "21",
"lastWeek": true
},
{
"date": "2017-09-26",
"commits": 1,
"month": 8,
"day": 2,
"week": "21",
"lastWeek": true
},
{
"date": "2017-09-27",
"commits": 7,
"month": 8,
"day": 3,
"week": "21",
"lastWeek": true
},
{
"date": "2017-09-28",
"commits": 13,
"month": 8,
"day": 4,
"week": "21",
"lastWeek": true
},
{
"date": "2017-09-29",
"commits": 1,
"month": 8,
"day": 5,
"week": "21",
"lastWeek": true
},
{
"date": "2017-09-30",
"commits": 7,
"month": 8,
"day": 6,
"week": "21",
"lastWeek": true
},
{
"date": "2017-10-01",
"commits": 1,
"month": 9,
"day": 0,
"week": "22"
},
{
"date": "2017-10-02",
"commits": 1,
"month": 9,
"day": 1,
"week": "22"
},
{
"date": "2017-10-03",
"commits": 7,
"month": 9,
"day": 2,
"week": "22"
},
{
"date": "2017-10-04",
"commits": 2,
"month": 9,
"day": 3,
"week": "22"
},
{
"date": "2017-10-05",
"commits": 14,
"month": 9,
"day": 4,
"week": "22"
},
{
"date": "2017-10-06",
"commits": 9,
"month": 9,
"day": 5,
"week": "22"
},
{
"date": "2017-10-07",
"commits": 12,
"month": 9,
"day": 6,
"week": "22"
},
{
"date": "2017-10-08",
"commits": 1,
"month": 9,
"day": 0,
"week": "23"
},
{
"date": "2017-10-09",
"commits": 1,
"month": 9,
"day": 1,
"week": "23"
},
{
"date": "2017-10-10",
"commits": 13,
"month": 9,
"day": 2,
"week": "23"
},
{
"date": "2017-10-11",
"commits": 12,
"month": 9,
"day": 3,
"week": "23"
},
{
"date": "2017-10-12",
"commits": 1,
"month": 9,
"day": 4,
"week": "23"
},
{
"date": "2017-10-13",
"commits": 1,
"month": 9,
"day": 5,
"week": "23"
},
{
"date": "2017-10-14",
"commits": 1,
"month": 9,
"day": 6,
"week": "23"
},
{
"date": "2017-10-15",
"commits": 1,
"month": 9,
"day": 0,
"week": "24"
},
{
"date": "2017-10-16",
"commits": 1,
"month": 9,
"day": 1,
"week": "24"
},
{
"date": "2017-10-17",
"commits": 1,
"month": 9,
"day": 2,
"week": "24"
},
{
"date": "2017-10-18",
"commits": 1,
"month": 9,
"day": 3,
"week": "24"
},
{
"date": "2017-10-19",
"commits": 1,
"month": 9,
"day": 4,
"week": "24"
},
{
"date": "2017-10-20",
"commits": 1,
"month": 9,
"day": 5,
"week": "24"
},
{
"date": "2017-10-21",
"commits": 1,
"month": 9,
"day": 6,
"week": "24"
},
{
"date": "2017-10-22",
"commits": 1,
"month": 9,
"day": 0,
"week": "25"
},
{
"date": "2017-10-23",
"commits": 1,
"month": 9,
"day": 1,
"week": "25"
},
{
"date": "2017-10-24",
"commits": 10,
"month": 9,
"day": 2,
"week": "25"
},
{
"date": "2017-10-25",
"commits": 4,
"month": 9,
"day": 3,
"week": "25"
},
{
"date": "2017-10-26",
"commits": 7,
"month": 9,
"day": 4,
"week": "25"
},
{
"date": "2017-10-27",
"commits": 3,
"month": 9,
"day": 5,
"week": "25"
},
{
"date": "2017-10-28",
"commits": 14,
"month": 9,
"day": 6,
"week": "25"
},
{
"date": "2017-10-29",
"commits": 1,
"month": 9,
"day": 0,
"week": "26"
},
{
"date": "2017-10-30",
"commits": 1,
"month": 9,
"day": 1,
"week": "26"
},
{
"date": "2017-10-31",
"commits": 5,
"month": 9,
"day": 2,
"week": "26",
"lastDay": true
}
]
|
{
"pile_set_name": "Github"
}
|
/* Realtek PCI-Express Memstick Card Interface driver
*
* Copyright(c) 2009-2013 Realtek Semiconductor Corp. All rights reserved.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2, or (at your option) any
* later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, see <http://www.gnu.org/licenses/>.
*
* Author:
* Wei WANG <wei_wang@realsil.com.cn>
*/
#include <linux/module.h>
#include <linux/highmem.h>
#include <linux/delay.h>
#include <linux/platform_device.h>
#include <linux/memstick.h>
#include <linux/mfd/rtsx_pci.h>
#include <asm/unaligned.h>
/* Per-slot driver state for the Realtek PCI-E Memstick interface. */
struct realtek_pci_ms {
	struct platform_device *pdev;	/* platform device this host is bound to */
	struct rtsx_pcr *pcr;		/* shared rtsx PCI controller handle */
	struct memstick_host *msh;	/* memstick core host object */
	struct memstick_request *req;	/* request currently being processed */
	struct mutex host_mutex;	/* serializes request teardown vs. removal */
	struct work_struct handle_req;	/* deferred request-processing work */
	u8 ssc_depth;			/* SSC depth used when switching the card clock */
	unsigned int clock;		/* card clock frequency in Hz */
	unsigned char ifmode;		/* MEMSTICK_SERIAL or MEMSTICK_PAR4 */
	bool eject;			/* set when the host is being removed */
};
static inline struct device *ms_dev(struct realtek_pci_ms *host)
{
return &(host->pdev->dev);
}
static inline void ms_clear_error(struct realtek_pci_ms *host)
{
rtsx_pci_write_register(host->pcr, CARD_STOP,
MS_STOP | MS_CLR_ERR, MS_STOP | MS_CLR_ERR);
}
#ifdef DEBUG
/*
 * Dump the MS host internal register ranges 0xFD40-0xFD44 and
 * 0xFD52-0xFD69 through the command queue.  Debug builds only; on
 * non-DEBUG builds this compiles to nothing (see the #else below).
 */
static void ms_print_debug_regs(struct realtek_pci_ms *host)
{
	struct rtsx_pcr *pcr = host->pcr;
	u16 i;
	u8 *ptr;

	/* Print MS host internal registers */
	rtsx_pci_init_cmd(pcr);
	/* Queue reads for both register windows in one batch. */
	for (i = 0xFD40; i <= 0xFD44; i++)
		rtsx_pci_add_cmd(pcr, READ_REG_CMD, i, 0, 0);
	for (i = 0xFD52; i <= 0xFD69; i++)
		rtsx_pci_add_cmd(pcr, READ_REG_CMD, i, 0, 0);
	rtsx_pci_send_cmd(pcr, 100);

	/* Results come back, in order, in the command data buffer. */
	ptr = rtsx_pci_get_cmd_data(pcr);
	for (i = 0xFD40; i <= 0xFD44; i++)
		dev_dbg(ms_dev(host), "0x%04X: 0x%02x\n", i, *(ptr++));
	for (i = 0xFD52; i <= 0xFD69; i++)
		dev_dbg(ms_dev(host), "0x%04X: 0x%02x\n", i, *(ptr++));
}
#else
#define ms_print_debug_regs(host)
#endif
/*
 * Power up the Memstick slot: select the MS module, route the shared
 * bus to MS, enable the MS clock, apply pull control, switch card
 * power on, and finally enable the output drivers once the rail is
 * stable.
 *
 * Returns 0 on success or a negative error code.
 */
static int ms_power_on(struct realtek_pci_ms *host)
{
	struct rtsx_pcr *pcr = host->pcr;
	int err;

	rtsx_pci_init_cmd(pcr);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, CARD_SELECT, 0x07, MS_MOD_SEL);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, CARD_SHARE_MODE,
			CARD_SHARE_MASK, CARD_SHARE_48_MS);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, CARD_CLK_EN,
			MS_CLK_EN, MS_CLK_EN);
	err = rtsx_pci_send_cmd(pcr, 100);
	if (err < 0)
		return err;

	err = rtsx_pci_card_pull_ctl_enable(pcr, RTSX_MS_CARD);
	if (err < 0)
		return err;

	err = rtsx_pci_card_power_on(pcr, RTSX_MS_CARD);
	if (err < 0)
		return err;

	/* Wait for the card power rail to become stable before driving outputs. */
	msleep(150);

	err = rtsx_pci_write_register(pcr, CARD_OE,
			MS_OUTPUT_EN, MS_OUTPUT_EN);
	if (err < 0)
		return err;

	return 0;
}
/*
 * Power down the Memstick slot: gate the MS clock, disable the output
 * drivers, cut card power and release the pull control configuration.
 *
 * Returns 0 on success or a negative error code.
 */
static int ms_power_off(struct realtek_pci_ms *host)
{
	struct rtsx_pcr *pcr = host->pcr;
	int err;

	rtsx_pci_init_cmd(pcr);

	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, CARD_CLK_EN, MS_CLK_EN, 0);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, CARD_OE, MS_OUTPUT_EN, 0);

	err = rtsx_pci_send_cmd(pcr, 100);
	if (err < 0)
		return err;

	err = rtsx_pci_card_power_off(pcr, RTSX_MS_CARD);
	if (err < 0)
		return err;

	return rtsx_pci_card_pull_ctl_disable(pcr, RTSX_MS_CARD);
}
/*
 * DMA transfer of a long-data TPC via the ring buffer.
 *
 * @host:     driver state
 * @data_dir: READ or WRITE (host perspective)
 * @tpc:      transfer protocol command to issue
 * @cfg:      value written to MS_TRANS_CFG (e.g. WAIT_INT)
 * @sg:       single scatterlist entry describing the data buffer
 *
 * MS Pro cards use the auto read/write transfer modes with a 512-byte
 * sector count derived from the buffer length; legacy cards use the
 * normal modes.  After the DMA completes, MS_TRANS_CFG is checked for
 * error bits.  Returns 0 on success, -EIO on card-reported errors, or
 * the negative error from the transfer itself.
 */
static int ms_transfer_data(struct realtek_pci_ms *host, unsigned char data_dir,
		u8 tpc, u8 cfg, struct scatterlist *sg)
{
	struct rtsx_pcr *pcr = host->pcr;
	int err;
	unsigned int length = sg->length;
	u16 sec_cnt = (u16)(length / 512);	/* 512-byte sectors (Pro cards) */
	u8 val, trans_mode, dma_dir;
	struct memstick_dev *card = host->msh->card;
	bool pro_card = card->id.type == MEMSTICK_TYPE_PRO;

	dev_dbg(ms_dev(host), "%s: tpc = 0x%02x, data_dir = %s, length = %d\n",
			__func__, tpc, (data_dir == READ) ? "READ" : "WRITE",
			length);

	/* Pick DMA direction and the matching hardware transfer mode. */
	if (data_dir == READ) {
		dma_dir = DMA_DIR_FROM_CARD;
		trans_mode = pro_card ? MS_TM_AUTO_READ : MS_TM_NORMAL_READ;
	} else {
		dma_dir = DMA_DIR_TO_CARD;
		trans_mode = pro_card ? MS_TM_AUTO_WRITE : MS_TM_NORMAL_WRITE;
	}

	rtsx_pci_init_cmd(pcr);

	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_TPC, 0xFF, tpc);
	if (pro_card) {
		/* Auto modes need the sector count programmed up front. */
		rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_SECTOR_CNT_H,
				0xFF, (u8)(sec_cnt >> 8));
		rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_SECTOR_CNT_L,
				0xFF, (u8)sec_cnt);
	}
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_TRANS_CFG, 0xFF, cfg);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, IRQSTAT0,
			DMA_DONE_INT, DMA_DONE_INT);

	/* Program the 32-bit DMA transfer count one byte at a time. */
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, DMATC3, 0xFF, (u8)(length >> 24));
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, DMATC2, 0xFF, (u8)(length >> 16));
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, DMATC1, 0xFF, (u8)(length >> 8));
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, DMATC0, 0xFF, (u8)length);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, DMACTL,
			0x03 | DMA_PACK_SIZE_MASK, dma_dir | DMA_EN | DMA_512);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, CARD_DATA_SOURCE,
			0x01, RING_BUFFER);

	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_TRANSFER,
			0xFF, MS_TRANSFER_START | trans_mode);
	rtsx_pci_add_cmd(pcr, CHECK_REG_CMD, MS_TRANSFER,
			MS_TRANSFER_END, MS_TRANSFER_END);

	/* Kick off the command queue, then run the DMA alongside it. */
	rtsx_pci_send_cmd_no_wait(pcr);

	err = rtsx_pci_transfer_data(pcr, sg, 1, data_dir == READ, 10000);
	if (err < 0) {
		ms_clear_error(host);
		return err;
	}

	/* Check the card-side status bits reported in MS_TRANS_CFG. */
	rtsx_pci_read_register(pcr, MS_TRANS_CFG, &val);
	if (pro_card) {
		if (val & (MS_INT_CMDNK | MS_INT_ERR |
				MS_CRC16_ERR | MS_RDY_TIMEOUT))
			return -EIO;
	} else {
		if (val & (MS_CRC16_ERR | MS_RDY_TIMEOUT))
			return -EIO;
	}

	return 0;
}
/*
 * Write a short (byte-oriented) TPC through the ping-pong buffer.
 *
 * @host:    driver state
 * @tpc:     transfer protocol command to issue
 * @cfg:     value written to MS_TRANS_CFG
 * @cnt:     number of data bytes in @data
 * @data:    payload to stage in the ping-pong buffer (must be non-NULL)
 * @int_reg: optional out-parameter receiving the low 4 INT bits of
 *           MS_TRANS_CFG after the transfer
 *
 * Returns 0 on success, -EINVAL for a NULL payload, -EIO on
 * card-reported errors, or -ETIMEDOUT on other command failures.
 */
static int ms_write_bytes(struct realtek_pci_ms *host, u8 tpc,
		u8 cfg, u8 cnt, u8 *data, u8 *int_reg)
{
	struct rtsx_pcr *pcr = host->pcr;
	int err, i;

	dev_dbg(ms_dev(host), "%s: tpc = 0x%02x\n", __func__, tpc);

	if (!data)
		return -EINVAL;

	rtsx_pci_init_cmd(pcr);

	/* Stage the payload into the ping-pong buffer. */
	for (i = 0; i < cnt; i++)
		rtsx_pci_add_cmd(pcr, WRITE_REG_CMD,
				PPBUF_BASE2 + i, 0xFF, data[i]);
	/* Pad odd-sized transfers with one dummy 0xFF byte. */
	if (cnt % 2)
		rtsx_pci_add_cmd(pcr, WRITE_REG_CMD,
				PPBUF_BASE2 + i, 0xFF, 0xFF);

	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_TPC, 0xFF, tpc);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_BYTE_CNT, 0xFF, cnt);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_TRANS_CFG, 0xFF, cfg);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, CARD_DATA_SOURCE,
			0x01, PINGPONG_BUFFER);

	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_TRANSFER,
			0xFF, MS_TRANSFER_START | MS_TM_WRITE_BYTES);
	rtsx_pci_add_cmd(pcr, CHECK_REG_CMD, MS_TRANSFER,
			MS_TRANSFER_END, MS_TRANSFER_END);
	if (int_reg)
		rtsx_pci_add_cmd(pcr, READ_REG_CMD, MS_TRANS_CFG, 0, 0);

	err = rtsx_pci_send_cmd(pcr, 5000);
	if (err < 0) {
		u8 val;

		/* Fetch the status directly since the batch read never ran. */
		rtsx_pci_read_register(pcr, MS_TRANS_CFG, &val);
		dev_dbg(ms_dev(host), "MS_TRANS_CFG: 0x%02x\n", val);

		if (int_reg)
			*int_reg = val & 0x0F;

		ms_print_debug_regs(host);

		ms_clear_error(host);

		/* NOTE(review): bit 3 of the TPC value appears to select how
		 * errors are classified here — confirm against the MS spec. */
		if (!(tpc & 0x08)) {
			if (val & MS_CRC16_ERR)
				return -EIO;
		} else {
			if (!(val & 0x80)) {
				if (val & (MS_INT_ERR | MS_INT_CMDNK))
					return -EIO;
			}
		}

		return -ETIMEDOUT;
	}

	if (int_reg) {
		/* First byte of cmd data is skipped; the status follows it. */
		u8 *ptr = rtsx_pci_get_cmd_data(pcr) + 1;
		*int_reg = *ptr & 0x0F;
	}

	return 0;
}
/*
 * Read a short (byte-oriented) TPC through the ping-pong buffer.
 *
 * @host:    driver state
 * @tpc:     transfer protocol command to issue
 * @cfg:     value written to MS_TRANS_CFG
 * @cnt:     number of bytes to read into @data
 * @data:    destination buffer (must be non-NULL)
 * @int_reg: optional out-parameter receiving the low 4 INT bits of
 *           MS_TRANS_CFG after the transfer
 *
 * Returns 0 on success, -EINVAL for a NULL buffer, -EIO on
 * card-reported errors, or -ETIMEDOUT on other command failures.
 */
static int ms_read_bytes(struct realtek_pci_ms *host, u8 tpc,
		u8 cfg, u8 cnt, u8 *data, u8 *int_reg)
{
	struct rtsx_pcr *pcr = host->pcr;
	int err, i;
	u8 *ptr;

	dev_dbg(ms_dev(host), "%s: tpc = 0x%02x\n", __func__, tpc);

	if (!data)
		return -EINVAL;

	rtsx_pci_init_cmd(pcr);

	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_TPC, 0xFF, tpc);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_BYTE_CNT, 0xFF, cnt);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_TRANS_CFG, 0xFF, cfg);
	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, CARD_DATA_SOURCE,
			0x01, PINGPONG_BUFFER);

	rtsx_pci_add_cmd(pcr, WRITE_REG_CMD, MS_TRANSFER,
			0xFF, MS_TRANSFER_START | MS_TM_READ_BYTES);
	rtsx_pci_add_cmd(pcr, CHECK_REG_CMD, MS_TRANSFER,
			MS_TRANSFER_END, MS_TRANSFER_END);

	/* Queue reads for the first cnt - 1 bytes of the ping-pong buffer. */
	for (i = 0; i < cnt - 1; i++)
		rtsx_pci_add_cmd(pcr, READ_REG_CMD, PPBUF_BASE2 + i, 0, 0);
	/* NOTE(review): for odd counts the last byte is read from offset
	 * cnt rather than cnt - 1 (offset cnt - 1 is skipped) — presumably
	 * a hardware even-alignment quirk; confirm against the controller
	 * datasheet. */
	if (cnt % 2)
		rtsx_pci_add_cmd(pcr, READ_REG_CMD, PPBUF_BASE2 + cnt, 0, 0);
	else
		rtsx_pci_add_cmd(pcr, READ_REG_CMD,
				PPBUF_BASE2 + cnt - 1, 0, 0);

	if (int_reg)
		rtsx_pci_add_cmd(pcr, READ_REG_CMD, MS_TRANS_CFG, 0, 0);

	err = rtsx_pci_send_cmd(pcr, 5000);
	if (err < 0) {
		u8 val;

		/* Fetch the status directly since the batch read never ran. */
		rtsx_pci_read_register(pcr, MS_TRANS_CFG, &val);
		dev_dbg(ms_dev(host), "MS_TRANS_CFG: 0x%02x\n", val);

		if (int_reg)
			*int_reg = val & 0x0F;

		ms_print_debug_regs(host);

		ms_clear_error(host);

		/* NOTE(review): bit 3 of the TPC value appears to select how
		 * errors are classified here — confirm against the MS spec. */
		if (!(tpc & 0x08)) {
			if (val & MS_CRC16_ERR)
				return -EIO;
		} else {
			if (!(val & 0x80)) {
				if (val & (MS_INT_ERR | MS_INT_CMDNK))
					return -EIO;
			}
		}

		return -ETIMEDOUT;
	}

	/* First byte of cmd data is skipped; payload bytes follow it. */
	ptr = rtsx_pci_get_cmd_data(pcr) + 1;
	for (i = 0; i < cnt; i++)
		data[i] = *ptr++;

	if (int_reg)
		*int_reg = *ptr & 0x0F;

	return 0;
}
static int rtsx_pci_ms_issue_cmd(struct realtek_pci_ms *host)
{
struct memstick_request *req = host->req;
int err = 0;
u8 cfg = 0, int_reg;
dev_dbg(ms_dev(host), "%s\n", __func__);
if (req->need_card_int) {
if (host->ifmode != MEMSTICK_SERIAL)
cfg = WAIT_INT;
}
if (req->long_data) {
err = ms_transfer_data(host, req->data_dir,
req->tpc, cfg, &(req->sg));
} else {
if (req->data_dir == READ) {
err = ms_read_bytes(host, req->tpc, cfg,
req->data_len, req->data, &int_reg);
} else {
err = ms_write_bytes(host, req->tpc, cfg,
req->data_len, req->data, &int_reg);
}
}
if (err < 0)
return err;
if (req->need_card_int && (host->ifmode == MEMSTICK_SERIAL)) {
err = ms_read_bytes(host, MS_TPC_GET_INT,
NO_WAIT_INT, 1, &int_reg, NULL);
if (err < 0)
return err;
}
if (req->need_card_int) {
dev_dbg(ms_dev(host), "int_reg: 0x%02x\n", int_reg);
if (int_reg & MS_INT_CMDNK)
req->int_reg |= MEMSTICK_INT_CMDNAK;
if (int_reg & MS_INT_BREQ)
req->int_reg |= MEMSTICK_INT_BREQ;
if (int_reg & MS_INT_ERR)
req->int_reg |= MEMSTICK_INT_ERR;
if (int_reg & MS_INT_CED)
req->int_reg |= MEMSTICK_INT_CED;
}
return 0;
}
/*
 * Work handler that drains the memstick request queue.
 *
 * The controller is shared with other card types, so the MS clock,
 * module selection and bus routing are re-applied each time under
 * pcr_mutex.  Requests are then pulled from the core one at a time via
 * memstick_next_req() until it reports no more work.
 */
static void rtsx_pci_ms_handle_req(struct work_struct *work)
{
	struct realtek_pci_ms *host = container_of(work,
			struct realtek_pci_ms, handle_req);
	struct rtsx_pcr *pcr = host->pcr;
	struct memstick_host *msh = host->msh;
	int rc;

	mutex_lock(&pcr->pcr_mutex);

	rtsx_pci_start_run(pcr);

	/* Restore the MS clock/routing (may have been changed by SD/etc.). */
	rtsx_pci_switch_clock(host->pcr, host->clock, host->ssc_depth,
			false, true, false);
	rtsx_pci_write_register(pcr, CARD_SELECT, 0x07, MS_MOD_SEL);
	rtsx_pci_write_register(pcr, CARD_SHARE_MODE,
			CARD_SHARE_MASK, CARD_SHARE_48_MS);

	if (!host->req) {
		do {
			rc = memstick_next_req(msh, &host->req);
			dev_dbg(ms_dev(host), "next req %d\n", rc);

			if (!rc)
				host->req->error = rtsx_pci_ms_issue_cmd(host);
		} while (!rc);
	}

	mutex_unlock(&pcr->pcr_mutex);
}
/*
 * memstick core callback: a new request is available.  Defers the
 * actual processing to the workqueue; bails out early if another card
 * type currently has exclusive use of the shared controller.
 */
static void rtsx_pci_ms_request(struct memstick_host *msh)
{
	struct realtek_pci_ms *host = memstick_priv(msh);

	dev_dbg(ms_dev(host), "--> %s\n", __func__);

	if (rtsx_pci_card_exclusive_check(host->pcr, RTSX_MS_CARD))
		return;

	/* NOTE(review): host->eject is not checked here; requests raised
	 * during removal rely on drv_remove()'s cancel_work_sync() —
	 * confirm that ordering is sufficient. */
	schedule_work(&host->handle_req);
}
/*
 * memstick core callback: change a host parameter.
 *
 * MEMSTICK_POWER switches slot power on or off; MEMSTICK_INTERFACE
 * selects serial (1-bit, 19 MHz) or 4-bit parallel (39 MHz) mode,
 * programs MS_CFG accordingly and switches the card clock.  The chosen
 * clock/SSC/interface settings are cached on the host so the work
 * handler can restore them later.
 *
 * Returns 0 on success or a negative error code (-EINVAL for an
 * unsupported value).
 */
static int rtsx_pci_ms_set_param(struct memstick_host *msh,
		enum memstick_param param, int value)
{
	struct realtek_pci_ms *host = memstick_priv(msh);
	struct rtsx_pcr *pcr = host->pcr;
	unsigned int clock = 0;
	u8 ssc_depth = 0;
	int err;

	dev_dbg(ms_dev(host), "%s: param = %d, value = %d\n",
			__func__, param, value);

	err = rtsx_pci_card_exclusive_check(host->pcr, RTSX_MS_CARD);
	if (err)
		return err;

	switch (param) {
	case MEMSTICK_POWER:
		if (value == MEMSTICK_POWER_ON)
			err = ms_power_on(host);
		else if (value == MEMSTICK_POWER_OFF)
			err = ms_power_off(host);
		else
			return -EINVAL;
		break;

	case MEMSTICK_INTERFACE:
		if (value == MEMSTICK_SERIAL) {
			clock = 19000000;
			ssc_depth = RTSX_SSC_DEPTH_500K;

			err = rtsx_pci_write_register(pcr, MS_CFG, 0x58,
					MS_BUS_WIDTH_1 | PUSH_TIME_DEFAULT);
			if (err < 0)
				return err;
		} else if (value == MEMSTICK_PAR4) {
			clock = 39000000;
			ssc_depth = RTSX_SSC_DEPTH_1M;

			err = rtsx_pci_write_register(pcr, MS_CFG,
					0x58, MS_BUS_WIDTH_4 | PUSH_TIME_ODD);
			if (err < 0)
				return err;
		} else {
			return -EINVAL;
		}

		err = rtsx_pci_switch_clock(pcr, clock,
				ssc_depth, false, true, false);
		if (err < 0)
			return err;

		/* Cache settings so rtsx_pci_ms_handle_req() can restore them. */
		host->ssc_depth = ssc_depth;
		host->clock = clock;
		host->ifmode = value;
		break;
	}

	return 0;
}
#ifdef CONFIG_PM
/* Legacy platform PM hook: forward suspend to the memstick core. */
static int rtsx_pci_ms_suspend(struct platform_device *pdev, pm_message_t state)
{
	struct realtek_pci_ms *host = platform_get_drvdata(pdev);
	struct memstick_host *msh = host->msh;

	dev_dbg(ms_dev(host), "--> %s\n", __func__);

	memstick_suspend_host(msh);
	return 0;
}

/* Legacy platform PM hook: forward resume to the memstick core. */
static int rtsx_pci_ms_resume(struct platform_device *pdev)
{
	struct realtek_pci_ms *host = platform_get_drvdata(pdev);
	struct memstick_host *msh = host->msh;

	dev_dbg(ms_dev(host), "--> %s\n", __func__);

	memstick_resume_host(msh);
	return 0;
}
#else /* CONFIG_PM */

/* Without CONFIG_PM the driver registers no suspend/resume callbacks. */
#define rtsx_pci_ms_suspend NULL
#define rtsx_pci_ms_resume NULL

#endif /* CONFIG_PM */
/*
 * Card insert/remove notification from the rtsx MFD core; asks the
 * memstick layer to re-detect the slot.
 */
static void rtsx_pci_ms_card_event(struct platform_device *pdev)
{
	struct realtek_pci_ms *host = platform_get_drvdata(pdev);

	memstick_detect_change(host->msh);
}
/*
 * Platform probe: allocate a memstick host, wire it to the shared rtsx
 * PCI controller handle passed via platform data, register the card
 * event callback with the MFD core, and add the host to the memstick
 * layer.
 *
 * Returns 0 on success, -ENXIO when the platform data or controller
 * handle is missing, -ENOMEM on allocation failure, or the error from
 * memstick_add_host().
 */
static int rtsx_pci_ms_drv_probe(struct platform_device *pdev)
{
	struct memstick_host *msh;
	struct realtek_pci_ms *host;
	struct rtsx_pcr *pcr;
	struct pcr_handle *handle = pdev->dev.platform_data;
	int rc;

	if (!handle)
		return -ENXIO;

	pcr = handle->pcr;
	if (!pcr)
		return -ENXIO;

	dev_dbg(&(pdev->dev),
			": Realtek PCI-E Memstick controller found\n");

	msh = memstick_alloc_host(sizeof(*host), &pdev->dev);
	if (!msh)
		return -ENOMEM;

	host = memstick_priv(msh);
	host->pcr = pcr;
	host->msh = msh;
	host->pdev = pdev;
	platform_set_drvdata(pdev, host);
	/* Register with the MFD core so hotplug events reach this slot. */
	pcr->slots[RTSX_MS_CARD].p_dev = pdev;
	pcr->slots[RTSX_MS_CARD].card_event = rtsx_pci_ms_card_event;

	mutex_init(&host->host_mutex);

	INIT_WORK(&host->handle_req, rtsx_pci_ms_handle_req);
	msh->request = rtsx_pci_ms_request;
	msh->set_param = rtsx_pci_ms_set_param;
	msh->caps = MEMSTICK_CAP_PAR4;

	rc = memstick_add_host(msh);
	if (rc) {
		memstick_free_host(msh);
		return rc;
	}

	return 0;
}
/*
 * Platform remove: unhook from the MFD core, flush pending work, fail
 * any in-flight request with -ENOMEDIUM (draining the core's queue so
 * it does not stall), then unregister and free the memstick host.
 */
static int rtsx_pci_ms_drv_remove(struct platform_device *pdev)
{
	struct realtek_pci_ms *host = platform_get_drvdata(pdev);
	struct rtsx_pcr *pcr;
	struct memstick_host *msh;
	int rc;

	if (!host)
		return 0;

	/* Stop further hotplug callbacks from the MFD core. */
	pcr = host->pcr;
	pcr->slots[RTSX_MS_CARD].p_dev = NULL;
	pcr->slots[RTSX_MS_CARD].card_event = NULL;
	msh = host->msh;
	host->eject = true;
	cancel_work_sync(&host->handle_req);

	mutex_lock(&host->host_mutex);
	if (host->req) {
		dev_dbg(&(pdev->dev),
			"%s: Controller removed during transfer\n",
			dev_name(&msh->dev));

		rtsx_pci_complete_unfinished_transfer(pcr);

		/* Drain the request queue so the memstick core unblocks. */
		host->req->error = -ENOMEDIUM;
		do {
			rc = memstick_next_req(msh, &host->req);
			if (!rc)
				host->req->error = -ENOMEDIUM;
		} while (!rc);
	}
	mutex_unlock(&host->host_mutex);

	memstick_remove_host(msh);
	memstick_free_host(msh);

	dev_dbg(&(pdev->dev),
		": Realtek PCI-E Memstick controller has been removed\n");

	return 0;
}
/* Matched against the platform device registered by the rtsx MFD core. */
static struct platform_device_id rtsx_pci_ms_ids[] = {
	{
		.name = DRV_NAME_RTSX_PCI_MS,
	}, {
		/* sentinel */
	}
};
MODULE_DEVICE_TABLE(platform, rtsx_pci_ms_ids);

static struct platform_driver rtsx_pci_ms_driver = {
	.probe		= rtsx_pci_ms_drv_probe,
	.remove		= rtsx_pci_ms_drv_remove,
	.id_table       = rtsx_pci_ms_ids,
	.suspend	= rtsx_pci_ms_suspend,
	.resume		= rtsx_pci_ms_resume,
	.driver		= {
		.name	= DRV_NAME_RTSX_PCI_MS,
	},
};
module_platform_driver(rtsx_pci_ms_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Wei WANG <wei_wang@realsil.com.cn>");
MODULE_DESCRIPTION("Realtek PCI-E Memstick Card Host Driver");
|
{
"pile_set_name": "Github"
}
|
<?php
/**
* This file is part of the ramsey/uuid library
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
* @copyright Copyright (c) Ben Ramsey <ben@benramsey.com>
* @license http://opensource.org/licenses/MIT MIT
* @link https://benramsey.com/projects/ramsey-uuid/ Documentation
* @link https://packagist.org/packages/ramsey/uuid Packagist
* @link https://github.com/ramsey/uuid GitHub
*/
namespace Ramsey\Uuid\Provider\Node;
use Ramsey\Uuid\Provider\NodeProviderInterface;
/**
* SystemNodeProvider provides functionality to get the system node ID (MAC
* address) using external system calls
*/
/**
 * SystemNodeProvider provides functionality to get the system node ID (MAC
 * address) using external system calls
 */
class SystemNodeProvider implements NodeProviderInterface
{
    /**
     * Returns the system node ID
     *
     * The result is memoized in a static variable so the external lookups
     * run at most once per process.
     *
     * @return string|false System node ID as a hexadecimal string with
     *     separators stripped, or false if it is not found
     */
    public function getNode()
    {
        static $node = null;

        if ($node !== null) {
            return $node;
        }

        // Matches a 6-octet MAC address, colon- or hyphen-separated.
        // NOTE(review): the surrounding [^:] guards require a non-colon
        // character on each side, so a MAC at the very start or end of the
        // command output would be missed — confirm whether that can occur
        // on supported platforms.
        $pattern = '/[^:]([0-9A-Fa-f]{2}([:-])[0-9A-Fa-f]{2}(\2[0-9A-Fa-f]{2}){4})[^:]/';
        $matches = array();

        // first try a linux specific way
        $node = $this->getSysfs();

        // Search the ifconfig output for all MAC addresses and return
        // the first one found
        if ($node === false) {
            if (preg_match_all($pattern, $this->getIfconfig(), $matches, PREG_PATTERN_ORDER)) {
                $node = $matches[1][0];
            }
        }

        // Normalize to a bare hexadecimal string by dropping separators.
        if ($node !== false) {
            $node = str_replace([':', '-'], '', $node);
        }
        return $node;
    }

    /**
     * Returns the network interface configuration for the system
     *
     * Shells out to the platform-appropriate command (ipconfig, ifconfig
     * or netstat) and captures its combined output. Returns an empty
     * string when passthru() is disabled via disable_functions.
     *
     * @codeCoverageIgnore
     * @return string
     */
    protected function getIfconfig()
    {
        if (strpos(strtolower(ini_get('disable_functions')), 'passthru') !== false) {
            return '';
        }

        // Buffer the command's direct output so it can be returned.
        ob_start();
        switch (strtoupper(substr(php_uname('a'), 0, 3))) {
            case 'WIN':
                passthru('ipconfig /all 2>&1');
                break;
            case 'DAR':
                passthru('ifconfig 2>&1');
                break;
            case 'FRE':
                passthru('netstat -i -f link 2>&1');
                break;
            case 'LIN':
            default:
                passthru('netstat -ie 2>&1');
                break;
        }

        return ob_get_clean();
    }

    /**
     * Returns mac address from the first system interface via the sysfs interface
     *
     * Reads /sys/class/net/<iface>/address for every interface (Linux
     * only), filters out the loopback and malformed entries, and returns
     * the first remaining address.
     *
     * @return string|bool MAC address string, or false when not on Linux
     *     or when no valid address was found
     */
    protected function getSysfs()
    {
        $mac = false;

        if (strtoupper(php_uname('s')) === 'LINUX') {
            $addressPaths = glob('/sys/class/net/*/address', GLOB_NOSORT);

            if (empty($addressPaths)) {
                return false;
            }

            // $macs is created implicitly by reference inside the closure.
            array_walk($addressPaths, function ($addressPath) use (&$macs) {
                $macs[] = file_get_contents($addressPath);
            });

            $macs = array_map('trim', $macs);

            // remove invalid entries
            $macs = array_filter($macs, function ($mac) {
                return
                    // localhost adapter
                    $mac !== '00:00:00:00:00:00' &&
                    // must match mac address
                    preg_match('/^([0-9a-f]{2}:){5}[0-9a-f]{2}$/i', $mac);
            });

            // reset() returns false for an empty array, matching the
            // "not found" contract.
            $mac = reset($macs);
        }

        return $mac;
    }
}
|
{
"pile_set_name": "Github"
}
|
{
"created_at": "2015-02-27T22:29:09.078252",
"description": "A beanstalkd worker package",
"fork": false,
"full_name": "eramus/worker",
"language": "Go",
"updated_at": "2015-02-27T23:44:01.137023"
}
|
{
"pile_set_name": "Github"
}
|
/* Class = "UILabel"; text = "Trust"; ObjectID = "4kz-Xh-2Ui"; */
"4kz-Xh-2Ui.text" = "Încredere";
/* Class = "UILabel"; text = "jid"; ObjectID = "6bI-Lh-Qhb"; */
"6bI-Lh-Qhb.text" = "ID";
/* Class = "UILabel"; text = "Device:"; ObjectID = "aTq-lO-Xbf"; */
"aTq-lO-Xbf.text" = "Dispozitiv:";
/* Class = "UILabel"; text = "deviceid"; ObjectID = "BIp-1h-2Q5"; */
"BIp-1h-2Q5.text" = "ID dispozitiv";
/* Class = "UILabel"; text = "name"; ObjectID = "DFP-48-Sgb"; */
"DFP-48-Sgb.text" = "nume";
/* Class = "UITextView"; text = "log_placeholder"; ObjectID = "esK-jI-cAB"; */
"esK-jI-cAB.text" = "log_placeholder";
/* Class = "UILabel"; text = "05C8299D ABF17279 01A72DC6 4231E407 B90BEF9D 3746C910 01C5444C 47DC66E6"; ObjectID = "fop-Br-WiN"; */
"fop-Br-WiN.text" = "05C8299D ABF17279 01A72DC6 4231E407 B90BEF9D 3746C910 01C5444C 47DC66E6";
/* Class = "UILabel"; text = "Calling"; ObjectID = "ggA-Jy-Lki"; */
"ggA-Jy-Lki.text" = "Apelare";
/* Class = "UILabel"; text = "Not a Contact, can't see keys"; ObjectID = "hFV-Qh-Uid"; */
"hFV-Qh-Uid.text" = "Nu este o persoană de contact, nu se pot vedea cheile";
/* Class = "UILabel"; text = "lastInteraction"; ObjectID = "ksn-PV-E6O"; */
"ksn-PV-E6O.text" = "ultima interacțiune";
/* Class = "UIBarButtonItem"; title = "Close"; ObjectID = "urj-qd-yc7"; */
"urj-qd-yc7.title" = "Închide";
/* Class = "UIButton"; normalTitle = "Cancel"; ObjectID = "X6h-fs-C64"; */
"X6h-fs-C64.normalTitle" = "Anulare";
|
{
"pile_set_name": "Github"
}
|
// Check miscellaneous Objective-C sdk migration options.
// rdar://19994452
// RUN: %clang -objcmt-migrate-property-dot-syntax -target x86_64-apple-darwin10 -S -### %s \
// RUN: -arch x86_64 2> %t
// RUN: FileCheck < %t %s
// CHECK: "-cc1"
// CHECK: -objcmt-migrate-property-dot-syntax
|
{
"pile_set_name": "Github"
}
|
// Copyright (c) 2014-2018 The Bitcoin Core developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include <test/test_bitcoin.h>
#include <util/strencodings.h>
#include <wallet/crypter.h>
#include <vector>
#include <boost/test/unit_test.hpp>
BOOST_FIXTURE_TEST_SUITE(wallet_crypto_tests, BasicTestingSetup)
/*
 * Helper wrapping CCrypter to exercise passphrase-based key derivation
 * and encrypt/decrypt round-trips. Declared as a class (not free
 * functions) so it can be a friend of CCrypter and reach vchKey/vchIV.
 */
class TestCrypter
{
public:
// Derive a key/IV from one passphrase+salt and, when expected values are
// supplied, compare them byte-for-byte against the derived material.
static void TestPassphraseSingle(const std::vector<unsigned char>& vchSalt, const SecureString& passphrase, uint32_t rounds,
                 const std::vector<unsigned char>& correctKey = std::vector<unsigned char>(),
                 const std::vector<unsigned char>& correctIV=std::vector<unsigned char>())
{
    CCrypter crypt;
    crypt.SetKeyFromPassphrase(passphrase, vchSalt, rounds, 0);

    if(!correctKey.empty())
        BOOST_CHECK_MESSAGE(memcmp(crypt.vchKey.data(), correctKey.data(), crypt.vchKey.size()) == 0, \
            HexStr(crypt.vchKey.begin(), crypt.vchKey.end()) + std::string(" != ") + HexStr(correctKey.begin(), correctKey.end()));
    if(!correctIV.empty())
        BOOST_CHECK_MESSAGE(memcmp(crypt.vchIV.data(), correctIV.data(), crypt.vchIV.size()) == 0,
            HexStr(crypt.vchIV.begin(), crypt.vchIV.end()) + std::string(" != ") + HexStr(correctIV.begin(), correctIV.end()));
}

// Run TestPassphraseSingle on the full passphrase, then on every suffix
// of it (without expected values) to exercise varying input lengths.
static void TestPassphrase(const std::vector<unsigned char>& vchSalt, const SecureString& passphrase, uint32_t rounds,
                 const std::vector<unsigned char>& correctKey = std::vector<unsigned char>(),
                 const std::vector<unsigned char>& correctIV=std::vector<unsigned char>())
{
    TestPassphraseSingle(vchSalt, passphrase, rounds, correctKey, correctIV);
    for(SecureString::const_iterator i(passphrase.begin()); i != passphrase.end(); ++i)
        TestPassphraseSingle(vchSalt, SecureString(i, passphrase.end()), rounds);
}

// Decrypt a ciphertext and, when the expected plaintext is supplied,
// check the round-trip result matches it.
static void TestDecrypt(const CCrypter& crypt, const std::vector<unsigned char>& vchCiphertext, \
                        const std::vector<unsigned char>& vchPlaintext = std::vector<unsigned char>())
{
    CKeyingMaterial vchDecrypted;
    crypt.Decrypt(vchCiphertext, vchDecrypted);
    if (vchPlaintext.size())
        BOOST_CHECK(CKeyingMaterial(vchPlaintext.begin(), vchPlaintext.end()) == vchDecrypted);
}

// Encrypt one plaintext, optionally compare against a known ciphertext,
// then verify that decryption restores the original plaintext.
static void TestEncryptSingle(const CCrypter& crypt, const CKeyingMaterial& vchPlaintext,
                              const std::vector<unsigned char>& vchCiphertextCorrect = std::vector<unsigned char>())
{
    std::vector<unsigned char> vchCiphertext;
    crypt.Encrypt(vchPlaintext, vchCiphertext);

    if (!vchCiphertextCorrect.empty())
        BOOST_CHECK(vchCiphertext == vchCiphertextCorrect);

    const std::vector<unsigned char> vchPlaintext2(vchPlaintext.begin(), vchPlaintext.end());
    TestDecrypt(crypt, vchCiphertext, vchPlaintext2);
}

// Run TestEncryptSingle on the full plaintext and on every suffix of it.
static void TestEncrypt(const CCrypter& crypt, const std::vector<unsigned char>& vchPlaintextIn, \
                        const std::vector<unsigned char>& vchCiphertextCorrect = std::vector<unsigned char>())
{
    TestEncryptSingle(crypt, CKeyingMaterial(vchPlaintextIn.begin(), vchPlaintextIn.end()), vchCiphertextCorrect);
    for(std::vector<unsigned char>::const_iterator i(vchPlaintextIn.begin()); i != vchPlaintextIn.end(); ++i)
        TestEncryptSingle(crypt, CKeyingMaterial(i, vchPlaintextIn.end()));
}

};
// Key derivation: check a fixed salt/passphrase/rounds combination against
// a known-good key/IV vector, then derive with a random salt, passphrase
// and round count (capped to bound runtime) to exercise arbitrary inputs.
BOOST_AUTO_TEST_CASE(passphrase) {
    // These are expensive.

    TestCrypter::TestPassphrase(ParseHex("0000deadbeef0000"), "test", 25000, \
                                ParseHex("fc7aba077ad5f4c3a0988d8daa4810d0d4a0e3bcb53af662998898f33df0556a"), \
                                ParseHex("cf2f2691526dd1aa220896fb8bf7c369"));

    std::string hash(GetRandHash().ToString());
    std::vector<unsigned char> vchSalt(8);
    GetRandBytes(vchSalt.data(), vchSalt.size());
    uint32_t rounds = InsecureRand32();
    if (rounds > 30000)
        rounds = 30000;

    TestCrypter::TestPassphrase(vchSalt, SecureString(hash.begin(), hash.end()), rounds);
}
// Encryption: round-trip a fixed plaintext plus 100 random 32-byte
// plaintexts through Encrypt/Decrypt with a fixed passphrase-derived key.
BOOST_AUTO_TEST_CASE(encrypt) {
    std::vector<unsigned char> vchSalt = ParseHex("0000deadbeef0000");
    BOOST_CHECK(vchSalt.size() == WALLET_CRYPTO_SALT_SIZE);
    CCrypter crypt;
    crypt.SetKeyFromPassphrase("passphrase", vchSalt, 25000, 0);
    TestCrypter::TestEncrypt(crypt, ParseHex("22bcade09ac03ff6386914359cfe885cfeb5f77ff0d670f102f619687453b29d"));

    for (int i = 0; i != 100; i++)
    {
        uint256 hash(GetRandHash());
        TestCrypter::TestEncrypt(crypt, std::vector<unsigned char>(hash.begin(), hash.end()));
    }

}
// Decryption: feed fixed ciphertexts (historical corner cases) and 100
// random 32-byte inputs through Decrypt to ensure it handles them cleanly.
BOOST_AUTO_TEST_CASE(decrypt) {
    std::vector<unsigned char> vchSalt = ParseHex("0000deadbeef0000");
    BOOST_CHECK(vchSalt.size() == WALLET_CRYPTO_SALT_SIZE);
    CCrypter crypt;
    crypt.SetKeyFromPassphrase("passphrase", vchSalt, 25000, 0);

    // Some corner cases that came up while testing
    TestCrypter::TestDecrypt(crypt,ParseHex("795643ce39d736088367822cdc50535ec6f103715e3e48f4f3b1a60a08ef59ca"));
    TestCrypter::TestDecrypt(crypt,ParseHex("de096f4a8f9bd97db012aa9d90d74de8cdea779c3ee8bc7633d8b5d6da703486"));
    TestCrypter::TestDecrypt(crypt,ParseHex("32d0a8974e3afd9c6c3ebf4d66aa4e6419f8c173de25947f98cf8b7ace49449c"));
    TestCrypter::TestDecrypt(crypt,ParseHex("e7c055cca2faa78cb9ac22c9357a90b4778ded9b2cc220a14cea49f931e596ea"));
    TestCrypter::TestDecrypt(crypt,ParseHex("b88efddd668a6801d19516d6830da4ae9811988ccbaf40df8fbb72f3f4d335fd"));
    TestCrypter::TestDecrypt(crypt,ParseHex("8cae76aa6a43694e961ebcb28c8ca8f8540b84153d72865e8561ddd93fa7bfa9"));

    for (int i = 0; i != 100; i++)
    {
        uint256 hash(GetRandHash());
        TestCrypter::TestDecrypt(crypt, std::vector<unsigned char>(hash.begin(), hash.end()));
    }
}
BOOST_AUTO_TEST_SUITE_END()
|
{
"pile_set_name": "Github"
}
|
fileFormatVersion: 2
guid: e27a7e1e72ab4db4c92267ac625a2926
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
|
{
"pile_set_name": "Github"
}
|
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2019_07_01;
import com.microsoft.azure.arm.collection.SupportsCreating;
import com.microsoft.azure.arm.resources.collection.SupportsDeletingByResourceGroup;
import com.microsoft.azure.arm.resources.collection.SupportsBatchDeletion;
import com.microsoft.azure.arm.resources.collection.SupportsGettingByResourceGroup;
import rx.Observable;
import com.microsoft.azure.arm.resources.collection.SupportsListingByResourceGroup;
import com.microsoft.azure.management.network.v2019_07_01.implementation.ConnectionSharedKeyInner;
import com.microsoft.azure.management.network.v2019_07_01.implementation.VirtualNetworkGatewayConnectionsInner;
import com.microsoft.azure.arm.model.HasInner;
/**
* Type representing VirtualNetworkGatewayConnections.
*/
public interface VirtualNetworkGatewayConnections extends SupportsCreating<VirtualNetworkGatewayConnection.DefinitionStages.Blank>, SupportsDeletingByResourceGroup, SupportsBatchDeletion, SupportsGettingByResourceGroup<VirtualNetworkGatewayConnection>, SupportsListingByResourceGroup<VirtualNetworkGatewayConnection>, HasInner<VirtualNetworkGatewayConnectionsInner> {
    /**
     * The Put VirtualNetworkGatewayConnectionSharedKey operation sets the virtual network gateway connection shared key for passed virtual network gateway connection in the specified resource group through Network resource provider.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkGatewayConnectionName The virtual network gateway connection name.
     * @param parameters Parameters supplied to the Begin Set Virtual Network Gateway connection Shared key operation through Network resource provider.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    Observable<ConnectionSharedKey> setSharedKeyAsync(String resourceGroupName, String virtualNetworkGatewayConnectionName, ConnectionSharedKeyInner parameters);

    /**
     * The Get VirtualNetworkGatewayConnectionSharedKey operation retrieves information about the specified virtual network gateway connection shared key through Network resource provider.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkGatewayConnectionName The virtual network gateway connection shared key name.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    Observable<ConnectionSharedKey> getSharedKeyAsync(String resourceGroupName, String virtualNetworkGatewayConnectionName);

    /**
     * The VirtualNetworkGatewayConnectionResetSharedKey operation resets the virtual network gateway connection shared key for passed virtual network gateway connection in the specified resource group through Network resource provider.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkGatewayConnectionName The virtual network gateway connection reset shared key Name.
     * @param keyLength The virtual network connection reset shared key length; should be between 1 and 128.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    Observable<ConnectionResetSharedKey> resetSharedKeyAsync(String resourceGroupName, String virtualNetworkGatewayConnectionName, int keyLength);

    /**
     * Starts packet capture on virtual network gateway connection in the specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkGatewayConnectionName The name of the virtual network gateway connection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    Observable<String> startPacketCaptureAsync(String resourceGroupName, String virtualNetworkGatewayConnectionName);

    /**
     * Stops packet capture on virtual network gateway connection in the specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualNetworkGatewayConnectionName The name of the virtual network gateway connection.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    Observable<String> stopPacketCaptureAsync(String resourceGroupName, String virtualNetworkGatewayConnectionName);

}
|
{
"pile_set_name": "Github"
}
|
{
"name": "elasticsearch-orders",
"config": {
"connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
"topics": "orders",
"connection.url": "$ELASTICSEARCH_URL",
"type.name": "microservices",
"key.ignore": true,
"key.converter": "org.apache.kafka.connect.storage.StringConverter",
"value.converter": "io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url": "$SCHEMA_REGISTRY_URL",
"value.converter.basic.auth.credentials.source": "$BASIC_AUTH_CREDENTIALS_SOURCE",
"value.converter.schema.registry.basic.auth.user.info": "$SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO",
"schema.ignore": true
}
}
|
{
"pile_set_name": "Github"
}
|
// Protocol Buffers for Go with Gadgets
//
// Copyright (c) 2013, The GoGo Authors. All rights reserved.
// http://github.com/gogo/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package test
import "testing"
// TestGetterExists ensures the generated GetInt32 getter is present on
// CastType by invoking it on a freshly constructed zero-value instance.
func TestGetterExists(t *testing.T) {
	instance := new(CastType)
	_ = instance.GetInt32()
}
|
{
"pile_set_name": "Github"
}
|
<?php
/**
* Copyright © Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
namespace Magento\Config\Controller\Adminhtml\System\Config;
use Magento\Framework\App\Action\HttpPostActionInterface as HttpPostActionInterface;
use Magento\Config\Controller\Adminhtml\System\AbstractConfig;
/**
* System Configuration Save Controller
*
* @author Magento Core Team <core@magentocommerce.com>
* @SuppressWarnings(PHPMD.CouplingBetweenObjects)
*/
class Save extends AbstractConfig implements HttpPostActionInterface
{
    /**
     * Backend Config Model Factory
     *
     * @var \Magento\Config\Model\Config\Factory
     */
    protected $_configFactory;
    /**
     * Cache frontend; cleaned by _saveAdvanced() when the "advanced" section is saved.
     *
     * @var \Magento\Framework\Cache\FrontendInterface
     */
    protected $_cache;
    /**
     * String utilities; used by _saveSection() to derive the per-section save method name.
     *
     * @var \Magento\Framework\Stdlib\StringUtils
     */
    protected $string;
    /**
     * @param \Magento\Backend\App\Action\Context $context
     * @param \Magento\Config\Model\Config\Structure $configStructure
     * @param \Magento\Config\Controller\Adminhtml\System\ConfigSectionChecker $sectionChecker
     * @param \Magento\Config\Model\Config\Factory $configFactory
     * @param \Magento\Framework\Cache\FrontendInterface $cache
     * @param \Magento\Framework\Stdlib\StringUtils $string
     */
    public function __construct(
        \Magento\Backend\App\Action\Context $context,
        \Magento\Config\Model\Config\Structure $configStructure,
        \Magento\Config\Controller\Adminhtml\System\ConfigSectionChecker $sectionChecker,
        \Magento\Config\Model\Config\Factory $configFactory,
        \Magento\Framework\Cache\FrontendInterface $cache,
        \Magento\Framework\Stdlib\StringUtils $string
    ) {
        parent::__construct($context, $configStructure, $sectionChecker);
        $this->_configFactory = $configFactory;
        $this->_cache = $cache;
        $this->string = $string;
    }
    /**
     * ACL check: requires the base permission plus access to the requested section.
     *
     * @inheritdoc
     */
    protected function _isAllowed()
    {
        return parent::_isAllowed() && $this->isSectionAllowed();
    }
    /**
     * Checks if user has access to section.
     *
     * When the requested section itself is not allowed, the first submitted
     * field may be remapped (via its config_path) to a different section;
     * in that case access is re-checked against that resolved section.
     *
     * @return bool
     */
    private function isSectionAllowed(): bool
    {
        $sectionId = $this->_request->getParam('section');
        $isAllowed = $this->_configStructure->getElement($sectionId)->isAllowed();
        if (!$isAllowed) {
            // NOTE(review): getPost('groups') may return null when no groups are
            // submitted, which would TypeError in getFirstFieldPath() — confirm
            // callers always post a 'groups' array.
            $groups = $this->getRequest()->getPost('groups');
            $fieldPath = $this->getFirstFieldPath($groups, $sectionId);
            // Map the submitted field path to its declared config path, falling
            // back to the original section when no mapping exists.
            $fieldPaths = $this->_configStructure->getFieldPaths();
            $fieldPath = $fieldPaths[$fieldPath][0] ?? $sectionId;
            $explodedConfigPath = explode('/', $fieldPath);
            $configSectionId = $explodedConfigPath[0] ?? $sectionId;
            $isAllowed = $this->_configStructure->getElement($configSectionId)->isAllowed();
        }
        return $isAllowed;
    }
    /**
     * Return field path as string.
     *
     * Descends only into the first non-empty element at each level
     * (note the break), producing the path of the first submitted field.
     *
     * @param array $elements
     * @param string $fieldPath
     * @return string
     */
    private function getFirstFieldPath(array $elements, string $fieldPath): string
    {
        $groupData = [];
        foreach ($elements as $elementName => $element) {
            if (!empty($element)) {
                $fieldPath .= '/' . $elementName;
                if (!empty($element['fields'])) {
                    $groupData = $element['fields'];
                } elseif (!empty($element['groups'])) {
                    $groupData = $element['groups'];
                }
                if (!empty($groupData)) {
                    $fieldPath = $this->getFirstFieldPath($groupData, $fieldPath);
                }
                break;
            }
        }
        return $fieldPath;
    }
    /**
     * Get groups for save
     *
     * Combines posted group values with uploaded files for the same groups.
     *
     * @return array|null
     */
    protected function _getGroupsForSave()
    {
        $groups = $this->getRequest()->getPost('groups');
        $files = $this->getRequest()->getFiles('groups');
        if ($files && is_array($files)) {
            /**
             * Carefully merge $_FILES and $_POST information
             * None of '+=' or 'array_merge_recursive' can do this correct
             */
            foreach ($files as $groupName => $group) {
                $data = $this->_processNestedGroups($group);
                if (!empty($data)) {
                    if (!empty($groups[$groupName])) {
                        $groups[$groupName] = array_merge_recursive((array)$groups[$groupName], $data);
                    } else {
                        $groups[$groupName] = $data;
                    }
                }
            }
        }
        return $groups;
    }
    /**
     * Process nested groups
     *
     * Recursively keeps only fields with non-empty 'value' entries,
     * preserving the fields/groups nesting structure.
     *
     * @param mixed $group
     * @return array
     */
    protected function _processNestedGroups($group)
    {
        $data = [];
        if (isset($group['fields']) && is_array($group['fields'])) {
            foreach ($group['fields'] as $fieldName => $field) {
                if (!empty($field['value'])) {
                    $data['fields'][$fieldName] = ['value' => $field['value']];
                }
            }
        }
        if (isset($group['groups']) && is_array($group['groups'])) {
            foreach ($group['groups'] as $groupName => $groupData) {
                $nestedGroup = $this->_processNestedGroups($groupData);
                if (!empty($nestedGroup)) {
                    $data['groups'][$groupName] = $nestedGroup;
                }
            }
        }
        return $data;
    }
    /**
     * Custom save logic for section
     *
     * Dispatches to a method named '_save' + CamelCased section id when such
     * a method exists on this class (e.g. section 'advanced' -> _saveAdvanced).
     *
     * @return void
     */
    protected function _saveSection()
    {
        $method = '_save' . $this->string->upperCaseWords($this->getRequest()->getParam('section'), '_', '');
        if (method_exists($this, $method)) {
            $this->{$method}();
        }
    }
    /**
     * Advanced save procedure
     *
     * @return void
     */
    protected function _saveAdvanced()
    {
        $this->_cache->clean();
    }
    /**
     * Save configuration
     *
     * Runs section-specific pre-save logic, filters submitted data against
     * system.xml, persists it, dispatches 'admin_system_config_save', and
     * redirects back to the edit page with flash messages.
     *
     * @return \Magento\Backend\Model\View\Result\Redirect
     */
    public function execute()
    {
        try {
            // custom save logic
            $this->_saveSection();
            $section = $this->getRequest()->getParam('section');
            $website = $this->getRequest()->getParam('website');
            $store = $this->getRequest()->getParam('store');
            $configData = [
                'section' => $section,
                'website' => $website,
                'store' => $store,
                'groups' => $this->_getGroupsForSave(),
            ];
            // Drop any submitted paths that are not declared in system.xml.
            $configData = $this->filterNodes($configData);
            /** @var \Magento\Config\Model\Config $configModel */
            $configModel = $this->_configFactory->create(['data' => $configData]);
            $configModel->save();
            $this->_eventManager->dispatch(
                'admin_system_config_save',
                ['configData' => $configData, 'request' => $this->getRequest()]
            );
            // NOTE(review): addSuccess/addError/addException are deprecated in
            // newer Magento releases in favor of the add*Message variants.
            $this->messageManager->addSuccess(__('You saved the configuration.'));
        } catch (\Magento\Framework\Exception\LocalizedException $e) {
            // A LocalizedException may carry several newline-separated messages.
            $messages = explode("\n", $e->getMessage());
            foreach ($messages as $message) {
                $this->messageManager->addError($message);
            }
        } catch (\Exception $e) {
            $this->messageManager->addException(
                $e,
                __('Something went wrong while saving this configuration:') . ' ' . $e->getMessage()
            );
        }
        // Persist the submitted 'config_state' via the parent helper.
        $this->_saveState($this->getRequest()->getPost('config_state'));
        /** @var \Magento\Backend\Model\View\Result\Redirect $resultRedirect */
        $resultRedirect = $this->resultRedirectFactory->create();
        return $resultRedirect->setPath(
            'adminhtml/system_config/edit',
            [
                '_current' => ['section', 'website', 'store'],
                '_nosid' => true
            ]
        );
    }
    /**
     * Filter paths that are not defined.
     *
     * Recursively walks the submitted groups and keeps only fields whose
     * resolved config path appears in system.xml; groups declaring
     * 'clone_fields' are passed through untouched.
     *
     * @param string $prefix Path prefix
     * @param array $groups Groups data.
     * @param string[] $systemXmlConfig Defined paths.
     * @return array Filtered groups.
     * @SuppressWarnings(PHPMD.CyclomaticComplexity)
     */
    private function filterPaths(string $prefix, array $groups, array $systemXmlConfig): array
    {
        // Flip once so path membership checks are O(1) lookups.
        $flippedXmlConfig = array_flip($systemXmlConfig);
        $filtered = [];
        foreach ($groups as $groupName => $childPaths) {
            //When group accepts arbitrary fields and clones them we allow it
            $group = $this->_configStructure->getElement($prefix .'/' .$groupName);
            if (array_key_exists('clone_fields', $group->getData()) && $group->getData()['clone_fields']) {
                $filtered[$groupName] = $childPaths;
                continue;
            }
            $filtered[$groupName] = ['fields' => [], 'groups' => []];
            //Processing fields
            if (array_key_exists('fields', $childPaths)) {
                foreach ($childPaths['fields'] as $field => $fieldData) {
                    //Constructing config path for the $field
                    $path = $prefix .'/' .$groupName .'/' .$field;
                    $element = $this->_configStructure->getElement($path);
                    if ($element
                        && ($elementData = $element->getData())
                        && array_key_exists('config_path', $elementData)
                    ) {
                        $path = $elementData['config_path'];
                    }
                    //Checking whether it exists in system.xml
                    if (array_key_exists($path, $flippedXmlConfig)) {
                        $filtered[$groupName]['fields'][$field] = $fieldData;
                    }
                }
            }
            //Recursively filtering this group's groups.
            if (array_key_exists('groups', $childPaths) && $childPaths['groups']) {
                $filteredGroups = $this->filterPaths(
                    $prefix .'/' .$groupName,
                    $childPaths['groups'],
                    $systemXmlConfig
                );
                if ($filteredGroups) {
                    $filtered[$groupName]['groups'] = $filteredGroups;
                }
            }
            // Remove empty 'fields'/'groups' buckets left by the filtering above.
            $filtered[$groupName] = array_filter($filtered[$groupName]);
        }
        return array_filter($filtered);
    }
    /**
     * Filters nodes by checking whether they exist in system.xml.
     *
     * @param array $configData
     * @return array
     */
    private function filterNodes(array $configData): array
    {
        if (!empty($configData['groups'])) {
            $systemXmlPathsFromKeys = array_keys($this->_configStructure->getFieldPaths());
            $systemXmlPathsFromValues = array_reduce(
                array_values($this->_configStructure->getFieldPaths()),
                'array_merge',
                []
            );
            //Full list of paths defined in system.xml
            $systemXmlConfig = array_merge($systemXmlPathsFromKeys, $systemXmlPathsFromValues);
            $configData['groups'] = $this->filterPaths($configData['section'], $configData['groups'], $systemXmlConfig);
        }
        return $configData;
    }
}
|
{
"pile_set_name": "Github"
}
|
<?php
// Heredoc interpolation fixture: exercises simple ($a), braced ({$b}),
// array-offset ({$c['c']}) and object-property ({$d->d}) interpolation
// inside heredoc bodies. The interpolated variables are presumably
// defined by nowdoc.inc — TODO confirm.
require_once 'nowdoc.inc';
// Heredoc used directly as a print argument.
print <<<ENDOFHEREDOC
This is heredoc test #s $a, {$b}, {$c['c']}, and {$d->d}.
ENDOFHEREDOC;
// Same heredoc assigned to a variable, then printed via braced interpolation.
$x = <<<ENDOFHEREDOC
This is heredoc test #s $a, {$b}, {$c['c']}, and {$d->d}.
ENDOFHEREDOC;
print "{$x}";
?>
|
{
"pile_set_name": "Github"
}
|
// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/**
 * Any statement within "do-while" construction must be a compound
 *
 * @path ch12/12.6/12.6.1/S12.6.1_A12.js
 * @description Checking if execution of "do var x=1; var y =2; while (0)" fails
 * @negative
 */
//////////////////////////////////////////////////////////////////////////////
//CHECK#1
// NOTE: the statement below is deliberately malformed — this is a @negative
// test that must produce a SyntaxError, because a do-while body may be only
// a single statement and "var x=1; var y =2;" is two. Do not "fix" it.
do var x=1; var y =2; while (0);
//
//////////////////////////////////////////////////////////////////////////////
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Topic 07 -- Abstracts with Biological Entities (English) - 75 Topics / Sub-Topic Model 08 - 15 Topics</title>
<style>
table {
font-family: "Trebuchet MS", Arial, Helvetica, sans-serif;
border-collapse: collapse;
width: 100%;
}
td, th {
border: 1px solid #ddd;
padding: 8px;
}
tr:nth-child(even){background-color: #f2f2f2;}
tr:hover {background-color: #ddd;}
th {
padding-top: 12px;
padding-bottom: 12px;
text-align: left;
background-color: #0099FF;
color: white;
}
</style>
</head>
<body>
<h2>Topic 07 -- Abstracts with Biological Entities (English) - 75 Topics / Sub-Topic Model 08 - 15 Topics</h2>
<table border="1" class="dataframe">
<thead>
<tr style="text-align: right;">
<th></th>
<th>cite ad</th>
<th>title</th>
<th>authors</th>
<th>publish year</th>
<th>publish time</th>
<th>dataset</th>
<th>abstract mentions covid</th>
<th>pmcid</th>
<th>pubmed id</th>
<th>doi</th>
<th>cord uid</th>
<th>topic weight</th>
<th>Similarity scispacy</th>
<th>Similarity specter</th>
</tr>
</thead>
<tbody>
<tr>
      <th id="a2m46j0z">1</th>
<td>Tian_2016</td>
<td>The similarity analysis of financial stocks based on information clustering</td>
<td>Tian, Qiang; Shang, Pengjian; Feng, Guochen</td>
<td>2016</td>
<td>2016-05-26</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7088863" target="_blank">PMC7088863</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/s11071-016-2851-9" target="_blank">10.1007/s11071-016-2851-9</a></td>
<td>a2m46j0z</td>
<td>0.886806</td>
<td></td>
<td><a href="Topic_07.html#tjnt01ld">Argyroudis_2019</a></td>
</tr>
<tr>
      <th id="b25mi0sp">2</th>
<td>Zhao_2013</td>
<td>Estimation with Right-Censored Observations Under A Semi-Markov Model</td>
<td>Zhao, Lihui; Hu, X. Joan</td>
<td>2013</td>
<td>2013-03-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC3713855" target="_blank">PMC3713855</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/23874060.0" target="_blank">23874060.0</a></td>
<td><a href="https://doi.org/10.1002/cjs.11176" target="_blank">10.1002/cjs.11176</a></td>
<td>b25mi0sp</td>
<td>0.885701</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a></td>
<td><a href="Topic_07.html#cok7vbmg">Soubeyrand_2007</a>, <a href="Topic_04.html#ecxcw96m">Black_2013</a>, <a href="Topic_04.html#88xk1e6h">Hirose_2007</a>, <a href="Topic_02.html#dqg8fkca">Roques_2020</a></td>
</tr>
<tr>
      <th id="8k22red9">3</th>
<td>So_M_2008</td>
<td>A multivariate threshold stochastic volatility model</td>
<td>So, Mike K.P.; Choi, C.Y.</td>
<td>2008</td>
<td>2008-12-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7127604" target="_blank">PMC7127604</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.matcom.2007.12.003" target="_blank">10.1016/j.matcom.2007.12.003</a></td>
<td>8k22red9</td>
<td>0.843205</td>
<td><a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_07.html#b25mi0sp">Zhao_2013</a></td>
<td><a href="Topic_07.html#eszlg47q">Liao_2013</a></td>
</tr>
<tr>
      <th id="hoa39sx2">4</th>
<td>Wan_W_2011</td>
<td>Bayesian analysis of robust Poisson geometric process model using heavy-tailed distributions</td>
<td>Wan, Wai-Yin; Chan, Jennifer So-Kuen</td>
<td>2011</td>
<td>2011-01-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7114253" target="_blank">PMC7114253</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.csda.2010.06.011" target="_blank">10.1016/j.csda.2010.06.011</a></td>
<td>hoa39sx2</td>
<td>0.811831</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a>, <a href="Topic_07.html#b25mi0sp">Zhao_2013</a></td>
<td><a href="Topic_07.html#b25mi0sp">Zhao_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a></td>
</tr>
<tr>
      <th id="8moz4g71">5</th>
<td>Zou_X_2014</td>
<td>Optimal harvesting for a stochastic regime-switching logistic diffusion system with jumps</td>
<td>Zou, Xiaoling; Wang, Ke</td>
<td>2014</td>
<td>2014-08-31</td>
<td>PMC</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.nahs.2014.01.001" target="_blank">10.1016/j.nahs.2014.01.001</a></td>
<td>8moz4g71</td>
<td>0.765850</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a></td>
<td><a href="Topic_06.html#04p0c4jt">Guo_W_2018</a>, <a href="Topic_06.html#774ywcj0">Chen_2018</a>, <a href="Topic_01.html#hp2d68su">Cazelles_2018</a>, <a href="Topic_06.html#yjodgy2v">Lan_G_2019</a></td>
</tr>
<tr>
      <th id="hi5afmbw">6</th>
<td>Pan_J_2008</td>
<td>Estimation and tests for power-transformed and threshold GARCH models</td>
<td>Pan, Jiazhu; Wang, Hui; Tong, Howell</td>
<td>2008</td>
<td>2008-01-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7116990" target="_blank">PMC7116990</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.jeconom.2007.06.004" target="_blank">10.1016/j.jeconom.2007.06.004</a></td>
<td>hi5afmbw</td>
<td>0.751901</td>
<td><a href="Topic_07.html#63wqn2fg">Höhle_2007</a>, <a href="Topic_07.html#b25mi0sp">Zhao_2013</a>, <a href="Topic_06.html#9nhauvss">Pekalp_2019</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a></td>
<td><a href="Topic_07.html#b25mi0sp">Zhao_2013</a>, <a href="Topic_04.html#88xk1e6h">Hirose_2007</a>, <a href="Topic_07.html#cok7vbmg">Soubeyrand_2007</a></td>
</tr>
<tr>
      <th id="geiq55mq">7</th>
<td>Xiong_2016</td>
<td>Weighted multifractal cross-correlation analysis based on Shannon entropy</td>
<td>Xiong, Hui; Shang, Pengjian</td>
<td>2016</td>
<td>2016-01-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7128505" target="_blank">PMC7128505</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.cnsns.2015.06.029" target="_blank">10.1016/j.cnsns.2015.06.029</a></td>
<td>geiq55mq</td>
<td>0.742341</td>
<td><a href="Topic_07.html#b25mi0sp">Zhao_2013</a></td>
<td><a href="Topic_07.html#a2m46j0z">Tian_2016</a>, <a href="Topic_07.html#tjnt01ld">Argyroudis_2019</a>, <a href="Topic_04.html#7umn0vkv">Tao_Y_2020</a>, <a href="Topic_07.html#hi5afmbw">Pan_J_2008</a></td>
</tr>
<tr>
      <th id="cok7vbmg">8</th>
<td>Soubeyrand_2007</td>
<td>Model-based estimation of the link between the daily survival probability and a time-varying covariate, application to mosquitofish survival data</td>
<td>Soubeyrand, Samuel; Beaudouin, Rémy; Desassis, Nicolas; Monod, Gilles</td>
<td>2007</td>
<td>2007-12-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7125893" target="_blank">PMC7125893</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/17706252.0" target="_blank">17706252.0</a></td>
<td><a href="https://doi.org/10.1016/j.mbs.2007.06.005" target="_blank">10.1016/j.mbs.2007.06.005</a></td>
<td>cok7vbmg</td>
<td>0.721478</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_07.html#b25mi0sp">Zhao_2013</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a></td>
<td><a href="Topic_03.html#ge871ypq">Safi_2011</a>, <a href="Topic_04.html#ecxcw96m">Black_2013</a>, <a href="Topic_03.html#rqqke40c">Heffernan_2006</a>, <a href="Topic_02.html#dqg8fkca">Roques_2020</a></td>
</tr>
<tr>
      <th id="6riyqn4k">9</th>
<td>Getz_2017</td>
<td>Modeling Epidemics: A Primer and Numerus Software Implementation</td>
<td>Wayne M. Getz; Richard Salter; Oliver Muellerklein; Hyun S. Yoon; Krti Tallam</td>
<td>2017</td>
<td>2017-09-22</td>
<td>BioRxiv</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/191601" target="_blank">10.1101/191601</a></td>
<td>6riyqn4k</td>
<td>0.687620</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_08.html#vt6nh3vs">Kenah_2007</a>, <a href="Topic_07.html#7c4jsvlz">Simon_2010</a></td>
<td><a href="Topic_01.html#fmc5fwf3">Vincenot_2011</a>, <a href="Topic_03.html#1xz19ai8">Feng_2018</a>, <a href="Topic_03.html#gmpsniqt">Clancy_2015</a>, <a href="Topic_08.html#ygdau2px">Roy_M_2006</a></td>
</tr>
<tr>
      <th id="63wqn2fg">10</th>
<td>Höhle_2007</td>
<td>RLadyBug—An R package for stochastic epidemic models</td>
<td>Höhle, Michael; Feldmann, Ulrike</td>
<td>2007</td>
<td>2007-10-15</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7114252" target="_blank">PMC7114252</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.csda.2006.11.016" target="_blank">10.1016/j.csda.2006.11.016</a></td>
<td>63wqn2fg</td>
<td>0.651087</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a></td>
<td><a href="Topic_13.html#aqyqofe7">Zimmer_2017</a>, <a href="Topic_04.html#ecxcw96m">Black_2013</a></td>
</tr>
<tr>
      <th id="wljou004">11</th>
<td>White_2007</td>
<td>Modeling epidemics using cellular automata</td>
<td>White, S. Hoya; del Rey, A. Martín; Sánchez, G. Rodríguez</td>
<td>2007</td>
<td>2007-03-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7127728" target="_blank">PMC7127728</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.amc.2006.06.126" target="_blank">10.1016/j.amc.2006.06.126</a></td>
<td>wljou004</td>
<td>0.630193</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_08.html#h1i29kcq">Li_C_2018</a></td>
<td><a href="Topic_06.html#04p0c4jt">Guo_W_2018</a>, <a href="Topic_06.html#ru2mv7wv">Fan_K_2020</a>, <a href="Topic_03.html#fsqbgyaw">Sherborne_2015</a>, <a href="Topic_01.html#3x2qf3yu">Bin_S_2019</a></td>
</tr>
<tr>
      <th id="lhv83zac">12</th>
<td>Bliznashki_2020</td>
<td>A Bayesian Logistic Growth Model for the Spread of COVID-19 in New York</td>
<td>Svetoslav Bliznashki</td>
<td>2020</td>
<td>2020-04-07</td>
<td>BioRxiv</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.05.20054577" target="_blank">10.1101/2020.04.05.20054577</a></td>
<td>lhv83zac</td>
<td>0.622302</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_07.html#b25mi0sp">Zhao_2013</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a></td>
<td><a href="Topic_10.html#o3hytzwu">Chong_2020</a>, <a href="Topic_02.html#nc5rtwtd">Smeets_2020</a>, <a href="Topic_04.html#ecxcw96m">Black_2013</a>, <a href="Topic_03.html#ge871ypq">Safi_2011</a></td>
</tr>
<tr>
      <th id="tjnt01ld">13</th>
<td>Argyroudis_2019</td>
<td>Spillover effects of Great Recession on Hong-Kong’s Real Estate Market: An analysis based on Causality Plane and Tsallis Curves of Complexity–Entropy</td>
<td>Argyroudis, George S.; Siokis, Fotios M.</td>
<td>2019</td>
<td>2019-06-15</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7126015" target="_blank">PMC7126015</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.physa.2019.04.052" target="_blank">10.1016/j.physa.2019.04.052</a></td>
<td>tjnt01ld</td>
<td>0.578203</td>
<td><a href="Topic_08.html#h1i29kcq">Li_C_2018</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a></td>
<td></td>
</tr>
<tr>
      <th id="w4g8mzuy">14</th>
<td>Groendyke_2012</td>
<td>A Network-based Analysis of the 1861 Hagelloch Measles Data</td>
<td>Groendyke, Chris; Welch, David; Hunter, David R.</td>
<td>2012</td>
<td>2012-02-24</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4553425" target="_blank">PMC4553425</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/22364540.0" target="_blank">22364540.0</a></td>
<td><a href="https://doi.org/10.1111/j.1541-0420.2012.01748.x" target="_blank">10.1111/j.1541-0420.2012.01748.x</a></td>
<td>w4g8mzuy</td>
<td>0.568272</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_07.html#63wqn2fg">Höhle_2007</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a></td>
<td><a href="Topic_03.html#ge871ypq">Safi_2011</a>, <a href="Topic_10.html#3gj754dq">Giardina_2017</a>, <a href="Topic_04.html#a95hh4yk">Small_2005</a></td>
</tr>
<tr>
      <th id="v4nhkt4p">15</th>
<td>Getz_2017</td>
<td>Discrete Stochastic Analogs of Erlang Epidemic Models</td>
<td>Getz, Wayne M.; Dougherty, Eric R.</td>
<td>2017</td>
<td>2017-11-20</td>
<td>None</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6120589" target="_blank">PMC6120589</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/29157162.0" target="_blank">29157162.0</a></td>
<td><a href="https://doi.org/10.1080/17513758.2017.1401677" target="_blank">10.1080/17513758.2017.1401677</a></td>
<td>v4nhkt4p</td>
<td>0.552647</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_07.html#63wqn2fg">Höhle_2007</a>, <a href="Topic_08.html#vt6nh3vs">Kenah_2007</a>, <a href="Topic_07.html#b25mi0sp">Zhao_2013</a></td>
<td><a href="Topic_08.html#ygdau2px">Roy_M_2006</a>, <a href="Topic_03.html#6ev4ed6z">Chen_2019</a>, <a href="Topic_13.html#aqyqofe7">Zimmer_2017</a></td>
</tr>
<tr>
      <th id="7c4jsvlz">16</th>
<td>Simon_2010</td>
<td>Exact epidemic models on graphs using graph-automorphism driven lumping</td>
<td>Simon, Péter L.; Taylor, Michael; Kiss, Istvan Z.</td>
<td>2010</td>
<td>2010-04-28</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7079990" target="_blank">PMC7079990</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/20425114.0" target="_blank">20425114.0</a></td>
<td><a href="https://doi.org/10.1007/s00285-010-0344-x" target="_blank">10.1007/s00285-010-0344-x</a></td>
<td>7c4jsvlz</td>
<td>0.530060</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a></td>
<td><a href="Topic_08.html#vt6nh3vs">Kenah_2007</a>, <a href="Topic_08.html#n5i4xez4">Rolls_2015</a>, <a href="Topic_03.html#fsqbgyaw">Sherborne_2015</a>, <a href="Topic_08.html#ygdau2px">Roy_M_2006</a></td>
</tr>
<tr>
      <th id="pkac2i3b">17</th>
<td>Kosmidis_2020</td>
<td>A Fractal kinetics SI model can explain the dynamics of COVID-19 epidemics</td>
<td>Kosmas Kosmidis; Panos Macheras</td>
<td>2020</td>
<td>2020-04-17</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.11.20061366" target="_blank">10.1101/2020.04.11.20061366</a></td>
<td>pkac2i3b</td>
<td>0.518695</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_08.html#ogliq8nj">Li_K_2011</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_08.html#h1i29kcq">Li_C_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a></td>
<td></td>
</tr>
<tr>
      <th id="lwyeqqvy">18</th>
<td>Hu_J_2016</td>
<td>Optimal Data Transmission Strategy for Healthcare-Based Wireless Sensor Networks: A Stochastic Differential Game Approach</td>
<td>Hu, Jiahui; Qian, Qing; Fang, An; Wu, Sizhu; Xie, Yi</td>
<td>2016</td>
<td>2016-05-13</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7088864" target="_blank">PMC7088864</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/s11277-016-3316-7" target="_blank">10.1007/s11277-016-3316-7</a></td>
<td>lwyeqqvy</td>
<td>0.502758</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#mh30ocvo">Duan_2015</a></td>
<td><a href="Topic_07.html#u48mhxbc">Liu_T_2010</a></td>
</tr>
<tr>
      <th id="r01erhp3">19</th>
<td>Jo_H_2020</td>
<td>Analysis of COVID-19 spread in South Korea using the SIR model with time-dependent parameters and deep learning</td>
<td>Hyeontae Jo; Hwijae Son; Se Young Jung; Hyung Ju Hwang</td>
<td>2020</td>
<td>2020-04-17</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.13.20063412" target="_blank">10.1101/2020.04.13.20063412</a></td>
<td>r01erhp3</td>
<td>0.502271</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_07.html#63wqn2fg">Höhle_2007</a></td>
<td></td>
</tr>
<tr>
      <th id="eszlg47q">20</th>
<td>Liao_2013</td>
<td>The benefit of modeling jumps in realized volatility for risk prediction: Evidence from Chinese mainland stocks</td>
<td>Liao, Yin</td>
<td>2013</td>
<td>2013-06-30</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7147854" target="_blank">PMC7147854</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.pacfin.2013.01.002" target="_blank">10.1016/j.pacfin.2013.01.002</a></td>
<td>eszlg47q</td>
<td>0.495585</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_07.html#8k22red9">So_M_2008</a>, <a href="Topic_01.html#mh30ocvo">Duan_2015</a></td>
<td></td>
</tr>
<tr>
      <th id="wofyftrs">21</th>
<td>Hao_T_2020</td>
<td>Prediction of Coronavirus Disease (covid-19) Evolution in USA with the Model Based on the Eyring Rate Process Theory and Free Volume Concept</td>
<td>Tian Hao</td>
<td>2020</td>
<td>2020-04-22</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.16.20068692" target="_blank">10.1101/2020.04.16.20068692</a></td>
<td>wofyftrs</td>
<td>0.482588</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#5ge7ozpd">Zheng_2020</a>, <a href="Topic_01.html#cx3ex9ru">Leach_2010</a></td>
<td></td>
</tr>
<tr>
      <th id="l9wrrapv">22</th>
<td>Duchêne_2015</td>
<td>Evaluating the Adequacy of Molecular Clock Models Using Posterior Predictive Simulations</td>
<td>Duchêne, David A.; Duchêne, Sebastian; Holmes, Edward C.; Ho, Simon Y.W.</td>
<td>2015</td>
<td>2015-07-10</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7107558" target="_blank">PMC7107558</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/26163668.0" target="_blank">26163668.0</a></td>
<td><a href="https://doi.org/10.1093/molbev/msv154" target="_blank">10.1093/molbev/msv154</a></td>
<td>l9wrrapv</td>
<td>0.476117</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a></td>
<td><a href="Topic_10.html#zw5kt090">Hilton_2018</a></td>
</tr>
<tr>
      <th id="jf36as70">23</th>
<td>Kim_S_2020</td>
<td>AAEDM: Theoretical Dynamic Epidemic Diffusion Model and Covid-19 Korea Pandemic Cases</td>
<td>Song-Kyoo Kim</td>
<td>2020</td>
<td>2020-03-20</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.03.17.20037838" target="_blank">10.1101/2020.03.17.20037838</a></td>
<td>jf36as70</td>
<td>0.475249</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_01.html#iwwgfh9y">Grassly_2008</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a></td>
<td><a href="Topic_02.html#azpz6e7q">Distante_2020</a>, <a href="Topic_02.html#m87tapjp">Peng_2020</a>, <a href="Topic_02.html#tc8eru1w">Zhan_2020</a></td>
</tr>
<tr>
      <th id="bph4nuch">24</th>
<td>Fujie_2007</td>
<td>Effects of superspreaders in spread of epidemic</td>
<td>Fujie, Ryo; Odagaki, Takashi</td>
<td>2007</td>
<td>2007-02-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7127014" target="_blank">PMC7127014</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.physa.2006.08.050" target="_blank">10.1016/j.physa.2006.08.050</a></td>
<td>bph4nuch</td>
<td>0.464528</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_08.html#vt6nh3vs">Kenah_2007</a>, <a href="Topic_08.html#ogliq8nj">Li_K_2011</a>, <a href="Topic_08.html#h1i29kcq">Li_C_2018</a></td>
<td><a href="Topic_08.html#qzgxe24c">Shang_2013</a>, <a href="Topic_03.html#1tkevj8v">Edholm_2018</a>, <a href="Topic_03.html#qfo5sw0t">James_2006</a>, <a href="Topic_03.html#dqkjofw2">Small_2006</a></td>
</tr>
<tr>
      <th id="b39fo4a8">25</th>
<td>Wu_Z_2015</td>
<td>Partially latent class models for case–control studies of childhood pneumonia aetiology</td>
<td>Wu, Zhenke; Deloria‐Knoll, Maria; Hammitt, Laura L.; Zeger, Scott L.</td>
<td>2015</td>
<td>2015-03-26</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7169268" target="_blank">PMC7169268</a></td>
<td></td>
<td><a href="https://doi.org/10.1111/rssc.12101" target="_blank">10.1111/rssc.12101</a></td>
<td>b39fo4a8</td>
<td>0.456252</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a>, <a href="Topic_08.html#h1i29kcq">Li_C_2018</a>, <a href="Topic_01.html#5ge7ozpd">Zheng_2020</a></td>
<td></td>
</tr>
<tr>
<th id="lfm6erzy">26</th>
<td>Ross_2016</td>
<td>How domain growth is implemented determines the long term behaviour of a cell population through its effect on spatial correlations</td>
<td>Robert J. H. Ross; R. E. Baker; C. A. Yates</td>
<td>2016</td>
<td>2016-02-26</td>
<td>BioRxiv</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/041509" target="_blank">10.1101/041509</a></td>
<td>lfm6erzy</td>
<td>0.446933</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_10.html#i7navmbn">O'Dea_2010</a></td>
<td></td>
</tr>
<tr>
<th id="3y89lumh">27</th>
<td>Menendez_2020</td>
<td>Elementary time-delay dynamics of COVID-19 disease</td>
<td>Jose Menendez</td>
<td>2020</td>
<td>2020-03-30</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.03.27.20045328" target="_blank">10.1101/2020.03.27.20045328</a></td>
<td>3y89lumh</td>
<td>0.446293</td>
<td><a href="Topic_08.html#h1i29kcq">Li_C_2018</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a>, <a href="Topic_02.html#a9bnafr7">Renna_2020</a>, <a href="Topic_03.html#tlluxd11">Welch_2011</a></td>
<td><a href="Topic_02.html#c800ynvc">Shi_P_2020</a>, <a href="Topic_04.html#5po7q64l">Yong_2016</a>, <a href="Topic_02.html#nc5rtwtd">Smeets_2020</a></td>
</tr>
<tr>
<th id="a47l7m47">28</th>
<td>Souza_2020</td>
<td>Using curvature to infer COVID-19 fractal epidemic network fragility and systemic risk</td>
<td>Danillo Barros de Souza; Fernando A N Santos; Everlon Figueiroa; Jailson B Correia; Hernande P da Silva; Jose Luiz de Lima Filho; Jones Albuquerque</td>
<td>2020</td>
<td>2020-04-06</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.01.20047225" target="_blank">10.1101/2020.04.01.20047225</a></td>
<td>a47l7m47</td>
<td>0.416443</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_08.html#ogliq8nj">Li_K_2011</a>, <a href="Topic_08.html#h1i29kcq">Li_C_2018</a></td>
<td><a href="Topic_06.html#vqagpbiu">Denphedtnong_2013</a>, <a href="Topic_14.html#umiql1te">Loberg_2020</a>, <a href="Topic_03.html#a9fds45e">Bombardt_2006</a></td>
</tr>
<tr>
<th id="zriuh5q5">29</th>
<td>Hao_T_2020</td>
<td>Infection Dynamics of Coronavirus Disease 2019 (Covid-19) Modeled with the Integration of the Eyring Rate Process Theory and Free Volume Concept</td>
<td>Tian Hao</td>
<td>2020</td>
<td>2020-02-29</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.02.26.20028571" target="_blank">10.1101/2020.02.26.20028571</a></td>
<td>zriuh5q5</td>
<td>0.415687</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_03.html#tlluxd11">Welch_2011</a>, <a href="Topic_08.html#h1i29kcq">Li_C_2018</a></td>
<td></td>
</tr>
<tr>
<th id="qnn4v0k2">30</th>
<td>Maeno_2010</td>
<td>Discovering network behind infectious disease outbreak</td>
<td>Maeno, Yoshiharu</td>
<td>2010</td>
<td>2010-11-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7125928" target="_blank">PMC7125928</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.physa.2010.07.014" target="_blank">10.1016/j.physa.2010.07.014</a></td>
<td>qnn4v0k2</td>
<td>0.408967</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_08.html#vt6nh3vs">Kenah_2007</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_07.html#63wqn2fg">Höhle_2007</a></td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_03.html#1xz19ai8">Feng_2018</a>, <a href="Topic_03.html#fsqbgyaw">Sherborne_2015</a>, <a href="Topic_01.html#tp3qt9pb">Colizza_2007</a></td>
</tr>
<tr>
<th id="q8539o41">31</th>
<td>Grant_2020</td>
<td>Dynamics of COVID‐19 epidemics: SEIR models underestimate peak infection rates and overestimate epidemic duration</td>
<td>Alastair Grant</td>
<td>2020</td>
<td>2020-04-06</td>
<td>BioRxiv</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.02.20050674" target="_blank">10.1101/2020.04.02.20050674</a></td>
<td>q8539o41</td>
<td>0.406579</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_08.html#vt6nh3vs">Kenah_2007</a>, <a href="Topic_03.html#tlluxd11">Welch_2011</a>, <a href="Topic_08.html#h1i29kcq">Li_C_2018</a></td>
<td><a href="Topic_03.html#kgkcgpmq">Kretzschmar_2020</a>, <a href="Topic_13.html#aqyqofe7">Zimmer_2017</a>, <a href="Topic_03.html#fsqbgyaw">Sherborne_2015</a></td>
</tr>
<tr>
<th id="ewooc2am">32</th>
<td>Brett_2017</td>
<td>Anticipating the emergence of infectious diseases</td>
<td>Brett, Tobias S.; Drake, John M.; Rohani, Pejman</td>
<td>2017</td>
<td>2017-07-05</td>
<td>COMM-USE</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC5550966" target="_blank">PMC5550966</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/28679666" target="_blank">28679666</a></td>
<td><a href="https://doi.org/10.1098/rsif.2017.0115" target="_blank">10.1098/rsif.2017.0115</a></td>
<td>ewooc2am</td>
<td>0.402905</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#qlypp5z6">Bauer_2009</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a></td>
<td><a href="Topic_01.html#q2o71o4d">Dimitrov_2008</a>, <a href="Topic_03.html#6ev4ed6z">Chen_2019</a></td>
</tr>
<tr>
<th id="itviia7v">33</th>
<td>Chandra_2020</td>
<td>Stochastic Compartmental Modelling of SARS-CoV-2 with Approximate Bayesian Computation</td>
<td>Vedant Chandra</td>
<td>2020</td>
<td>2020-04-01</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.03.29.20046862" target="_blank">10.1101/2020.03.29.20046862</a></td>
<td>itviia7v</td>
<td>0.400462</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_02.html#a9bnafr7">Renna_2020</a></td>
<td><a href="Topic_02.html#nc5rtwtd">Smeets_2020</a>, <a href="Topic_03.html#ge871ypq">Safi_2011</a>, <a href="Topic_07.html#z19umr4q">Chernyshev_2020</a></td>
</tr>
<tr>
<th id="u48mhxbc">34</th>
<td>Liu_T_2010</td>
<td>Integration of small world networks with multi-agent systems for simulating epidemic spatiotemporal transmission</td>
<td>Liu, Tao; Li, Xia; Liu, XiaoPing</td>
<td>2010</td>
<td>2010-05-06</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7089090" target="_blank">PMC7089090</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/s11434-009-0623-3" target="_blank">10.1007/s11434-009-0623-3</a></td>
<td>u48mhxbc</td>
<td>0.389794</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_08.html#ogliq8nj">Li_K_2011</a></td>
<td><a href="Topic_08.html#xdlk8557">Han_X_2014</a>, <a href="Topic_08.html#auy0h9wc">Song_2015</a>, <a href="Topic_08.html#xbdrf8h9">Kan_J_2017</a>, <a href="Topic_06.html#vqagpbiu">Denphedtnong_2013</a></td>
</tr>
<tr>
<th id="n5i4xez4">35</th>
<td>Rolls_2015</td>
<td>A Simulation Study Comparing Epidemic Dynamics on Exponential Random Graph and Edge-Triangle Configuration Type Contact Network Models</td>
<td>Rolls, David A.; Wang, Peng; McBryde, Emma; Pattison, Philippa; Robins, Garry</td>
<td>2015</td>
<td>2015-11-10</td>
<td>None</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC4640514" target="_blank">PMC4640514</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/26555701" target="_blank">26555701</a></td>
<td><a href="https://doi.org/10.1371/journal.pone.0142181" target="_blank">10.1371/journal.pone.0142181</a></td>
<td>n5i4xez4</td>
<td>0.388437</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_08.html#vt6nh3vs">Kenah_2007</a></td>
<td><a href="Topic_08.html#xdlk8557">Han_X_2014</a>, <a href="Topic_08.html#vt6nh3vs">Kenah_2007</a>, <a href="Topic_07.html#w4g8mzuy">Groendyke_2012</a></td>
</tr>
<tr>
<th id="nexylnv4">36</th>
<td>Abdulrahman_2020</td>
<td>SimCOVID: An Open-Source Simulink-Based Program for Simulating the COVID-19 Epidemic</td>
<td>Ismael Khorshed Abdulrahman</td>
<td>2020</td>
<td>2020-04-17</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.13.20063354" target="_blank">10.1101/2020.04.13.20063354</a></td>
<td>nexylnv4</td>
<td>0.385376</td>
<td><a href="Topic_07.html#63wqn2fg">Höhle_2007</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_02.html#a9bnafr7">Renna_2020</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a></td>
<td></td>
</tr>
<tr>
<th id="isqqhki4">37</th>
<td>Mummert_2019</td>
<td>Parameter identification for a stochastic SEIRS epidemic model: case study influenza</td>
<td>Mummert, Anna; Otunuga, Olusegun M.</td>
<td>2019</td>
<td>2019-05-06</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7080032" target="_blank">PMC7080032</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/31062075" target="_blank">31062075</a></td>
<td><a href="https://doi.org/10.1007/s00285-019-01374-z" target="_blank">10.1007/s00285-019-01374-z</a></td>
<td>isqqhki4</td>
<td>0.385271</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_04.html#lwwzacy2">Kenah_2012</a>, <a href="Topic_07.html#63wqn2fg">Höhle_2007</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a>, <a href="Topic_03.html#tlluxd11">Welch_2011</a></td>
<td><a href="Topic_03.html#ge871ypq">Safi_2011</a>, <a href="Topic_02.html#c800ynvc">Shi_P_2020</a>, <a href="Topic_05.html#ccmd0dw8">Park_2016</a>, <a href="Topic_06.html#vqagpbiu">Denphedtnong_2013</a></td>
</tr>
<tr>
<th id="ueglfhux">38</th>
<td>Dube_2008</td>
<td>Mathematical Analysis of Copy Number Variation in a DNA Sample Using Digital PCR on a Nanofluidic Device</td>
<td>Dube, Simant; Qin, Jian; Ramakrishnan, Ramesh</td>
<td>2008</td>
<td>2008-08-06</td>
<td>COMM-USE</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2483940" target="_blank">PMC2483940</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/18682853" target="_blank">18682853</a></td>
<td><a href="https://doi.org/10.1371/journal.pone.0002876" target="_blank">10.1371/journal.pone.0002876</a></td>
<td>ueglfhux</td>
<td>0.384306</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="oglwveln">39</th>
<td>Walker_2010</td>
<td>Parameter inference in small world network disease models with approximate Bayesian Computational methods</td>
<td>Walker, David M.; Allingham, David; Lee, Heung Wing Joseph; Small, Michael</td>
<td>2010</td>
<td>2010-02-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7126433" target="_blank">PMC7126433</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.physa.2009.09.053" target="_blank">10.1016/j.physa.2009.09.053</a></td>
<td>oglwveln</td>
<td>0.370786</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_07.html#63wqn2fg">Höhle_2007</a></td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a></td>
</tr>
<tr>
<th id="cf2bbn3p">40</th>
<td>Bettencourt_2007</td>
<td>Towards Real Time Epidemiology: Data Assimilation, Modeling and Anomaly Detection of Health Surveillance Data Streams</td>
<td>Bettencourt, Luís M. A.; Ribeiro, Ruy M.; Chowell, Gerardo; Lant, Timothy; Castillo-Chavez, Carlos</td>
<td>2007</td>
<td>2007-01-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7122958" target="_blank">PMC7122958</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/978-3-540-72608-1_8" target="_blank">10.1007/978-3-540-72608-1_8</a></td>
<td>cf2bbn3p</td>
<td>0.369036</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a></td>
<td><a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_01.html#dp4qv77q">Southall_2020</a>, <a href="Topic_01.html#6vty0pmk">Lega_2016</a>, <a href="Topic_01.html#zdft23q8">Cauchemez_2012</a></td>
</tr>
<tr>
<th id="a4d7eqa0">41</th>
<td>Hsu_C_2015</td>
<td>Analysis of household data on influenza epidemic with Bayesian hierarchical model</td>
<td>Hsu, C.Y.; Yen, A.M.F.; Chen, L.S.; Chen, H.H.</td>
<td>2015</td>
<td>2015-03-31</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7094348" target="_blank">PMC7094348</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/25484132" target="_blank">25484132</a></td>
<td><a href="https://doi.org/10.1016/j.mbs.2014.11.006" target="_blank">10.1016/j.mbs.2014.11.006</a></td>
<td>a4d7eqa0</td>
<td>0.366303</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_07.html#63wqn2fg">Höhle_2007</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a></td>
<td><a href="Topic_01.html#fiz00uj3">Park_2017</a>, <a href="Topic_03.html#ge871ypq">Safi_2011</a>, <a href="Topic_07.html#w4g8mzuy">Groendyke_2012</a></td>
</tr>
<tr>
<th id="0auqfxur">42</th>
<td>Thomas_2017</td>
<td>Extended models for nosocomial infection: parameter estimation and model selection</td>
<td>Thomas, Alun; Khader, Karim; Redd, Andrew; Leecaster, Molly; Zhang, Yue; Jones, Makoto; Greene, Tom; Samore, Matthew</td>
<td>2017</td>
<td>2017-10-12</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6145396" target="_blank">PMC6145396</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/29040678" target="_blank">29040678</a></td>
<td><a href="https://doi.org/10.1093/imammb/dqx010" target="_blank">10.1093/imammb/dqx010</a></td>
<td>0auqfxur</td>
<td>0.366303</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a></td>
<td><a href="Topic_09.html#lm4rfabw">López‐García_2019</a>, <a href="Topic_03.html#ge871ypq">Safi_2011</a></td>
</tr>
<tr>
<th id="msgan1mk">43</th>
<td>Maeno_2011</td>
<td>Discovery of a missing disease spreader</td>
<td>Maeno, Yoshiharu</td>
<td>2011</td>
<td>2011-10-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7126838" target="_blank">PMC7126838</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.physa.2011.05.005" target="_blank">10.1016/j.physa.2011.05.005</a></td>
<td>msgan1mk</td>
<td>0.345857</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_08.html#h1i29kcq">Li_C_2018</a></td>
<td><a href="Topic_03.html#6ev4ed6z">Chen_2019</a>, <a href="Topic_03.html#ge871ypq">Safi_2011</a></td>
</tr>
<tr>
<th id="69wk591l">44</th>
<td>Baerwolff_2020</td>
<td>A Contribution to the Mathematical Modeling of the Corona/COVID-19 Pandemic</td>
<td>Guenter K.F. Baerwolff</td>
<td>2020</td>
<td>2020-04-06</td>
<td>BioRxiv</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.01.20050229" target="_blank">10.1101/2020.04.01.20050229</a></td>
<td>69wk591l</td>
<td>0.345857</td>
<td><a href="Topic_01.html#5ge7ozpd">Zheng_2020</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a></td>
<td><a href="Topic_04.html#5po7q64l">Yong_2016</a>, <a href="Topic_04.html#u5qt9es8">Kim_Y_2016</a>, <a href="Topic_03.html#5ab0hag6">Nadeau_2014</a></td>
</tr>
<tr>
<th id="z19umr4q">45</th>
<td>Chernyshev_2020</td>
<td>Autocatalytic Model for Covid-19 Progression in a Country</td>
<td>Anatoly Chernyshev</td>
<td>2020</td>
<td>2020-04-07</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.03.20052985" target="_blank">10.1101/2020.04.03.20052985</a></td>
<td>z19umr4q</td>
<td>0.345341</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a></td>
<td><a href="Topic_02.html#nc5rtwtd">Smeets_2020</a>, <a href="Topic_04.html#5po7q64l">Yong_2016</a>, <a href="Topic_02.html#c800ynvc">Shi_P_2020</a></td>
</tr>
<tr>
<th id="04l3474h">46</th>
<td>Buonomo_2012</td>
<td>Global stability for an HIV-1 infection model including an eclipse stage of infected cells</td>
<td>Buonomo, Bruno; Vargas-De-León, Cruz</td>
<td>2012</td>
<td>2012-01-15</td>
<td>PMC</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.jmaa.2011.07.006" target="_blank">10.1016/j.jmaa.2011.07.006</a></td>
<td>04l3474h</td>
<td>0.345341</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_07.html#7c4jsvlz">Simon_2010</a></td>
<td><a href="Topic_06.html#cycc3fvl">Fudolig_2020</a>, <a href="Topic_06.html#rmtyrbg0">Saad_2018</a>, <a href="Topic_12.html#5igf77tk">Zhang_2018</a>, <a href="Topic_06.html#bss41lae">Gao_D_2018</a></td>
</tr>
<tr>
<th id="kckzvlcw">47</th>
<td>Shi_Y_2003</td>
<td>Stochastic dynamic model of SARS spreading</td>
<td>Shi, Yaolin</td>
<td>2003</td>
<td>2003-01-01</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7089366" target="_blank">PMC7089366</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/bf03184164" target="_blank">10.1007/bf03184164</a></td>
<td>kckzvlcw</td>
<td>0.344891</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_08.html#5w90r6wu">Wu_Q_2014</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a>, <a href="Topic_08.html#h1i29kcq">Li_C_2018</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a></td>
<td><a href="Topic_04.html#5q4leivx">Zhou_2004</a>, <a href="Topic_05.html#t0oqk88n">Naheed_2014</a>, <a href="Topic_02.html#c800ynvc">Shi_P_2020</a>, <a href="Topic_04.html#0qaoam29">Ng_T_2003</a></td>
</tr>
<tr>
<th id="98i0dwat">48</th>
<td>Sedov_2020</td>
<td>Modeling quarantine during epidemics by mass-testing with drones</td>
<td>Leonid Sedov; Alexander Krasnochub; Valentin Polishchuk</td>
<td>2020</td>
<td>2020-04-20</td>
<td>BioRxiv</td>
<td>N</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.15.20067025" target="_blank">10.1101/2020.04.15.20067025</a></td>
<td>98i0dwat</td>
<td>0.343029</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="qqcb85uk">49</th>
<td>Lekone_2008</td>
<td>Bayesian Analysis of Severe Acute Respiratory Syndrome: The 2003 Hong Kong Epidemic</td>
<td>Lekone, Phenyo E.</td>
<td>2008</td>
<td>2008-07-09</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7161832" target="_blank">PMC7161832</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/18615412" target="_blank">18615412</a></td>
<td><a href="https://doi.org/10.1002/bimj.200710431" target="_blank">10.1002/bimj.200710431</a></td>
<td>qqcb85uk</td>
<td>0.332185</td>
<td><a href="Topic_07.html#63wqn2fg">Höhle_2007</a>, <a href="Topic_02.html#z5pg8nij">Griette_2020</a>, <a href="Topic_02.html#a9bnafr7">Renna_2020</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a></td>
<td></td>
</tr>
<tr>
<th id="04p0c4jt">50</th>
<td>Guo_W_2018</td>
<td>Stochastic persistence and stationary distribution in an SIS epidemic model with media coverage</td>
<td>Guo, Wenjuan; Cai, Yongli; Zhang, Qimin; Wang, Weiming</td>
<td>2018</td>
<td>2018-02-15</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7125861" target="_blank">PMC7125861</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.physa.2017.11.137" target="_blank">10.1016/j.physa.2017.11.137</a></td>
<td>04p0c4jt</td>
<td>0.331278</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_08.html#vt6nh3vs">Kenah_2007</a>, <a href="Topic_02.html#a9bnafr7">Renna_2020</a></td>
<td><a href="Topic_06.html#ru2mv7wv">Fan_K_2020</a>, <a href="Topic_06.html#qgxsd1sr">Zhao_2018</a>, <a href="Topic_06.html#774ywcj0">Chen_2018</a>, <a href="Topic_06.html#396rgxno">Yuan_2018</a></td>
</tr>
<tr>
<th id="p8fqvmy8">51</th>
<td>Wang_2013</td>
<td>Modelling the spreading rate of controlled communicable epidemics through an entropy-based thermodynamic model</td>
<td>Wang, WenBin; Wu, ZiNiu; Wang, ChunFeng; Hu, RuiFeng</td>
<td>2013</td>
<td>2013-10-03</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7111546" target="_blank">PMC7111546</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/s11433-013-5321-0" target="_blank">10.1007/s11433-013-5321-0</a></td>
<td>p8fqvmy8</td>
<td>0.327593</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a>, <a href="Topic_07.html#63wqn2fg">Höhle_2007</a>, <a href="Topic_08.html#vt6nh3vs">Kenah_2007</a>, <a href="Topic_07.html#7c4jsvlz">Simon_2010</a></td>
<td><a href="Topic_03.html#ge871ypq">Safi_2011</a>, <a href="Topic_03.html#6ev4ed6z">Chen_2019</a>, <a href="Topic_06.html#69nnyq8p">Imran_2018</a>, <a href="Topic_02.html#c800ynvc">Shi_P_2020</a></td>
</tr>
<tr>
<th id="yjodgy2v">52</th>
<td>Lan_G_2019</td>
<td>A stochastic SIRS epidemic model with non-monotone incidence rate under regime-switching</td>
<td>Lan, Guijie; Lin, Ziyan; Wei, Chunjin; Zhang, Shuwen</td>
<td>2019</td>
<td>2019-11-30</td>
<td>PMC</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7127215" target="_blank">PMC7127215</a></td>
<td></td>
<td><a href="https://doi.org/10.1016/j.jfranklin.2019.09.009" target="_blank">10.1016/j.jfranklin.2019.09.009</a></td>
<td>yjodgy2v</td>
<td>0.321836</td>
<td><a href="Topic_06.html#9nhauvss">Pekalp_2019</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a></td>
<td><a href="Topic_06.html#ru2mv7wv">Fan_K_2020</a>, <a href="Topic_06.html#2pw54nmn">Sirijampa_2018</a>, <a href="Topic_06.html#vpatxjd4">Li_F_2018</a>, <a href="Topic_06.html#774ywcj0">Chen_2018</a></td>
</tr>
<tr>
<th id="kgrdul35">53</th>
<td>Shao_2020</td>
<td>Dynamic models for Coronavirus Disease 2019 and data analysis</td>
<td>Shao, Nian; Zhong, Min; Yan, Yue; Pan, HanShuang; Cheng, Jin; Chen, Wenbin</td>
<td>2020</td>
<td>2020-03-24</td>
<td>PMC</td>
<td>Y</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7168448" target="_blank">PMC7168448</a></td>
<td></td>
<td><a href="https://doi.org/10.1002/mma.6345" target="_blank">10.1002/mma.6345</a></td>
<td>kgrdul35</td>
<td>0.316921</td>
<td></td>
<td></td>
</tr>
<tr>
<th id="hp2d68su">54</th>
<td>Cazelles_2018</td>
<td>Accounting for non-stationarity in epidemiology by embedding time-varying parameters in stochastic models</td>
<td>Cazelles, Bernard; Champagne, Clara; Dureau, Joseph</td>
<td>2018</td>
<td>2018-08-15</td>
<td>COMM-USE</td>
<td>N</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6110518" target="_blank">PMC6110518</a></td>
<td><a href="https://www.ncbi.nlm.nih.gov/pubmed/30110322" target="_blank">30110322</a></td>
<td><a href="https://doi.org/10.1371/journal.pcbi.1006211" target="_blank">10.1371/journal.pcbi.1006211</a></td>
<td>hp2d68su</td>
<td>0.316177</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_08.html#ogliq8nj">Li_K_2011</a></td>
<td><a href="Topic_03.html#ge871ypq">Safi_2011</a>, <a href="Topic_01.html#h0ivbolt">Angulo_2013</a></td>
</tr>
<tr>
<th id="7axgotby">55</th>
<td>Labadin_2020</td>
<td>Transmission Dynamics of 2019-nCoV in Malaysia</td>
<td>Jane Labadin; Boon Hao Hong</td>
<td>2020</td>
<td>2020-02-11</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.02.07.20021188" target="_blank">10.1101/2020.02.07.20021188</a></td>
<td>7axgotby</td>
<td>0.313889</td>
<td><a href="Topic_09.html#evpj67t1">Bifolchi_2013</a>, <a href="Topic_07.html#7c4jsvlz">Simon_2010</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_02.html#a9bnafr7">Renna_2020</a></td>
<td><a href="Topic_02.html#c800ynvc">Shi_P_2020</a>, <a href="Topic_04.html#5po7q64l">Yong_2016</a>, <a href="Topic_14.html#y3l6k0qu">Biswas_2020</a></td>
</tr>
<tr>
<th id="qejmwvst">56</th>
<td>Yang_2020</td>
<td>Rational evaluation of various epidemic models based on the COVID-19 data of China</td>
<td>Wuyue Yang; Dongyan Zhang; Liangrong Peng; Changjing Zhuge; Liu Hong</td>
<td>2020</td>
<td>2020-03-16</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.03.12.20034595" target="_blank">10.1101/2020.03.12.20034595</a></td>
<td>qejmwvst</td>
<td>0.304996</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_04.html#kry1sejo">Lloyd_2009</a>, <a href="Topic_09.html#evpj67t1">Bifolchi_2013</a></td>
<td><a href="Topic_02.html#m87tapjp">Peng_2020</a>, <a href="Topic_02.html#sflu2was">Ghaffarzadegan_2020</a>, <a href="Topic_02.html#azpz6e7q">Distante_2020</a>, <a href="Topic_02.html#9032hh5c">Caccavo_2020</a></td>
</tr>
<tr>
<th id="osez25uj">57</th>
<td>Hackl_2020</td>
<td>Modeling the COVID-19 pandemic - parameter identification and reliability of predictions</td>
<td>Klaus Hackl</td>
<td>2020</td>
<td>2020-04-11</td>
<td>BioRxiv</td>
<td>Y</td>
<td></td>
<td></td>
<td><a href="https://doi.org/10.1101/2020.04.07.20056937" target="_blank">10.1101/2020.04.07.20056937</a></td>
<td>osez25uj</td>
<td>0.304831</td>
<td><a href="Topic_01.html#5ge7ozpd">Zheng_2020</a>, <a href="Topic_01.html#zjnlibu4">Bocharov_2018</a></td>
<td><a href="Topic_02.html#nc5rtwtd">Smeets_2020</a>, <a href="Topic_02.html#p42cgpf0">Zareie_2020</a>, <a href="Topic_02.html#m87tapjp">Peng_2020</a>, <a href="Topic_02.html#sflu2was">Ghaffarzadegan_2020</a></td>
</tr>
<tr>
<th id="63hcyb9e">58</th>
<td>Liu_C_2020</td>
<td>D(2)EA: Depict the Epidemic Picture of COVID-19</td>
<td>Liu, Chenzhengyi; Zhao, Jingwei; Liu, Guohang; Gao, Yuanning; Gao, Xiaofeng</td>
<td>2020</td>
<td>2020-04-07</td>
<td>PMC</td>
<td>Y</td>
<td><a href="https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7137902" target="_blank">PMC7137902</a></td>
<td></td>
<td><a href="https://doi.org/10.1007/s12204-020-2170-7" target="_blank">10.1007/s12204-020-2170-7</a></td>
<td>63hcyb9e</td>
<td>0.300035</td>
<td><a href="Topic_01.html#zjnlibu4">Bocharov_2018</a>, <a href="Topic_01.html#3aa8wgr0">Chowell_2017</a>, <a href="Topic_02.html#a9bnafr7">Renna_2020</a></td>
<td><a href="Topic_02.html#m87tapjp">Peng_2020</a>, <a href="Topic_02.html#a1sk6mka">Li_S_2020</a></td>
</tr>
</tbody>
</table>
</body>
</html>
|
{
"pile_set_name": "Github"
}
|
// Test suite for memoizee's `async: true` (callback-style) mode.
//
// Conventions shared by the tests below:
//   - `a` is the test framework's assertion function, `d` its "done" callback.
//   - `u = {}` is a unique sentinel returned synchronously by the wrapped
//     function, so every call site can assert that the memoized wrapper
//     forwards the underlying function's return value.
//   - `i` counts actual invocations of the underlying function;
//     `invoked` (where present) counts fired user callbacks.
'use strict';

var memoize = require('../..')
  , nextTick = require('next-tick');

module.exports = function () {
	return {
		Regular: {
			// Successful results are cached: five overlapping calls with two
			// distinct argument pairs must run the underlying function only twice,
			// and `delete` must force exactly one re-computation.
			Success: function (a, d) {
				var mfn, fn, u = {}, i = 0, invoked = 0;
				fn = function (x, y, cb) {
					// Resolve asynchronously so concurrent calls overlap while pending.
					nextTick(function () {
						++i;
						cb(null, x + y);
					});
					return u;
				};
				mfn = memoize(fn, { async: true });
				// Five calls while results are still pending: three for (3, 7),
				// two for (5, 8). Each must return the sentinel immediately.
				a(mfn(3, 7, function (err, res) {
					++invoked;
					a.deep([err, res], [null, 10], "Result #1");
				}), u, "Initial");
				a(mfn(3, 7, function (err, res) {
					++invoked;
					a.deep([err, res], [null, 10], "Result #2");
				}), u, "Initial #2");
				a(mfn(5, 8, function (err, res) {
					++invoked;
					a.deep([err, res], [null, 13], "Result B #1");
				}), u, "Initial #2");
				a(mfn(3, 7, function (err, res) {
					++invoked;
					a.deep([err, res], [null, 10], "Result #3");
				}), u, "Initial #2");
				a(mfn(5, 8, function (err, res) {
					++invoked;
					a.deep([err, res], [null, 13], "Result B #2");
				}), u, "Initial #3");
				nextTick(function () {
					// All pending results resolved: 2 real invocations, 5 callbacks.
					a(i, 2, "Init Called");
					a(invoked, 5, "Cb Called");
					// Repeat calls now hit the cache (i must stay at 2 below).
					a(mfn(3, 7, function (err, res) {
						++invoked;
						a.deep([err, res], [null, 10], "Again: Result");
					}), u, "Again: Initial");
					a(mfn(5, 8, function (err, res) {
						++invoked;
						a.deep([err, res], [null, 13], "Again B: Result");
					}), u, "Again B: Initial");
					nextTick(function () {
						a(i, 2, "Init Called #2");
						a(invoked, 7, "Cb Called #2");
						// Invalidate only the (3, 7) entry; (5, 8) stays cached.
						mfn.delete(3, 7);
						a(mfn(3, 7, function (err, res) {
							++invoked;
							a.deep([err, res], [null, 10], "Again: Result");
						}), u, "Again: Initial");
						a(mfn(5, 8, function (err, res) {
							++invoked;
							a.deep([err, res], [null, 13], "Again B: Result");
						}), u, "Again B: Initial");
						nextTick(function () {
							// Exactly one extra invocation caused by the delete.
							a(i, 3, "Init After delete");
							a(invoked, 9, "Cb After delete");
							d();
						});
					});
				});
			},
			// Errored results are not cached: each later batch of calls
			// re-invokes the underlying function (i: 2 -> 4 below).
			Error: function (a, d) {
				var mfn, fn, u = {}, i = 0, e = new Error("Test");
				fn = function (x, y, cb) {
					nextTick(function () {
						++i;
						cb(e);
					});
					return u;
				};
				// `dispose` must never fire: errored results are not stored.
				mfn = memoize(fn, { async: true, dispose: a.never });
				a(mfn(3, 7, function (err, res) {
					a.deep([err, res], [e, undefined], "Result #1");
				}), u, "Initial");
				a(mfn(3, 7, function (err, res) {
					a.deep([err, res], [e, undefined], "Result #2");
				}), u, "Initial #2");
				a(mfn(5, 8, function (err, res) {
					a.deep([err, res], [e, undefined], "Result B #1");
				}), u, "Initial #2");
				a(mfn(3, 7, function (err, res) {
					a.deep([err, res], [e, undefined], "Result #3");
				}), u, "Initial #2");
				a(mfn(5, 8, function (err, res) {
					a.deep([err, res], [e, undefined], "Result B #2");
				}), u, "Initial #3");
				nextTick(function () {
					// Concurrent calls still shared the two pending invocations.
					a(i, 2, "Called #2");
					a(mfn(3, 7, function (err, res) {
						a.deep([err, res], [e, undefined], "Again: Result");
					}), u, "Again: Initial");
					a(mfn(5, 8, function (err, res) {
						a.deep([err, res], [e, undefined], "Again B: Result");
					}), u, "Again B: Initial");
					nextTick(function () {
						// Errors were not cached, so both calls ran fn again.
						a(i, 4, "Again Called #2");
						d();
					});
				});
			}
		},
		Primitive: {
			// Same caching contract as Regular.Success, but with
			// `primitive: true` (arguments serialized into primitive keys).
			Success: function (a, d) {
				var mfn, fn, u = {}, i = 0;
				fn = function (x, y, cb) {
					nextTick(function () {
						++i;
						cb(null, x + y);
					});
					return u;
				};
				mfn = memoize(fn, { async: true, primitive: true });
				a(mfn(3, 7, function (err, res) {
					a.deep([err, res], [null, 10], "Result #1");
				}), u, "Initial");
				a(mfn(3, 7, function (err, res) {
					a.deep([err, res], [null, 10], "Result #2");
				}), u, "Initial #2");
				a(mfn(5, 8, function (err, res) {
					a.deep([err, res], [null, 13], "Result B #1");
				}), u, "Initial #2");
				a(mfn(3, 7, function (err, res) {
					a.deep([err, res], [null, 10], "Result #3");
				}), u, "Initial #2");
				a(mfn(5, 8, function (err, res) {
					a.deep([err, res], [null, 13], "Result B #2");
				}), u, "Initial #3");
				nextTick(function () {
					a(i, 2, "Called #2");
					// Cache hits: i must not grow.
					a(mfn(3, 7, function (err, res) {
						a.deep([err, res], [null, 10], "Again: Result");
					}), u, "Again: Initial");
					a(mfn(5, 8, function (err, res) {
						a.deep([err, res], [null, 13], "Again B: Result");
					}), u, "Again B: Initial");
					nextTick(function () {
						a(i, 2, "Again Called #2");
						// `delete` must work with the same (unserialized) arguments.
						mfn.delete(3, 7);
						a(mfn(3, 7, function (err, res) {
							a.deep([err, res], [null, 10], "Again: Result");
						}), u, "Again: Initial");
						a(mfn(5, 8, function (err, res) {
							a.deep([err, res], [null, 13], "Again B: Result");
						}), u, "Again B: Initial");
						nextTick(function () {
							a(i, 3, "Call After delete");
							d();
						});
					});
				});
			},
			// Errored results are not cached in primitive mode either.
			Error: function (a, d) {
				var mfn, fn, u = {}, i = 0, e = new Error("Test");
				fn = function (x, y, cb) {
					nextTick(function () {
						++i;
						cb(e);
					});
					return u;
				};
				mfn = memoize(fn, { async: true, primitive: true });
				a(mfn(3, 7, function (err, res) {
					a.deep([err, res], [e, undefined], "Result #1");
				}), u, "Initial");
				a(mfn(3, 7, function (err, res) {
					a.deep([err, res], [e, undefined], "Result #2");
				}), u, "Initial #2");
				a(mfn(5, 8, function (err, res) {
					a.deep([err, res], [e, undefined], "Result B #1");
				}), u, "Initial #2");
				a(mfn(3, 7, function (err, res) {
					a.deep([err, res], [e, undefined], "Result #3");
				}), u, "Initial #2");
				a(mfn(5, 8, function (err, res) {
					a.deep([err, res], [e, undefined], "Result B #2");
				}), u, "Initial #3");
				nextTick(function () {
					a(i, 2, "Called #2");
					a(mfn(3, 7, function (err, res) {
						a.deep([err, res], [e, undefined], "Again: Result");
					}), u, "Again: Initial");
					a(mfn(5, 8, function (err, res) {
						a.deep([err, res], [e, undefined], "Again B: Result");
					}), u, "Again B: Initial");
					nextTick(function () {
						a(i, 4, "Again Called #2");
						d();
					});
				});
			},
			// `null` must serialize into a valid primitive cache key.
			"Primitive null arg case": function (a, d) {
				var x = {}, mfn = memoize(function f(id, cb) { cb(null, x); }, {
					async: true,
					primitive: true
				});
				mfn(null, function (err, res) {
					a.deep([err, res], [null, x], "Args");
					d();
				});
			}
		},
		// `clear()` issued while a result is still pending must not break the
		// pending callback, and the next call must compute its own result.
		"Sync Clear": function (a, d) {
			var mfn, fn;
			fn = function (x, cb) {
				nextTick(function () {
					cb(null, x);
				});
			};
			mfn = memoize(fn, { async: true });
			mfn(1, function (err, i) {
				a(i, 1, "First");
			});
			// Clear synchronously, before the first result has resolved.
			mfn.clear();
			mfn(2, function (err, i) {
				a(i, 2, "Second");
				d();
			});
		},
		// Same synchronous-clear scenario with primitive key serialization.
		"Sync Clear: Primitive": function (a, d) {
			var mfn, fn;
			fn = function (x, cb) {
				nextTick(function () {
					cb(null, x);
				});
			};
			mfn = memoize(fn, { async: true, primitive: true });
			mfn(2, function (err, i) {
				a(i, 2, "First");
			});
			mfn(1, function (err, i) {
				a(i, 1, "Second");
				nextTick(d);
			});
			// Clear while both results are pending; the post-clear call below
			// must still receive its own (recomputed) value.
			mfn.clear();
			mfn(2, function (err, i) {
				a(i, 2, "Third");
			});
		}
	};
};
|
{
"pile_set_name": "Github"
}
|
/* Frameless app login window (the -webkit-app-region properties suggest an
   Electron/Chrome-app context): the whole page is a drag region, while the
   interactive controls opt out with -webkit-app-region: no-drag. */
label, input {
  display: block;
}
* {
  font-family: Verdana, sans-serif;
}
html, body {
  height: 100%;
  margin: 0;
  -webkit-app-region: drag;
}
/* Center the form both vertically and horizontally. */
.login-form {
  height: 100%;
  display: flex;
  flex-direction: column;
  justify-content: center;
  align-items: center;
}
.login-form > div {
  margin-bottom: 30px;
}
.login-form > div > label {
  margin-bottom: 25px;
  letter-spacing: 5px;
  font-weight: 100;
  text-transform: uppercase;
  font-size: 18px;
  text-align: center;
  color: rgba(0, 0, 0, 0.7);
}
.login-form > div > input {
  height: 30px;
  width: 225px;
  font-size: 16px;
  -webkit-app-region: no-drag; /* keep the field clickable/typeable */
}
/* Primary action button; hover state below darkens it. */
.login-form > div > button {
  border: 0;
  font-size: 15px;
  font-weight: 100;
  text-transform: uppercase;
  height: 35px;
  width: 225px;
  background-color: #2196F3;
  color: white;
  -webkit-app-region: no-drag;
}
button:hover {
  background-color: #0D47A1;
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (c) 2014 Balabit
* Copyright (c) 2014 Gergely Nagy <algernon@balabit.hu>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*
* As an additional exemption you are allowed to compile & link against the
* OpenSSL libraries as published by the OpenSSL project. See the file
* COPYING for details.
*/
/* Maps a $(getent passwd) member name to the formatter used to render it
 * and to the offset of the corresponding field inside struct passwd.
 * NULL-terminated so it can be scanned by _find_formatter(). */
static formatter_map_t passwd_field_map[] =
{
  { "name", _getent_format_string, offsetof(struct passwd, pw_name) },
  { "uid", _getent_format_uid_gid, offsetof(struct passwd, pw_uid) },
  { "gid", _getent_format_uid_gid, offsetof(struct passwd, pw_gid) },
  { "gecos", _getent_format_string, offsetof(struct passwd, pw_gecos) },
  { "dir", _getent_format_string, offsetof(struct passwd, pw_dir) },
  { "shell", _getent_format_string, offsetof(struct passwd, pw_shell) },
  { NULL, NULL, 0 }
};
/*
 * Implements the $(getent passwd) template function: looks up a passwd
 * entry by user name -- or by uid, when `key` parses as a decimal number --
 * and appends the requested member to `result`.
 *
 * When `member_name` is NULL a default is picked: a numeric key yields the
 * user "name", a name key yields the "uid".
 *
 * Returns TRUE on success; FALSE on lookup error, missing entry, or an
 * unknown member name.
 */
static gboolean
tf_getent_passwd(gchar *key, gchar *member_name, GString *result)
{
  struct passwd pwd;
  struct passwd *res;   /* points into `buf`; NULL when no entry matched */
  char *buf;
  long bufsize;
  int s;                /* getpw*_r status, later reused as formatter index */
  gint64 d;
  gboolean is_num, r;

  /* Size the scratch buffer for the reentrant getpw*_r() calls; fall back
   * to a generous default when the limit is indeterminate (-1). */
  bufsize = sysconf(_SC_GETPW_R_SIZE_MAX);
  if (bufsize == -1)
    bufsize = 16384;

  buf = g_malloc(bufsize);
  if ((is_num = parse_dec_number(key, &d)) == TRUE)
    s = getpwuid_r((uid_t)d, &pwd, buf, bufsize, &res);
  else
    s = getpwnam_r(key, &pwd, buf, bufsize, &res);

  /* s != 0 is a hard error (errno-style return code); res == NULL with
   * s == 0 merely means "no such entry" and is handled further below. */
  if (res == NULL && s != 0)
    {
      msg_error("$(getent passwd) failed",
                evt_tag_str("key", key),
                evt_tag_error("errno"));
      g_free(buf);
      return FALSE;
    }

  if (member_name == NULL)
    {
      if (is_num)
        member_name = "name";
      else
        member_name = "uid";
    }

  /* Lookup succeeded but found nothing: fail quietly. */
  if (res == NULL)
    {
      g_free(buf);
      return FALSE;
    }

  s = _find_formatter(passwd_field_map, member_name);
  if (s == -1)
    {
      msg_error("$(getent passwd): unknown member",
                evt_tag_str("key", key),
                evt_tag_str("member", member_name));
      g_free(buf);
      return FALSE;
    }

  /* Format the selected field; must happen before buf is freed, since the
   * strings in *res live inside buf. */
  r = passwd_field_map[s].format(member_name,
                                 ((uint8_t *)res) + passwd_field_map[s].offset,
                                 result);
  g_free(buf);
  return r;
}
|
{
"pile_set_name": "Github"
}
|
/* Copyright 2019 The OpenTracing Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opentracing.contrib.specialagent.test.aws.sdk1;
import java.util.concurrent.ThreadLocalRandom;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.local.main.ServerRunner;
import com.amazonaws.services.dynamodbv2.local.server.DynamoDBProxyServer;
import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
import com.amazonaws.services.dynamodbv2.model.KeyType;
import com.amazonaws.services.dynamodbv2.model.ProvisionedThroughput;
import io.opentracing.contrib.specialagent.TestUtil;
import io.opentracing.contrib.specialagent.TestUtil.ComponentSpanCount;
public class Aws1ITest {
  /**
   * Boots an in-memory DynamoDB Local server, issues one CreateTable call
   * through an AWS SDK v1 client, then verifies that exactly one
   * "java-aws-sdk" span was recorded.
   */
  public static void main(final String[] args) throws Exception {
    // DynamoDB Local needs the sqlite4java native libraries on this path.
    System.setProperty("sqlite4java.library.path", "src/test/resources/libs");

    final DynamoDBProxyServer localDynamo = ServerRunner.createServerFromCommandLineArgs(new String[] {"-inMemory", "-port", "8000"});
    localDynamo.start();

    final AmazonDynamoDB client = buildClient();
    try {
      final String tableName = "tableName-" + ThreadLocalRandom.current().nextLong(Long.MAX_VALUE);
      createTable(client, tableName);
    }
    catch (final Exception e) {
      // The request may fail (note the 1 ms connect timeout on the client);
      // failures are logged and ignored -- the span check below still runs.
      System.out.println("Exception: " + e.getMessage() + "\nIgnoring.");
    }

    localDynamo.stop();
    client.shutdown();

    TestUtil.checkSpan(new ComponentSpanCount("java-aws-sdk", 1));
    System.exit(0);
  }

  /** Builds a client pointed at the local endpoint with dummy credentials. */
  private static AmazonDynamoDB buildClient() {
    final BasicAWSCredentials credentials = new BasicAWSCredentials("access_key_id", "secret_key_id");
    return AmazonDynamoDBClientBuilder
      .standard()
      .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration("http://localhost:8000", "us-west-2"))
      .withCredentials(new AWSStaticCredentialsProvider(credentials))
      .withClientConfiguration(new ClientConfiguration().withConnectionTimeout(1))
      .build();
  }

  /** Creates a table keyed by a single string hash attribute. */
  private static void createTable(final AmazonDynamoDB client, final String tableName) {
    final String hashKeyName = tableName + "-pk";
    final CreateTableRequest request = new CreateTableRequest()
      .withTableName(tableName)
      .withKeySchema(new KeySchemaElement().withAttributeName(hashKeyName).withKeyType(KeyType.HASH))
      .withAttributeDefinitions(new AttributeDefinition().withAttributeName(hashKeyName).withAttributeType("S"))
      .withProvisionedThroughput(new ProvisionedThroughput().withReadCapacityUnits(10L).withWriteCapacityUnits(5L));
    client.createTable(request);
    System.out.println("Table " + tableName + " created");
  }
}
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<meta name="viewport"
content="width=device-width, initial-scale=1.0,maximum-scale=1.0, user-scalable=no,minimal-ui">
<title>Tower</title>
<style>
* {
margin: 0;
padding: 0
}
img {
width: 100%
}
html {
background: #FFF;
height: 100%
}
body {
font-family: "Helvetica Neue", Arial, Helvetica, sans-serif;
margin: 0 auto;
text-align: center;
width: 100%;
height: 100%;
background: #F95240 url(./assets/main-bg.png)
}
@media screen and (min-height: 560px) {
html {
font-size: 100px
}
}
@media screen and (min-height: 640px) {
html {
font-size: 112.5px
}
}
@media screen and (min-height: 720px) {
html {
font-size: 125px
}
}
@media screen and (min-height: 800px) {
html {
font-size: 137.5px
}
}
@media screen and (min-height: 880px) {
html {
font-size: 150px
}
}
@media screen and (min-height: 960px) {
html {
font-size: 162.5px
}
}
@media screen and (min-height: 1040px) {
html {
font-size: 180px
}
}
@media screen and (min-height: 1200px) {
html {
font-size: 200px
}
}
html {
font-size: 17.6vh
}
#canvas {
position: fixed;
left: 0;
top: 0;
right: 0;
bottom: 0;
margin: auto;
}
a {
text-decoration: none
}
li, ul, ol {
list-style-type: none;
padding: 0;
margin: 0
}
.hide {
display: none
}
.clear {
clear: both
}
.loading {
background-color: #F05A50;
height: 100%;
width: 100%;
}
.loading .main {
width: 60%;
margin: 0 auto;
color: #FFF
}
.loading .main img {
width: 60%;
margin: 1rem auto 0
}
.loading .main .title {
font-size: .3rem
}
.loading .main .text {
font-size: .15rem
}
.loading .main .bar {
height: .12rem;
width: 100%;
border: 3px solid #FFF;
border-radius: .6rem;
margin: .1rem 0;
}
.loading .main .bar .sub {
height: .1rem;
width: 98%;
margin: .008rem auto 0;
}
.loading .main .bar .percent {
height: 100%;
width: 0;
background-color: #FFF;
border-radius: .6rem;
}
.loading .logo {
position: absolute;
bottom: .3rem;
left: 0;
right: 0
}
.loading .logo img {
width: 1rem
}
.content {
height: 100vh;
margin: 0 auto;
position: relative;
}
.landing .title {
width: 60%;
}
.landing .logo {
width: 30%;
position: absolute;
right: .2rem;
top: .2rem;
}
.landing .action-2 {
position: absolute;
bottom: .2rem;
width: 100%;
}
.landing .start {
width: 65%;
}
.slideTop {
-webkit-animation: st 1s ease-in-out;
animation: st 1s ease-in-out;
}
@-webkit-keyframes st {
0% {
transform: translateZ(0)
}
100% {
transform: translate3d(0, -100%, 0)
}
}
@keyframes st {
0% {
transform: translateZ(0)
}
100% {
transform: translate3d(0, -100%, 0)
}
}
.slideBottom {
-webkit-animation: sb 1s ease-in-out;
animation: sb 1s ease-in-out;
}
@-webkit-keyframes sb {
0% {
transform: translateZ(0)
}
100% {
transform: translate3d(0, 200%, 0)
}
}
@keyframes sb {
0% {
transform: translateZ(0)
}
100% {
transform: translate3d(0, 200%, 0)
}
}
.swing {
-webkit-animation: sw 2s ease-in-out alternate infinite;
animation: sw 2s ease-in-out alternate infinite;
}
@-webkit-keyframes sw {
0% {
transform: rotate(5deg);
transform-origin: top center;
}
100% {
transform: rotate(-5deg);
transform-origin: top center;
}
}
@keyframes sw {
0% {
transform: rotate(5deg);
transform-origin: top center;
}
100% {
transform: rotate(-5deg);
transform-origin: top center;
}
}
.modal .mask {
background-color: #000;
opacity: .6;
position: fixed;
height: 100%;
width: 100%;
top: 0;
left: 0;
}
.modal .modal-content {
position: fixed;
height: 100%;
width: 90%;
margin-top: .3rem;
top: 0;
}
.modal .main {
width: 85%;
margin: 0 auto;
}
.modal .container {
position: relative
}
.modal .bg {
width: 100%;
position: absolute;
top: 0;
left: 0
}
.modal .modal-main {
width: 100%;
position: absolute;
top: 0;
left: 0;
margin-top: -0.4rem;
}
.modal .over-img {
width: 45%;
margin: .8rem auto 0
}
.modal .over-score {
margin-top: -0.2rem;
font-size: .5rem;
color: #FF735C;
text-shadow: -2px -2px 0 #FFF, 2px -2px 0 #FFF, -2px 2px 0 #FFF, 2px 2px 0 #FFF;
}
.modal .tip {
font-size: .16rem;
color: #9B724E;
}
.modal .over-button-b {
width: 70%;
margin: 0.1rem auto 0
}
.wxShare {
background: #000;
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
z-index: 11;
opacity: .9
}
.wxShare img {
width: 50%;
float: right;
margin: 10px 10px 0 0
}
@font-face {
font-family: 'wenxue';
src: url('./assets/wenxue.eot');
src: url('./assets/wenxue.eot'),
url('./assets/wenxue.woff'),
url('./assets/wenxue.ttf'),
url('./assets/wenxue.svg');
}
.font-wenxue {
font-family: 'wenxue';
}
</style>
</head>
<body>
<canvas id="canvas" class="hide"></canvas>
<div class="content">
<div class="loading">
<div class="main"><img
src="./assets/main-loading.gif">
<div class="progress">
<div class="title font-wenxue">0%</div>
<div class="bar">
<div class="sub">
<div class="percent"></div>
</div>
</div>
<div class="text">加载中</div>
</div>
</div>
</div>
<div class="landing hide">
<div class="action-1">
<img
src="./assets/main-index-title.png"
class="title swing">
</div>
<div class="action-2"><img id="start"
src="./assets/main-index-start.png"
class="start"></div>
</div>
<div id="modal" class="modal hide">
<div class="mask"></div>
<div class="js-modal-content modal-content">
<div class="main">
<div class="container"><img
src="./assets/main-modal-bg.png"
class="bg">
<div class="modal-main">
<div id="over-modal" class="hide js-modal-card"><img
src="./assets/main-modal-over.png"
class="over-img">
<div id="score" class="over-score font-wenxue"></div>
<div id="over-zero" class="hide">
<div class="tip"><p>再来一次吧!</p>
<img
src="./assets/main-modal-again-b.png"
class="over-button-b js-reload"><img
src="./assets/main-modal-invite-b.png"
class="over-button-b js-invite"></div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="wxShare hide">
<img src="./assets/main-share-icon.png">
</div>
</div>
<script src="./dist/main.js"></script>
<script src="./assets/zepto-1.1.6.min.js"></script>
<script>
// Shared game state, set by the loader callbacks and click handlers below.
var domReady, loadFinish, canvasReady, loadError, gameStart, game, score, successCount
// init window height and width
var gameWidth = window.innerWidth
var gameHeight = window.innerHeight
var ratio = 1.5
// Keep the playfield at least `ratio` times taller than it is wide:
// on wide viewports, shrink the width instead of stretching the game.
if (gameHeight / gameWidth < ratio) {
  gameWidth = Math.ceil(gameHeight / ratio)
}
$('.content').css({ "height": gameHeight + "px", "width": gameWidth + "px" })
$('.js-modal-content').css({ "width": gameWidth + "px" })
// loading animation
// Swap the loading screen for the landing screen, but only once both the
// DOM (window load) and all canvas assets have reported ready.
function hideLoading() {
  if (!domReady || !canvasReady) return
  $('#canvas').show()
  loadFinish = true
  // Let the loading animation finish before revealing the landing page.
  setTimeout(function () {
    $('.loading').hide()
    $('.landing').show()
  }, 1000)
}
// Asset-loader progress callback: refreshes the progress bar and flips
// canvasReady once every asset has loaded.
function updateLoading(status) {
  if (status.failed > 0 && !loadError) {
    loadError = true
    alert('加载失败 请刷新后重试')
    return
  }
  var pct = Math.floor((status.success / status.total) * 100)
  if (pct === 100 && !canvasReady) {
    canvasReady = true
    hideLoading()
  }
  // Display caps at 98% so the bar never shows "done" before the hand-off.
  var label = Math.min(pct, 98) + '%'
  $('.loading .title').text(label)
  $('.loading .percent').css({
    'width': label
  })
}
// Reveal the game-over modal along with its "play again / invite" options.
function overShowOver() {
  var panels = ['#modal', '#over-modal', '#over-zero']
  panels.forEach(function (selector) {
    $(selector).show()
  })
}
// game customization options passed to the TowerGame engine; the
// setGame* hooks are called back by the engine with current stats.
const option = {
  width: gameWidth,
  height: gameHeight,
  canvasId: 'canvas',
  soundOn: true,
  setGameScore: function (s) {
    score = s
  },
  setGameSuccess: function (s) {
    successCount = s
  },
  setGameFailed: function (f) {
    $('#score').text(score)
    if (f >= 3) overShowOver() // third failure ends the game
  }
}
// game init with option: load assets (reporting progress via
// updateLoading), then start background music and initialize the engine.
function gameReady() {
  game = TowerGame(option)
  game.load(function () {
    game.playBgm()
    game.init()
  }, updateLoading)
}
// Inside WeChat's browser, wait for its JS bridge before touching media.
var isWechat = navigator.userAgent.toLowerCase().indexOf("micromessenger") !== -1
if (isWechat) {
  document.addEventListener("WeixinJSBridgeReady", gameReady, false)
} else {
  gameReady()
}
// Animate the landing screen away (title slides up, start button slides
// down), then remove it just before the 1s CSS animations complete.
function indexHide() {
  var landing = $('.landing')
  landing.find('.action-1').addClass('slideTop')
  landing.find('.action-2').addClass('slideBottom')
  setTimeout(function () {
    landing.hide()
  }, 950)
}
// click event
// Start button: guard against double-starts, hide the landing screen, and
// kick the engine off once the exit animation is underway.
$('#start').on('click', function () {
  if (gameStart) return
  gameStart = true
  indexHide()
  setTimeout(game.start, 400)
})
// "Play again": reload with a cache-busting timestamp query parameter.
$('.js-reload').on('click', function () {
  window.location.href = window.location.href + '?s=' + (+new Date())
})
// "Invite": show the share overlay; tapping the overlay dismisses it.
$('.js-invite').on('click', function () {
  $('.wxShare').show()
})
$('.wxShare').on('click', function () {
  $('.wxShare').hide()
})
// listener
window.addEventListener('load', function () {
  domReady = true
  hideLoading()
}, false);
</script>
<script>
// Standard asynchronous Google Analytics (analytics.js) bootstrap snippet.
(function (i, s, o, g, r, a, m) {
  i['GoogleAnalyticsObject'] = r;
  i[r] = i[r] || function () {
    (i[r].q = i[r].q || []).push(arguments)
  }, i[r].l = 1 * new Date();
  a = s.createElement(o),
    m = s.getElementsByTagName(o)[0];
  a.async = 1;
  a.src = g;
  m.parentNode.insertBefore(a, m)
})(window, document, 'script', '//www.google-analytics.com/analytics.js', 'ga');
ga('create', 'UA-46444752-20', 'auto');
ga('send', 'pageview');
// Baidu Tongji (Baidu Analytics) async loader.
var _hmt = _hmt || [];
(function () {
  var hm = document.createElement("script");
  hm.src = "https://hm.baidu.com/hm.js?c1b044f909411ac4213045f0478e96fc";
  var s = document.getElementsByTagName("script")[0];
  s.parentNode.insertBefore(hm, s);
})();
</script>
</body>
</html>
|
{
"pile_set_name": "Github"
}
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en">
<head>
<title>RedmineWikiFormatting (Markdown)</title>
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<link rel="stylesheet" type="text/css" href="../wiki_syntax_detailed.css" />
</head>
<body>
<h1><a name="1" class="wiki-page"></a>Wiki formatting (Markdown)</h1>
<ul class='toc'>
<li><a href='#2'>Links</a></li>
<ul>
<li><a href='#3'>Redmine links</a></li>
<li><a href='#4'>External links</a></li>
</ul>
<li><a href='#5'>Text formatting</a></li>
<ul>
<li><a href='#6'>Font style</a></li>
<li><a href='#7'>Inline images</a></li>
<li><a href='#8'>Headings</a></li>
<li><a href='#10'>Blockquotes</a></li>
<li><a href='#11'>Table of content</a></li>
<li><a href='#14'>Horizontal Rule</a></li>
</ul>
<li><a href='#12'>Macros</a></li>
<li><a href='#13'>Code highlighting</a></li>
</ul>
<h2><a name="2" class="wiki-page"></a>Links</h2>
<h3><a name="3" class="wiki-page"></a>Redmine links</h3>
<p>Redmine allows hyperlinking between resources (issues, changesets, wiki pages...) from anywhere wiki formatting is used.</p>
<ul>
<li>Link to an issue: <strong>#124</strong> (displays <del><a href="#" class="issue" title="bulk edit doesn't change the category or fixed version properties (Closed)">#124</a></del>, link is struck through if the issue is closed)</li>
<li>Link to an issue including tracker name and subject: <strong>##124</strong> (displays <a href="#" class="issue" title="bulk edit doesn't change the category or fixed version properties (New)">Bug #124</a>: bulk edit doesn't change the category or fixed version properties)</li>
<li>Link to an issue note: <strong>#124-6</strong>, or <strong>#124#note-6</strong></li>
<li>Link to an issue note within the same issue: <strong>#note-6</strong></li>
</ul>
<p>Wiki links:</p>
<ul>
<li><strong>[[Guide]]</strong> displays a link to the page named 'Guide': <a href="#" class="wiki-page">Guide</a></li>
<li><strong>[[Guide#further-reading]]</strong> takes you to the anchor "further-reading". Headings get automatically assigned anchors so that you can refer to them: <a href="#" class="wiki-page">Guide</a></li>
<li><strong>[[#further-reading]]</strong> link to the anchor "further-reading" of the current page: <a href="#" class="wiki-page">#further-reading</a></li>
<li><strong>[[Guide|User manual]]</strong> displays a link to the same page but with a different text: <a href="#" class="wiki-page">User manual</a></li>
</ul>
<p>You can also link to pages of an other project wiki:</p>
<ul>
<li><strong>[[sandbox:some page]]</strong> displays a link to the page named 'Some page' of the Sandbox wiki</li>
<li><strong>[[sandbox:]]</strong> displays a link to the Sandbox wiki main page</li>
</ul>
<p>Wiki links are displayed in red if the page doesn't exist yet, eg: <a href="#" class="wiki-page new">Nonexistent page</a>.</p>
<p>Links to other resources:</p>
<ul>
<li>Documents:
<ul>
<li><strong>document#17</strong> (link to document with id 17)</li>
<li><strong>document:Greetings</strong> (link to the document with title "Greetings")</li>
<li><strong>document:"Some document"</strong> (double quotes can be used when document title contains spaces)</li>
<li><strong>sandbox:document:"Some document"</strong> (link to a document with title "Some document" in other project "sandbox")</li>
</ul>
</li>
</ul>
<ul>
<li>Versions:
<ul>
<li><strong>version#3</strong> (link to version with id 3)</li>
<li><strong>version:1.0.0</strong> (link to version named "1.0.0")</li>
<li><strong>version:"1.0 beta 2"</strong></li>
<li><strong>sandbox:version:1.0.0</strong> (link to version "1.0.0" in the project "sandbox")</li>
</ul>
</li>
</ul>
<ul>
<li>Attachments:
<ul>
<li><strong>attachment:file.zip</strong> (link to the attachment of the current object named file.zip)</li>
<li>For now, attachments of the current object can be referenced only (if you're on an issue, it's possible to reference attachments of this issue only)</li>
</ul>
</li>
</ul>
<ul>
<li>Changesets:
<ul>
<li><strong>r758</strong> (link to a changeset)</li>
<li><strong>commit:c6f4d0fd</strong> (link to a changeset with a non-numeric hash)</li>
<li><strong>svn1|r758</strong> (link to a changeset of a specific repository, for projects with multiple repositories)</li>
<li><strong>commit:hg|c6f4d0fd</strong> (link to a changeset with a non-numeric hash of a specific repository)</li>
<li><strong>sandbox:r758</strong> (link to a changeset of another project)</li>
<li><strong>sandbox:commit:c6f4d0fd</strong> (link to a changeset with a non-numeric hash of another project)</li>
</ul>
</li>
</ul>
<ul>
<li>Repository files:
<ul>
<li><strong>source:some/file</strong> (link to the file located at /some/file in the project's repository)</li>
<li><strong>source:some/file@52</strong> (link to the file's revision 52)</li>
<li><strong>source:some/file#L120</strong> (link to line 120 of the file)</li>
<li><strong>source:some/file@52#L120</strong> (link to line 120 of the file's revision 52)</li>
<li><strong>source:"some file@52#L120"</strong> (use double quotes when the URL contains spaces)</li>
<li><strong>export:some/file</strong> (force the download of the file)</li>
<li><strong>source:svn1|some/file</strong> (link to a file of a specific repository, for projects with multiple repositories)</li>
<li><strong>sandbox:source:some/file</strong> (link to the file located at /some/file in the repository of the project "sandbox")</li>
<li><strong>sandbox:export:some/file</strong> (force the download of the file)</li>
</ul>
</li>
</ul>
<ul>
<li>Forums:
<ul>
<li><strong>forum#1</strong> (link to forum with id 1)</li>
<li><strong>forum:Support</strong> (link to forum named Support)</li>
<li><strong>forum:"Technical Support"</strong> (use double quotes if forum name contains spaces)</li>
</ul>
</li>
</ul>
<ul>
<li>Forum messages:
<ul>
<li><strong>message#1218</strong> (link to message with id 1218)</li>
</ul>
</li>
</ul>
<ul>
<li>Projects:
<ul>
<li><strong>project#3</strong> (link to project with id 3)</li>
<li><strong>project:some-project</strong> (link to project with name or slug of "some-project")</li>
<li><strong>project:"Some Project"</strong> (use double quotes for project name containing spaces)</li>
</ul>
</li>
</ul>
<ul>
<li>News:
<ul>
<li><strong>news#2</strong> (link to news item with id 2)</li>
<li><strong>news:Greetings</strong> (link to news item named "Greetings")</li>
<li><strong>news:"First Release"</strong> (use double quotes if news item name contains spaces)</li>
</ul>
</li>
</ul>
<ul>
<li>Users:
<ul>
<li><strong>user#2</strong> (link to user with id 2)</li>
<li><strong>user:jsmith</strong> (Link to user with login jsmith)</li>
<li><strong>@jsmith</strong> (Link to user with login jsmith)</li>
</ul>
</li>
</ul>
<p>Escaping:</p>
<ul>
<li>You can prevent Redmine links from being parsed by preceding them with an exclamation mark: !</li>
</ul>
<h3><a name="4" class="wiki-page"></a>External links</h3>
<p>URLs (starting with: www, http, https, ftp, ftps, sftp and sftps) and email addresses are automatically turned into clickable links:</p>
<pre>
http://www.redmine.org, someone@foo.bar
</pre>
<p>displays: <a class="external" href="http://www.redmine.org">http://www.redmine.org</a>, <a href="mailto:someone@foo.bar" class="email">someone@foo.bar</a></p>
<p>If you want to display a specific text instead of the URL, you can use the standard markdown syntax:</p>
<pre>
[Redmine web site](http://www.redmine.org)
</pre>
<p>displays: <a href="http://www.redmine.org" class="external">Redmine web site</a></p>
<h2><a name="5" class="wiki-page"></a>Text formatting</h2>
<p>For things such as headlines, bold, tables, lists, Redmine supports Markdown syntax. See <a class="external" href="http://daringfireball.net/projects/markdown/syntax">http://daringfireball.net/projects/markdown/syntax</a> for information on using any of these features. A few samples are included below, but the engine is capable of much more of that.</p>
<h3><a name="6" class="wiki-page"></a>Font style</h3>
<pre>
* **bold**
* *Italic*
* ***bold italic***
* _underline_
* ~~strike-through~~
</pre>
<p>Display:</p>
<ul>
<li><strong>bold</strong></li>
<li><em>italic</em></li>
<li><em><strong>bold italic</strong></em></li>
<li><u>underline</u></li>
<li><del>strike-through</del></li>
</ul>
<h3><a name="7" class="wiki-page"></a>Inline images</h3>
<ul>
<li><strong></strong> displays an image located at image_url (markdown syntax)</li>
<li>If you have an image attached to your wiki page, it can be displayed inline using its filename: <strong></strong></li>
<li>Images in your computer's clipboard can be pasted directly using Ctrl-v or Command-v (note that Internet Explorer is not supported).</li>
<li>Image files can be dragged onto the text area in order to be uploaded and embedded.</li>
</ul>
<h3><a name="8" class="wiki-page"></a>Headings</h3>
<pre>
# Heading
## Subheading
### Subsubheading
</pre>
<p>Redmine assigns an anchor to each of those headings thus you can link to them with "#Heading", "#Subheading" and so forth.</p>
<h3><a name="10" class="wiki-page"></a>Blockquotes</h3>
<p>Start the paragraph with <strong>></strong></p>
<pre>
> Rails is a full-stack framework for developing database-backed web applications according to the Model-View-Control pattern.
To go live, all you need to add is a database and a web server.
</pre>
<p>Display:</p>
<blockquote>
<p>Rails is a full-stack framework for developing database-backed web applications according to the Model-View-Control pattern.<br />To go live, all you need to add is a database and a web server.</p>
</blockquote>
<h3><a name="11" class="wiki-page"></a>Table of content</h3>
<pre>
{{toc}} => left aligned toc
{{>toc}} => right aligned toc
</pre>
<h3><a name="14" class="wiki-page"></a>Horizontal Rule</h3>
<pre>
---
</pre>
<h2><a name="12" class="wiki-page"></a>Macros</h2>
<p>Redmine has the following builtin macros:</p>
<p>
<dl>
<dt><code>hello_world</code></dt>
<dd><p>Sample macro.</p></dd>
<dt><code>macro_list</code></dt>
<dd><p>Displays a list of all available macros, including description if available.</p></dd>
<dt><code>child_pages</code></dt>
<dd><p>Displays a list of child pages. With no argument, it displays the child pages of the current wiki page. Examples:</p>
<pre><code>{{child_pages}} -- can be used from a wiki page only
{{child_pages(depth=2)}} -- display 2 levels nesting only</code></pre></dd>
<dt><code>include</code></dt>
<dd><p>Include a wiki page. Example:</p>
<pre><code>{{include(Foo)}}</code></pre>
<p>or to include a page of a specific project wiki:</p>
<pre><code>{{include(projectname:Foo)}}</code></pre></dd>
<dt><code>collapse</code></dt>
<dd><p>Inserts of collapsed block of text. Example:</p>
<pre><code>{{collapse(View details...)
This is a block of text that is collapsed by default.
It can be expanded by clicking a link.
}}</code></pre></dd>
<dt><code>thumbnail</code></dt>
<dd><p>Displays a clickable thumbnail of an attached image. Examples:</p>
<pre>{{thumbnail(image.png)}}
{{thumbnail(image.png, size=300, title=Thumbnail)}}</pre></dd>
<dt><code>issue</code></dt>
<dd><p>Inserts a link to an issue with flexible text. Examples:</p>
<pre>{{issue(123)}} -- Issue #123: Enhance macro capabilities
{{issue(123, project=true)}} -- Andromeda - Issue #123: Enhance macro capabilities
{{issue(123, tracker=false)}} -- #123: Enhance macro capabilities
{{issue(123, subject=false, project=true)}} -- Andromeda - Issue #123</pre></dd>
</dl>
</p>
<h2><a name="13" class="wiki-page"></a>Code highlighting</h2>
<p>Default code highlighting relies on <a href="http://rouge.jneen.net/" class="external">Rouge</a>, a syntax highlighting library written in pure Ruby. It supports many commonly used languages such as <strong>c</strong>, <strong>cpp</strong> (c++), <strong>csharp</strong> (c#, cs), <strong>css</strong>, <strong>diff</strong> (patch, udiff), <strong>go</strong> (golang), <strong>groovy</strong>, <strong>html</strong>, <strong>java</strong>, <strong>javascript</strong> (js), <strong>kotlin</strong>, <strong>objective_c</strong> (objc), <strong>perl</strong> (pl), <strong>php</strong>, <strong>python</strong> (py), <strong>r</strong>, <strong>ruby</strong> (rb), <strong>sass</strong>, <strong>scala</strong>, <strong>shell</strong> (bash, zsh, ksh, sh), <strong>sql</strong>, <strong>swift</strong>, <strong>xml</strong> and <strong>yaml</strong> (yml) languages, where the names inside parentheses are aliases. Please refer to <a href="https://www.redmine.org/projects/redmine/wiki/RedmineCodeHighlightingLanguages" class="external">https://www.redmine.org/projects/redmine/wiki/RedmineCodeHighlightingLanguages</a> for the full list of supported languages.</p>
<p>You can highlight code at any place that supports wiki formatting using this syntax (note that the language name or alias is case-insensitive):</p>
<pre>
``` ruby
Place your code here.
```
</pre>
<p>Example:</p>
<pre><code class="ruby syntaxhl"><span class="c1"># The Greeter class</span>
<span class="k">class</span> <span class="nc">Greeter</span>
<span class="k">def</span> <span class="nf">initialize</span><span class="p">(</span><span class="nb">name</span><span class="p">)</span>
<span class="vi">@name</span> <span class="o">=</span> <span class="nb">name</span><span class="p">.</span><span class="nf">capitalize</span>
<span class="k">end</span>
<span class="k">def</span> <span class="nf">salute</span>
<span class="nb">puts</span> <span class="s2">"Hello </span><span class="si">#{</span><span class="vi">@name</span><span class="si">}</span><span class="s2">!"</span>
<span class="k">end</span>
<span class="k">end</span>
</code></pre>
</body>
</html>
|
{
"pile_set_name": "Github"
}
|
/*******************************************************************************
* AMetal
* ----------------------------
* innovating embedded platform
*
* Copyright (c) 2001-2018 Guangzhou ZHIYUAN Electronics Co., Ltd.
* All rights reserved.
*
* Contact information:
* web site: http://www.zlg.cn/
*******************************************************************************/
/**
* \file
* \brief AES 加密解密例程,通过 HW 层接口实现
*
* - 实验现象:
*
* AES对数据加密然后解密,如果解密出来数据与加密前数据一样,则串口打印“Aes ency-decy success!\r\n”。
* 反之打印"Aes ency-decy failed!\r\n"。
*
* \note
* 1. 如需观察串口打印的调试信息,需要将 PIOA_10 引脚连接 PC 串口的 TXD,
* PIOA_9 引脚连接 PC 串口的 RXD;
* 2. 如果调试串口使用与本例程相同,则不应在后续继续使用调试信息输出函数
* (如:AM_DBG_INFO())。
*
* \par 源代码
* \snippet demo_hc32l19x_core_aes_ency_decy.c src_hc32l19x_core_aes_ency_decy
*
*
* \internal
* \par Modification History
* - 1.00 19-10-09 ly, first implementation
* \endinternal
*/
/**
* \addtogroup demo_if_hc32l19x_core_aes_ency_decy
* \copydoc demo_hc32l19x_core_aes_ency_decy.c
*/
/** [src_hc32l19x_core_aes_ency_decy] */
#include "ametal.h"
#include "am_hc32.h"
#include "am_gpio.h"
#include "hc32_pin.h"
#include "am_hc32_aes.h"
#include "demo_hc32_entries.h"
/**
 * \brief Key length (128-bit AES key)
 */
#define AES_KEY_LEN AM_HC32_AES_KEY_LEN_128_BIT
/*******************************************************************************
* 全局变量
******************************************************************************/
/**< \brief Plaintext data to be encrypted */
static uint32_t aes_data[4] = {0x33221100, 0x77665544, 0xBBAA9988, 0xFFEEDDCC};
/**< \brief AES key material */
static uint32_t aes_key[8] = {0x03020100, 0x07060504, 0x0B0A0908, 0x0F0E0D0C,\
                              0x13121110, 0x17161514, 0x1B1A1918, 0x1F1E1D1C};
/**
 * \brief Example entry point.
 *
 * Enables the AES peripheral clock, then delegates to the HW-layer demo
 * routine which encrypts aes_data with aes_key, decrypts it again and
 * reports success or failure over the debug UART.
 */
void demo_hc32l19x_core_hw_aes_ency_decy_entry (void)
{
    AM_DBG_INFO("Test start: demo-aes-ency-decy test !\r\n");

    /* Enable the AES peripheral clock */
    am_clk_enable (CLK_AES);

    /* Run the HW-layer encrypt/decrypt demo */
    demo_hc32_hw_aes_ency_decy_entry(HC32_AES,
                                     AES_KEY_LEN,
                                     aes_data,
                                     aes_key);
}
/* end of file */
|
{
"pile_set_name": "Github"
}
|
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 1997-2018 Oracle and/or its affiliates. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can
* obtain a copy of the License at
* https://oss.oracle.com/licenses/CDDL+GPL-1.1
* or LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at LICENSE.txt.
*
* GPL Classpath Exception:
* Oracle designates this particular file as subject to the "Classpath"
* exception as provided by Oracle in the GPL Version 2 section of the License
* file that accompanied this code.
*
* Modifications:
* If applicable, add the following below the License Header, with the fields
* enclosed by brackets [] replaced by your own identifying information:
* "Portions Copyright [year] [name of copyright owner]"
*
* Contributor(s):
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.glassfish.tests.webapi;
import java.io.PrintStream;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
/**
 * Servlet context listener used by the web-API tests: when the context is
 * initialized it attempts to load the class {@code TestCacaoList} via
 * {@link Class#forName(String)} and records the outcome in the public
 * static {@link #msg} field so the test harness can inspect it.
 */
public final class TestListener
    implements ServletContextListener
{
    /** Outcome of the load attempt; read by the test harness. */
    public static String msg = "Context not YET initialized";

    /**
     * Tries to load {@code TestCacaoList} with the current class loader
     * and stores a success or failure message in {@link #msg}.
     *
     * @param event the servlet context event (unused)
     */
    public void contextInitialized(ServletContextEvent event)
    {
        System.out.println("TestListener : contextInitialized called");
        try
        {
            System.out.println("TestListener : Trying to load TestCacaoList");
            // The loaded Class object itself is not needed; the side effect
            // of loading (or failing to load) is what the test observes.
            Class.forName("TestCacaoList");
            msg = "Class TestCacaoList loaded successfully from listener";
            System.out.println(msg);
        }
        catch (Exception ex)
        {
            msg = "Exception while loading class TestCacaoList from listener : " + ex.toString();
            System.out.println(msg);
        }
        System.out.println("TestListener : contextInitialized DONE");
    }

    /** No cleanup is required on context shutdown. */
    public void contextDestroyed(ServletContextEvent event) {}
}
|
{
"pile_set_name": "Github"
}
|
// Pre-compile the doT templates from ../templates into ./render/,
// registering them under the global template namespace "_page.render".
require("dot").process({
  global: "_page.render",
  destination: __dirname + "/render/",
  path: __dirname + "/../templates",
});

const express = require("express");
const http = require("http");
const render = require("./render");

const app = express();

// Root route: respond with the rendered dashboard template.
app.get("/", (req, res) => {
  res.send(render.dashboard({ text: "Good morning!" }));
});

// Error-handling middleware (Express recognizes it by its four-argument
// signature; `next` must stay in the parameter list even though unused).
app.use((err, req, res, next) => {
  console.error(err.stack);
  res.status(500).send("Something broke!");
});

const httpServer = http.createServer(app);
httpServer.listen(3000, () => {
  console.log("Listening on port %d", httpServer.address().port);
});
|
{
"pile_set_name": "Github"
}
|
// Code generated by github.com/fjl/gencodec. DO NOT EDIT.
package whisperv6
import (
"encoding/json"
"github.com/ethereum/go-ethereum/common/hexutil"
)
var _ = (*criteriaOverride)(nil)
// MarshalJSON encodes the Criteria c into its JSON wire format.
//
// NOTE: this function was generated by github.com/fjl/gencodec (see the
// file header); manual edits, including these comments, are lost when the
// file is regenerated.
func (c Criteria) MarshalJSON() ([]byte, error) {
	// Shadow struct carrying the JSON field tags used for encoding.
	type Criteria struct {
		SymKeyID     string        `json:"symKeyID"`
		PrivateKeyID string        `json:"privateKeyID"`
		Sig          hexutil.Bytes `json:"sig"`
		MinPow       float64       `json:"minPow"`
		Topics       []TopicType   `json:"topics"`
		AllowP2P     bool          `json:"allowP2P"`
	}
	var enc Criteria
	// Copy every field into the tagged shadow struct, then marshal it.
	enc.SymKeyID = c.SymKeyID
	enc.PrivateKeyID = c.PrivateKeyID
	enc.Sig = c.Sig
	enc.MinPow = c.MinPow
	enc.Topics = c.Topics
	enc.AllowP2P = c.AllowP2P
	return json.Marshal(&enc)
}
// UnmarshalJSON decodes a JSON-encoded Criteria from input into c.
//
// NOTE: this function was generated by github.com/fjl/gencodec (see the
// file header); manual edits, including these comments, are lost when the
// file is regenerated.
func (c *Criteria) UnmarshalJSON(input []byte) error {
	// Shadow struct: pointer-typed fields distinguish "key absent" from
	// "key present with a zero value".
	type Criteria struct {
		SymKeyID     *string       `json:"symKeyID"`
		PrivateKeyID *string       `json:"privateKeyID"`
		Sig          hexutil.Bytes `json:"sig"`
		MinPow       *float64      `json:"minPow"`
		Topics       []TopicType   `json:"topics"`
		AllowP2P     *bool         `json:"allowP2P"`
	}
	var dec Criteria
	if err := json.Unmarshal(input, &dec); err != nil {
		return err
	}
	// Copy only the fields that were actually present in the input,
	// leaving the receiver's other fields untouched.
	if dec.SymKeyID != nil {
		c.SymKeyID = *dec.SymKeyID
	}
	if dec.PrivateKeyID != nil {
		c.PrivateKeyID = *dec.PrivateKeyID
	}
	if dec.Sig != nil {
		c.Sig = dec.Sig
	}
	if dec.MinPow != nil {
		c.MinPow = *dec.MinPow
	}
	if dec.Topics != nil {
		c.Topics = dec.Topics
	}
	if dec.AllowP2P != nil {
		c.AllowP2P = *dec.AllowP2P
	}
	return nil
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>${CURRENT_PROJECT_VERSION}</string>
<key>NSHumanReadableCopyright</key>
<string>Copyright © 2014 Jeff Hui. All rights reserved.</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
|
{
"pile_set_name": "Github"
}
|
applies the changes timesheet ca aplica els canvis
by timesheet ca per
check all timesheet ca Comprova-ho tot
create new links timesheet ca Crear nous enllaços
creates a new field timesheet ca crea un camp nou
custom fields timesheet ca Camps personalitzats
deleted timesheet ca eliminat
deletes this field timesheet ca esborra aquest camp
determines the order the fields are displayed timesheet ca determina l'ordre en que es mostren els camps
each value is a line like <id>[=<label>] timesheet ca cada valor és una línia com <id>[=<etiqueta>]
edit status timesheet ca Editar Estat
edit this entry timesheet ca Editar aquesta entrada
end timesheet ca Fi
entry saved timesheet ca Entrada guardada
existing links timesheet ca Enllaços existents
field must not be empty !!! timesheet ca El camp no ha d'estar buit !!!
global categories timesheet ca Categories globals
history timesheet ca Història
id timesheet ca id
last modified timesheet ca Darrera modificació
leaves without saveing timesheet ca Sortir sense desar
length<br>rows timesheet ca Longitud<br>Files
links timesheet ca Enllaços
max length of the input [, length of the inputfield (optional)] timesheet ca longitud màxima de l'entrada [, longitud del camp de text (opcional)]
modified timesheet ca Modificat
no details timesheet ca sense detalls
number of row for a multiline inputfield or line of a multi-select-box timesheet ca número de files per a un camp de text de més d'una selecció
order timesheet ca Ordre
parent timesheet ca Superior
price timesheet ca Preu
quantity timesheet ca Quantitat
saves the changes made timesheet ca guarda els canvis fets
saves the changes made and leaves timesheet ca Desar els canvis fets i sortir
select a price timesheet ca Selecciona un preu
select a project timesheet ca Selecciona un projecte
start timesheet ca Inici
the name used internaly (<= 20 chars), changeing it makes existing data unavailible timesheet ca el nom utilitzat internament (<= 20 caràcters), si el canvies fas que les dades existents siguin inaccessibles.
the text displayed to the user timesheet ca el text mostrat a l'usuari
unitprice timesheet ca Unitat de Preu
values for selectbox timesheet ca Valors per a llista desplegable
week timesheet ca Setmana
your database is not up to date (%1 vs. %2), please run %3setup%4 to update your database. timesheet ca La teva base de dades NO està al dia (%1 vs. %2), per favor, executa %3setup%4 per actualitzar la teva base de dades.
|
{
"pile_set_name": "Github"
}
|
2018-06-17: Version 4.0.1
* Fix parsing async get/set in a class (issue 1861, 1875)
* Account for different return statement argument (issue 1829, 1897, 1928)
* Correct the handling of HTML comment when parsing a module (issue 1841)
* Fix incorrect parse async with proto-identifier-shorthand (issue 1847)
* Fix negative column in binary expression (issue 1844)
* Fix incorrect YieldExpression in object methods (issue 1834)
* Various documentation fixes
2017-06-10: Version 4.0.0
* Support ES2017 async function and await expression (issue 1079)
* Support ES2017 trailing commas in function parameters (issue 1550)
* Explicitly distinguish parsing a module vs a script (issue 1576)
* Fix JSX non-empty container (issue 1786)
* Allow JSX element in a yield expression (issue 1765)
* Allow `in` expression in a concise body with a function body (issue 1793)
* Setter function argument must not be a rest parameter (issue 1693)
* Limit strict mode directive to functions with a simple parameter list (issue 1677)
* Prohibit any escape sequence in a reserved word (issue 1612)
* Only permit hex digits in hex escape sequence (issue 1619)
* Prohibit labelled class/generator/function declaration (issue 1484)
* Limit function declaration as if statement clause only in non-strict mode (issue 1657)
* Tolerate missing ) in a with and do-while statement (issue 1481)
2016-12-22: Version 3.1.3
* Support binding patterns as rest element (issue 1681)
* Account for different possible arguments of a yield expression (issue 1469)
2016-11-24: Version 3.1.2
* Ensure that import specifier is more restrictive (issue 1615)
* Fix duplicated JSX tokens (issue 1613)
* Scan template literal in a JSX expression container (issue 1622)
* Improve XHTML entity scanning in JSX (issue 1629)
2016-10-31: Version 3.1.1
* Fix assignment expression problem in an export declaration (issue 1596)
* Fix incorrect tokenization of hex digits (issue 1605)
2016-10-09: Version 3.1.0
* Do not implicitly collect comments when comment attachment is specified (issue 1553)
* Fix incorrect handling of duplicated proto shorthand fields (issue 1485)
* Prohibit initialization in some variants of for statements (issue 1309, 1561)
* Fix incorrect parsing of export specifier (issue 1578)
* Fix ESTree compatibility for assignment pattern (issue 1575)
2016-09-03: Version 3.0.0
* Support ES2016 exponentiation expression (issue 1490)
* Support JSX syntax (issue 1467)
* Use the latest Unicode 8.0 (issue 1475)
* Add the support for syntax node delegate (issue 1435)
* Fix ESTree compatibility on meta property (issue 1338)
* Fix ESTree compatibility on default parameter value (issue 1081)
* Fix ESTree compatibility on try handler (issue 1030)
2016-08-23: Version 2.7.3
* Fix tokenizer confusion with a comment (issue 1493, 1516)
2016-02-02: Version 2.7.2
* Fix out-of-bound error location in an invalid string literal (issue 1457)
* Fix shorthand object destructuring defaults in variable declarations (issue 1459)
2015-12-10: Version 2.7.1
* Do not allow trailing comma in a variable declaration (issue 1360)
* Fix assignment to `let` in non-strict mode (issue 1376)
* Fix missing delegate property in YieldExpression (issue 1407)
2015-10-22: Version 2.7.0
* Fix the handling of semicolon in a break statement (issue 1044)
* Run the test suite with major web browsers (issue 1259, 1317)
* Allow `let` as an identifier in non-strict mode (issue 1289)
* Attach orphaned comments as `innerComments` (issue 1328)
* Add the support for token delegator (issue 1332)
2015-09-01: Version 2.6.0
* Properly allow or prohibit `let` in a binding identifier/pattern (issue 1048, 1098)
* Add sourceType field for Program node (issue 1159)
* Ensure that strict mode reserved word binding throw an error (issue 1171)
* Run the test suite with Node.js and IE 11 on Windows (issue 1294)
* Allow binding pattern with no initializer in a for statement (issue 1301)
2015-07-31: Version 2.5.0
* Run the test suite in a browser environment (issue 1004)
* Ensure a comma between imported default binding and named imports (issue 1046)
* Distinguish `yield` as a keyword vs an identifier (issue 1186)
* Support ES6 meta property `new.target` (issue 1203)
* Fix the syntax node for yield with expression (issue 1223)
* Fix the check of duplicated proto in property names (issue 1225)
* Fix ES6 Unicode escape in identifier name (issue 1229)
* Support ES6 IdentifierStart and IdentifierPart (issue 1232)
* Treat await as a reserved word when parsing as a module (issue 1234)
* Recognize identifier characters from Unicode SMP (issue 1244)
* Ensure that export and import can be followed by a comma (issue 1250)
* Fix yield operator precedence (issue 1262)
2015-07-01: Version 2.4.1
* Fix some cases of comment attachment (issue 1071, 1175)
* Fix the handling of destructuring in function arguments (issue 1193)
* Fix invalid ranges in assignment expression (issue 1201)
2015-06-26: Version 2.4.0
* Support ES6 for-of iteration (issue 1047)
* Support ES6 spread arguments (issue 1169)
* Minimize npm payload (issue 1191)
2015-06-16: Version 2.3.0
* Support ES6 generator (issue 1033)
* Improve parsing of regular expressions with `u` flag (issue 1179)
2015-04-17: Version 2.2.0
* Support ES6 import and export declarations (issue 1000)
* Fix line terminator before arrow not recognized as error (issue 1009)
* Support ES6 destructuring (issue 1045)
* Support ES6 template literal (issue 1074)
* Fix the handling of invalid/incomplete string escape sequences (issue 1106)
* Fix ES3 static member access restriction (issue 1120)
* Support for `super` in ES6 class (issue 1147)
2015-03-09: Version 2.1.0
* Support ES6 class (issue 1001)
* Support ES6 rest parameter (issue 1011)
* Expand the location of property getter, setter, and methods (issue 1029)
* Enable TryStatement transition to a single handler (issue 1031)
* Support ES6 computed property name (issue 1037)
* Tolerate unclosed block comment (issue 1041)
* Support ES6 lexical declaration (issue 1065)
2015-02-06: Version 2.0.0
* Support ES6 arrow function (issue 517)
* Support ES6 Unicode code point escape (issue 521)
* Improve the speed and accuracy of comment attachment (issue 522)
* Support ES6 default parameter (issue 519)
* Support ES6 regular expression flags (issue 557)
* Fix scanning of implicit octal literals (issue 565)
* Fix the handling of automatic semicolon insertion (issue 574)
* Support ES6 method definition (issue 620)
* Support ES6 octal integer literal (issue 621)
* Support ES6 binary integer literal (issue 622)
* Support ES6 object literal property value shorthand (issue 624)
2015-03-03: Version 1.2.5
* Fix scanning of implicit octal literals (issue 565)
2015-02-05: Version 1.2.4
* Fix parsing of LeftHandSideExpression in ForInStatement (issue 560)
* Fix the handling of automatic semicolon insertion (issue 574)
2015-01-18: Version 1.2.3
* Fix division by this (issue 616)
2014-05-18: Version 1.2.2
* Fix duplicated tokens when collecting comments (issue 537)
2014-05-04: Version 1.2.1
* Ensure that Program node may still have leading comments (issue 536)
2014-04-29: Version 1.2.0
* Fix semicolon handling for expression statement (issue 462, 533)
* Disallow escaped characters in regular expression flags (issue 503)
* Performance improvement for location tracking (issue 520)
* Improve the speed of comment attachment (issue 522)
2014-03-26: Version 1.1.1
* Fix token handling of forward slash after an array literal (issue 512)
2014-03-23: Version 1.1.0
* Optionally attach comments to the owning syntax nodes (issue 197)
* Simplify binary parsing with stack-based shift reduce (issue 352)
* Always include the raw source of literals (issue 376)
* Add optional input source information (issue 386)
* Tokenizer API for pure lexical scanning (issue 398)
* Improve the web site and its online demos (issue 337, 400, 404)
* Performance improvement for location tracking (issue 417, 424)
* Support HTML comment syntax (issue 451)
* Drop support for legacy browsers (issue 474)
2013-08-27: Version 1.0.4
* Minimize the payload for packages (issue 362)
* Fix missing cases on an empty switch statement (issue 436)
* Support escaped ] in regexp literal character classes (issue 442)
* Tolerate invalid left-hand side expression (issue 130)
2013-05-17: Version 1.0.3
* Variable declaration needs at least one declarator (issue 391)
* Fix benchmark's variance unit conversion (issue 397)
* IE < 9: \v should be treated as vertical tab (issue 405)
* Unary expressions should always have prefix: true (issue 418)
* Catch clause should only accept an identifier (issue 423)
* Tolerate setters without parameter (issue 426)
2012-11-02: Version 1.0.2
Improvement:
* Fix esvalidate JUnit output upon a syntax error (issue 374)
2012-10-28: Version 1.0.1
Improvements:
* esvalidate understands shebang in a Unix shell script (issue 361)
* esvalidate treats fatal parsing failure as an error (issue 361)
* Reduce Node.js package via .npmignore (issue 362)
2012-10-22: Version 1.0.0
Initial release.
|
{
"pile_set_name": "Github"
}
|
/**
* Copyright (C) 2001-2020 by RapidMiner and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapidminer.com
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.operator.features.construction;
import com.rapidminer.example.Attribute;
import com.rapidminer.example.set.AttributeWeightedExampleSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
/**
* Inverts the used bit for every feature of every example set with a given fixed probability.
*
* @author Ingo Mierswa Exp $
*/
/**
 * Inverts the used bit for every feature of every example set with a given
 * fixed probability. A negative probability means "flip each attribute with
 * probability 1/numberOfAttributes". The mutated individual is kept only if
 * it satisfies the configured feature-count constraints; the original
 * individual is always retained.
 *
 * @author Ingo Mierswa
 */
public class ExampleSetBasedSelectionMutation extends ExampleSetBasedIndividualOperator {

	/** Per-attribute flip probability; negative means 1/#attributes. */
	private final double probability;

	/** Source of randomness for the mutation decisions. */
	private final Random random;

	/** Minimum number of selected features for a mutant to be kept. */
	private final int minNumber;

	/** Maximum number of selected features (values &lt; 1 disable the check). */
	private final int maxNumber;

	/** If &gt; 0, a mutant is kept only with exactly this many features. */
	private final int exactNumber;

	/**
	 * Creates the mutation operator.
	 *
	 * @param probability per-attribute flip probability (negative: 1/#attributes)
	 * @param random      random number generator to use
	 * @param minNumber   minimum allowed number of selected features
	 * @param maxNumber   maximum allowed number of selected features (&lt; 1: unlimited)
	 * @param exactNumber exact required number of selected features (&lt;= 0: unused)
	 */
	public ExampleSetBasedSelectionMutation(double probability, Random random, int minNumber, int maxNumber, int exactNumber) {
		this.probability = probability;
		this.random = random;
		this.minNumber = minNumber;
		this.maxNumber = maxNumber;
		this.exactNumber = exactNumber;
	}

	@Override
	public List<ExampleSetBasedIndividual> operate(ExampleSetBasedIndividual individual) {
		List<ExampleSetBasedIndividual> result = new LinkedList<ExampleSetBasedIndividual>();
		AttributeWeightedExampleSet clone = new AttributeWeightedExampleSet(individual.getExampleSet());

		// Negative probability means: flip each attribute with chance 1/n.
		double prob = probability < 0 ? 1.0d / clone.getAttributes().size() : probability;
		for (Attribute attribute : clone.getAttributes()) {
			if (random.nextDouble() < prob) {
				clone.flipAttributeUsed(attribute);
			}
		}

		// Keep the mutant only if it satisfies the feature-count constraints.
		int numberOfFeatures = clone.getNumberOfUsedAttributes();
		if (numberOfFeatures > 0) {
			if (exactNumber > 0) {
				if (numberOfFeatures == exactNumber) {
					result.add(new ExampleSetBasedIndividual(clone));
				}
			} else {
				if (((maxNumber < 1) || (numberOfFeatures <= maxNumber)) && (numberOfFeatures >= minNumber)) {
					result.add(new ExampleSetBasedIndividual(clone));
				}
			}
		}

		// Always keep the original individual as well.
		result.add(individual);
		return result;
	}
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (c) 2020 Jiri Svoboda
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* - The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/** @addtogroup libdisplay
* @{
*/
/** @file
*/
#ifndef _LIBDISPLAY_DISPLAY_INFO_H_
#define _LIBDISPLAY_DISPLAY_INFO_H_
#include <types/display/info.h>
#endif
/** @}
*/
|
{
"pile_set_name": "Github"
}
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# Portuguese locale formats: display formats and accepted input formats
# used by Django's date/number localization framework.
from __future__ import unicode_literals

# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'j \d\e F \d\e Y'             # e.g. "25 de Outubro de 2006"
TIME_FORMAT = 'H:i'                          # e.g. "14:30"
DATETIME_FORMAT = r'j \d\e F \d\e Y à\s H:i'
YEAR_MONTH_FORMAT = r'F \d\e Y'
MONTH_DAY_FORMAT = r'j \d\e F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y H:i'
FIRST_DAY_OF_WEEK = 0  # Sunday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d/%m/%Y', '%d/%m/%y',  # '25/10/2006', '25/10/06'
    # '%d de %b de %Y', '%d de %b, %Y',  # '25 de Out de 2006', '25 Out, 2006'
    # '%d de %B de %Y', '%d de %B, %Y',  # '25 de Outubro de 2006', '25 de Outubro, 2006'
)
DATETIME_INPUT_FORMATS = (
    '%d/%m/%Y %H:%M:%S',  # '25/10/2006 14:30:59'
    '%d/%m/%Y %H:%M',     # '25/10/2006 14:30'
    '%d/%m/%Y',           # '25/10/2006'
    '%d/%m/%y %H:%M:%S',  # '25/10/06 14:30:59'
    '%d/%m/%y %H:%M',     # '25/10/06 14:30'
    '%d/%m/%y',           # '25/10/06'
)

# Number formatting: 1234567.89 renders as "1.234.567,89".
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
|
{
"pile_set_name": "Github"
}
|
"use strict";var exports=module.exports={};/**
* Creates a function that returns `value`.
*
* @static
* @memberOf _
* @since 2.4.0
* @category Util
* @param {*} value The value to return from the new function.
* @returns {Function} Returns the new constant function.
* @example
*
* var objects = _.times(2, _.constant({ 'a': 1 }));
*
* console.log(objects);
* // => [{ 'a': 1 }, { 'a': 1 }]
*
* console.log(objects[0] === objects[1]);
* // => true
*/
function constant(value) {
  // Capture `value` in a closure; every invocation yields the same reference.
  return () => value;
}
module.exports = constant;
|
{
"pile_set_name": "Github"
}
|
@using Kubernetes
@namespace Kubernetes.Pages
@addTagHelper *, Microsoft.AspNetCore.Mvc.TagHelpers
|
{
"pile_set_name": "Github"
}
|
{{#
var rows = d.rows;
}}
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<title>{{= rows.title}} - {{lay.base.name}}</title>
<meta name="description" content="{{= d.description }}">
{{ include ../common/link }}
</head>
<body>
{{ include ../common/header }}
<div class="layui-hide-xs">
{{ include ../common/column }}
</div>
{{# var myself = rows.uid === user.id; }}
<div class="layui-container">
<div class="layui-row layui-col-space15">
<div class="layui-col-md8 content detail">
<div class="fly-panel detail-box">
<h1>{{=rows.title}}</h1>
<div class="fly-detail-info">
{{# if(rows.status == -1){ }}
<span class="layui-badge">审核中</span>
{{# } }}
<span class="layui-badge layui-bg-green fly-detail-column">
{{ lay.base.classes[rows['class']] ? lay.base.classes[rows['class']].title : '提问' }}
</span>
{{# if(rows.accept === -1){ }}
<span class="layui-badge" style="background-color: #999;">未结</span>
{{# } else if(rows.accept >= 0 ){ }}
<span class="layui-badge" style="background-color: #5FB878;">已结</span>
{{# } }}
{{ rows.stick > 0 ? '<span class="layui-badge layui-bg-black">置顶</span>' :'' }}
{{ rows.status == 1 ? '<span class="layui-badge layui-bg-red">精帖</span>' : ''}}
<div class="fly-admin-box" data-id="{{rows.id}}">
{{# if(user.auth == 1){ }}
<span class="layui-btn layui-btn-xs jie-admin" type="del">删除</span>
{{# } }}
{{# if(user.auth == 1){ }}
{{# if(rows.stick > 0){ }}
<span class="layui-btn layui-btn-xs jie-admin" type="set" field="stick" rank="0" style="background-color:#ccc;">取消置顶</span>
{{# } else { }}
<span class="layui-btn layui-btn-xs jie-admin" type="set" field="stick" rank="1">置顶</span>
{{# };
if(rows.status == 1){ }}
<span class="layui-btn layui-btn-xs jie-admin" type="set" field="status" rank="0" style="background-color:#ccc;">取消加精</span>
{{# } else { }}
<span class="layui-btn layui-btn-xs jie-admin" type="set" field="status" rank="1">加精</span>
{{# };
} }}
</div>
<span class="fly-list-nums">
<a href="#comment"><i class="iconfont" title="回答"></i> {{rows.comment}}</a>
<i class="iconfont" title="人气"></i> {{rows.hits}}
</span>
</div>
<div class="detail-about">
<a class="fly-avatar" href="/u/{{168*rows.uid}}/">
<img src="{{rows.user.avatar}}" alt="{{rows.user.username}}">
</a>
<div class="fly-detail-user">
<a href="/u/{{168*rows.uid}}/" class="fly-link">
<cite>{{rows.user.username}}</cite>
{{# if(rows.user.approve){ }}
<i class="iconfont icon-renzheng" title="认证信息:{{ rows.user.approve }}"></i>
{{# } }}
{{# if(rows.user.rmb){ }}
{{ lay.util.vipBadge(rows.user.rmb) }}
{{# } }}
</a>
<span>{{lay.time(rows.time, true)}}</span>
</div>
<div class="detail-hits" id="LAY_jieAdmin" data-id="{{rows.id}}">
<span style="padding-right: 10px; color: #FF7200">悬赏:{{rows.experience}}飞吻</span>
{{# if((user.username && myself && rows.accept == -1) || user.auth == 1){ }}
<span class="layui-btn layui-btn-xs jie-admin" type="edit"><a href="/jie/edit/{{rows.id}}">编辑此贴</a></span>
{{# } }}
</div>
</div>
<div class="detail-body photos">
{{# if(rows['class'] == '0' && rows.spe){ }}
<table class="layui-table">
<tbody>
<tr>
<td>版本:{{ rows.spe.project || '' }} {{ rows.spe.version || '' }}</td>
<td>浏览器:{{ rows.spe.browser || '' }}</td>
</tr>
</tbody>
</table>
{{# } }}
{{ d.content(rows.content) }}
</div>
</div>
{{# var jieda = rows.jieda; }}
<div class="fly-panel detail-box" id="flyReply">
<fieldset class="layui-elem-field layui-field-title" style="text-align: center;">
<legend>回帖</legend>
</fieldset>
<ul class="jieda" id="jieda">
{{# jieda.forEach(function(item, index){
var myda = item.user.username === user.username;
}}
<li data-id="{{item.id}}" {{item.id == rows.accept ? 'class="jieda-daan"' : '' }}>
<a name="item-{{item.time}}"></a>
<div class="detail-about detail-about-reply">
<a class="fly-avatar" href="/u/{{168*item.user.id}}/">
<img src="{{item.user.avatar}}" alt="{{item.user.username}}">
</a>
<div class="fly-detail-user">
<a href="/u/{{168*item.user.id}}/" class="fly-link">
<cite>{{item.user.username}}</cite>
{{# if(item.user.approve){ }}
<i class="iconfont icon-renzheng" title="认证信息:{{ item.user.approve }}"></i>
{{# } }}
{{# if(item.user.rmb){ }}
{{ lay.util.vipBadge(item.user.rmb) }}
{{# } }}
</a>
{{# if(item.user.username === rows.username){ }}
<span>(楼主)</span>
{{# } else if(item.user.auth == 1) { }}
<span style="color:#5FB878">(管理员)</span>
{{# } else if(item.user.auth == 2) { }}
<span style="color:#FF9E3F">(社区之光)</span>
{{# } else if(item.user.auth == -1) { }}
<span style="color:#999">(该号已被封)</span>
{{# } }}
</div>
<div class="detail-hits">
<span>{{lay.time(item.time, true)}}</span>
</div>
{{# if(item.id == rows.accept){ }}
<i class="iconfont icon-caina" title="最佳答案"></i>
{{# } }}
</div>
<div class="detail-body jieda-body photos">
{{ d.content(item.content) }}
</div>
<div class="jieda-reply">
<span class="jieda-zan {{d.session['zan'+item.id] ? 'zanok' : ''}}" type="zan">
<i class="iconfont icon-zan"></i>
<em>{{item.praise}}</em>
</span>
<span type="reply">
<i class="iconfont icon-svgmoban53"></i>
回复
</span>
{{# if(user.auth == 1 || user.auth == 2 || (user.username && myself && !myda)){ }}
<div class="jieda-admin">
{{# if(user.auth == 1 || (user.auth == 2 && item.accept != 1)){ }}
<span type="edit">
编辑
</span>
<span type="del">
删除
</span>
{{# if(rows.accept == -1){ }}
<span class="jieda-accept" type="accept">
采纳
</span>
{{# } }}
{{# } else if(rows.accept == -1 && !myda){ }}
<span class="jieda-accept" type="accept">
采纳
</span>
{{# } }}
</div>
{{# } }}
</div>
</li>
{{# }); if(jieda.length === 0){ }}
<li class="fly-none">消灭零回复</li>
{{# } }}
</ul>
<div style="text-align: center">
{{ d.laypage }}
</div>
<div class="layui-form layui-form-pane">
<form action="/jie/reply/" method="post">
<div class="layui-form-item layui-form-text">
<a name="comment"></a>
<div class="layui-input-block">
<textarea id="L_content" name="content" required lay-verify="required" placeholder="请输入内容" class="layui-textarea fly-editor" style="height: 150px;"></textarea>
</div>
</div>
<div class="layui-form-item">
<input type="hidden" name="jid" value="{{rows.id}}">
<input type="hidden" name="daPages" value="{{rows.jieda.pages}}">
<button class="layui-btn" lay-filter="*" lay-submit>提交回复</button>
</div>
</form>
</div>
</div>
</div>
<div class="layui-col-md4">
{{ include ../common/list-hot }}
{{ include ../ad/detail }}
{{ include ../ad/ours }}
<div class="fly-panel" style="padding: 20px 0; text-align: center;">
<img src="//cdn.layui.com/upload/2017_8/168_1501894831075_19619.jpg" style="max-width: 100%;" alt="layui">
<p style="position: relative; color: #666;">微信扫码关注 layui 公众号</p>
</div>
</div>
</div>
</div>
{{ include ../common/footer }}
</body>
</html>
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>$(EXECUTABLE_NAME)</string>
<key>CFBundleIdentifier</key>
<string>fr.whitebox.human-interface.resources.lion</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>$(PRODUCT_NAME)</string>
<key>CFBundlePackageType</key>
<string>BNDL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>1</string>
<key>NSHumanReadableCopyright</key>
<string>Copyright © 2018-2019 WhiteBox. All rights reserved.</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
|
{
"pile_set_name": "Github"
}
|
QA output created by 340
checking default access for this host ...
sample.long.million
value 1000000
sample.write_me old value=2 new value=111
checking access for list (should fail)
pminfo: Cannot connect to PMCD on host "HOST": No permission to perform requested operation
pmcd.log:=======
endclient client[M]: (fd N) No permission to perform requested operation (-12387)
================
checking default access for this host ...
sample.long.million
value 1000000
sample.write_me old value=2 new value=444
checking access for iplist (should fail)
pminfo: Cannot connect to PMCD on host "HOST": No permission to perform requested operation
pmcd.log:=======
endclient client[M]: (fd N) No permission to perform requested operation (-12387)
================
|
{
"pile_set_name": "Github"
}
|
//=====================================================
// File : action_matrix_matrix_product_bis.hh
// Author  : L. Plagne <laurent.plagne@edf.fr>
// Copyright (C) EDF R&D, lun sep 30 14:23:19 CEST 2002
//=====================================================
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
#ifndef ACTION_MATRIX_MATRIX_PRODUCT_BIS
#define ACTION_MATRIX_MATRIX_PRODUCT_BIS
#include "utilities.h"
#include "STL_interface.hh"
#include "STL_timer.hh"
#include <string>
#include "init_function.hh"
#include "init_vector.hh"
#include "init_matrix.hh"
using namespace std;
// Benchmark action for the dense matrix-matrix product X = A*B.
// The "bis" variant uses STL_timer's baseline mechanism: it first times the
// per-iteration matrix copies alone, then times copies + product, so the
// reported time isolates the product itself.
template<class Interface>
class Action_matrix_matrix_product_bis {

public :

  // Human-readable benchmark name, parameterized by the interface under test.
  static inline std::string name( void )
  {
    return "matrix_matrix_"+Interface::name();
  }

  // Flop count for one size x size matrix product:
  // size^3 multiply-adds => 2*size^3 floating-point operations.
  static double nb_op_base(int size){
    return 2.0*size*size*size;
  }

  // Runs the benchmark for nb_calc repetitions at the given matrix size,
  // validates the computed result against a reference STL implementation
  // (exits on mismatch), frees all matrices, and returns the measured time.
  static double calculate( int nb_calc, int size ) {

    // STL matrix and vector initialization
    typename Interface::stl_matrix A_stl;
    typename Interface::stl_matrix B_stl;
    typename Interface::stl_matrix X_stl;

    init_matrix<pseudo_random>(A_stl,size);
    init_matrix<pseudo_random>(B_stl,size);
    init_matrix<null_function>(X_stl,size);

    // generic matrix and vector initialization
    // *_ref hold pristine copies; A, B, X are the working matrices that get
    // re-initialized from the refs on every iteration.
    typename Interface::gene_matrix A_ref;
    typename Interface::gene_matrix B_ref;
    typename Interface::gene_matrix X_ref;

    typename Interface::gene_matrix A;
    typename Interface::gene_matrix B;
    typename Interface::gene_matrix X;

    Interface::matrix_from_stl(A_ref,A_stl);
    Interface::matrix_from_stl(B_ref,B_stl);
    Interface::matrix_from_stl(X_ref,X_stl);

    Interface::matrix_from_stl(A,A_stl);
    Interface::matrix_from_stl(B,B_stl);
    Interface::matrix_from_stl(X,X_stl);

    // STL_timer utilities
    STL_timer chronos;

    // Baseline evaluation: measure only the cost of the copies, which is
    // later subtracted from the full measurement by STL_timer.
    chronos.start_baseline(nb_calc);
    do {
      Interface::copy_matrix(A_ref,A,size);
      Interface::copy_matrix(B_ref,B,size);
      Interface::copy_matrix(X_ref,X,size);
      // Interface::matrix_matrix_product(A,B,X,size);  This line must be commented !!!!
    }
    while(chronos.check());
    chronos.report(true);

    // Time measurement: copies + the product under test.
    chronos.start(nb_calc);
    do {
      Interface::copy_matrix(A_ref,A,size);
      Interface::copy_matrix(B_ref,B,size);
      Interface::copy_matrix(X_ref,X,size);
      Interface::matrix_matrix_product(A,B,X,size); // here it is not commented !!!!
    }
    while(chronos.check());
    chronos.report(true);

    // NOTE(review): the /2000.0 scaling presumably converts STL_timer's
    // accumulated time into seconds per calculation — confirm against
    // STL_timer's units before relying on absolute values.
    double time=chronos.calculated_time/2000.0;

    // calculation check: compare against the reference STL implementation.
    typename Interface::stl_matrix resu_stl(size);

    Interface::matrix_to_stl(X,resu_stl);

    STL_interface<typename Interface::real_type>::matrix_matrix_product(A_stl,B_stl,X_stl,size);

    typename Interface::real_type error=
      STL_interface<typename Interface::real_type>::norm_diff(X_stl,resu_stl);

    if (error>1.e-6){
      INFOS("WRONG CALCULATION...residual=" << error);
      exit(1);
    }

    // deallocation and return time
    Interface::free_matrix(A,size);
    Interface::free_matrix(B,size);
    Interface::free_matrix(X,size);

    Interface::free_matrix(A_ref,size);
    Interface::free_matrix(B_ref,size);
    Interface::free_matrix(X_ref,size);

    return time;
  }

};
#endif
|
{
"pile_set_name": "Github"
}
|
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chromoting;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.SmallTest;
import android.view.MotionEvent;
import android.view.ViewConfiguration;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.ThreadUtils;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.chromium.base.test.util.Feature;
/** Tests for {@link TapGestureDetector}. */
@RunWith(BaseJUnit4ClassRunner.class)
public class TapGestureDetectorTest {
    /**
     * Listener that records the first tap or long-press notification it receives.
     * All fields start at -1 ("not yet notified"); the on* callbacks assert they
     * are invoked at most once per listener instance.
     */
    private static class MockListener implements TapGestureDetector.OnTapListener {
        private static final float COMPARISON_DELTA = 0.01f;
        int mTapCount = -1;
        int mLongPressCount = -1;
        float mTapX = -1;
        float mTapY = -1;

        @Override
        public boolean onTap(int pointerCount, float x, float y) {
            // A second tap notification (or one after a long-press) is a test failure.
            Assert.assertEquals(-1, mTapCount);
            Assert.assertEquals(-1, mTapX, COMPARISON_DELTA);
            Assert.assertEquals(-1, mTapY, COMPARISON_DELTA);
            mTapCount = pointerCount;
            mTapX = x;
            mTapY = y;
            return true;
        }

        @Override
        public void onLongPress(int pointerCount, float x, float y) {
            // A second long-press notification (or one after a tap) is a test failure.
            Assert.assertEquals(-1, mLongPressCount);
            Assert.assertEquals(-1, mTapX, COMPARISON_DELTA);
            Assert.assertEquals(-1, mTapY, COMPARISON_DELTA);
            mLongPressCount = pointerCount;
            mTapX = x;
            mTapY = y;
        }

        /** Asserts that exactly one tap (and no long-press) was reported with the given data. */
        public void assertTapDetected(int expectedCount, float expectedX, float expectedY) {
            Assert.assertEquals(expectedCount, mTapCount);
            Assert.assertEquals(expectedX, mTapX, COMPARISON_DELTA);
            Assert.assertEquals(expectedY, mTapY, COMPARISON_DELTA);
            Assert.assertEquals(-1, mLongPressCount);
        }

        /** Asserts that exactly one long-press (and no tap) was reported with the given data. */
        public void assertLongPressDetected(int expectedCount, float expectedX, float expectedY) {
            Assert.assertEquals(expectedCount, mLongPressCount);
            Assert.assertEquals(expectedX, mTapX, COMPARISON_DELTA);
            Assert.assertEquals(expectedY, mTapY, COMPARISON_DELTA);
            Assert.assertEquals(-1, mTapCount);
        }

        /** Asserts that neither a tap nor a long-press was reported. */
        public void assertNothingDetected() {
            Assert.assertEquals(-1, mTapCount);
            Assert.assertEquals(-1, mLongPressCount);
            Assert.assertEquals(-1, mTapX, COMPARISON_DELTA);
            Assert.assertEquals(-1, mTapY, COMPARISON_DELTA);
        }
    }

    // Detector under test; created on the UI thread in setUp().
    private TapGestureDetector mDetector;
    private MockListener mListener;
    // Synthesizes multi-pointer MotionEvents while tracking pointer state.
    private TouchEventGenerator mEventGenerator;

    /** Injects movement of a single finger (keeping other fingers in place). */
    private void injectMoveEvent(int id, float x, float y) {
        MotionEvent event = mEventGenerator.obtainMoveEvent(id, x, y);
        mDetector.onTouchEvent(event);
        event.recycle();
    }

    /** Injects a finger-down event (keeping other fingers in place). */
    private void injectDownEvent(int id, float x, float y) {
        MotionEvent event = mEventGenerator.obtainDownEvent(id, x, y);
        mDetector.onTouchEvent(event);
        event.recycle();
    }

    /** Injects a finger-up event (keeping other fingers in place). */
    private void injectUpEvent(int id) {
        MotionEvent event = mEventGenerator.obtainUpEvent(id);
        mDetector.onTouchEvent(event);
        event.recycle();
    }

    @Before
    public void setUp() {
        mListener = new MockListener();
        // Create the detector on the UI thread so its internal Handler binds to it.
        ThreadUtils.runOnUiThreadBlocking(new Runnable() {
            @Override
            public void run() {
                mDetector = new TapGestureDetector(
                        InstrumentationRegistry.getTargetContext(), mListener);
            }
        });
        mEventGenerator = new TouchEventGenerator();
    }

    /** Verifies that a simple down/up is detected as a tap. */
    @Test
    @SmallTest
    @Feature({"Chromoting"})
    public void testOneFingerDownUp() throws Exception {
        injectDownEvent(0, 0, 0);
        injectUpEvent(0);
        mListener.assertTapDetected(1, 0, 0);
    }

    /** Verifies that a simple multi-finger down/up is detected as a tap. */
    @Test
    @SmallTest
    @Feature({"Chromoting"})
    public void testMultipleFingerDownUp() throws Exception {
        injectDownEvent(0, 0, 0);
        injectDownEvent(1, 100, 100);
        injectDownEvent(2, 200, 200);
        injectUpEvent(0);
        injectUpEvent(1);
        injectUpEvent(2);
        // Reported coordinates are those of the first (id 0) finger.
        mListener.assertTapDetected(3, 0, 0);
    }

    /** Verifies that a multi-finger tap is detected when lifting the fingers in reverse order. */
    @Test
    @SmallTest
    @Feature({"Chromoting"})
    public void testMultipleFingerDownUpReversed() throws Exception {
        injectDownEvent(0, 0, 0);
        injectDownEvent(1, 100, 100);
        injectDownEvent(2, 200, 200);
        injectUpEvent(2);
        injectUpEvent(1);
        injectUpEvent(0);
        mListener.assertTapDetected(3, 0, 0);
    }

    /** Verifies that small movement of multiple fingers is still detected as a tap. */
    @Test
    @SmallTest
    @Feature({"Chromoting"})
    public void testMultipleFingerSmallMovements() throws Exception {
        injectDownEvent(0, 0, 0);
        injectDownEvent(1, 100, 100);
        injectDownEvent(2, 200, 200);
        // 1-2 px jitter stays within the detector's touch-slop tolerance.
        injectMoveEvent(0, 1, 1);
        injectMoveEvent(1, 101, 101);
        injectMoveEvent(2, 202, 202);
        injectUpEvent(0);
        injectUpEvent(1);
        injectUpEvent(2);
        mListener.assertTapDetected(3, 0, 0);
    }

    /** Verifies that large motion of a finger prevents a tap being detected. */
    @Test
    @SmallTest
    @Feature({"Chromoting"})
    public void testLargeMotion() throws Exception {
        injectDownEvent(0, 0, 0);
        injectDownEvent(1, 100, 100);
        injectDownEvent(2, 200, 200);
        // A 200 px move exceeds touch slop, so the gesture is no longer a tap.
        injectMoveEvent(1, 300, 300);
        injectUpEvent(0);
        injectUpEvent(1);
        injectUpEvent(2);
        mListener.assertNothingDetected();
    }

    /** Verifies that a long-press is detected. */
    @Test
    @SmallTest
    @Feature({"Chromoting"})
    public void testLongPress() throws Exception {
        ThreadUtils.runOnUiThreadBlocking(new Runnable() {
            @Override
            public void run() {
                // Ensure the gesture-detector is created on the UI thread, so that it uses the
                // Handler for the UI thread for LongPress notifications.
                mDetector = new TapGestureDetector(
                        InstrumentationRegistry.getTargetContext(), mListener);
                injectDownEvent(0, 0, 0);
            }
        });
        // Hold well past the platform long-press timeout before lifting.
        Thread.sleep(2 * ViewConfiguration.getLongPressTimeout());
        ThreadUtils.runOnUiThreadBlocking(new Runnable() {
            @Override
            public void run() {
                injectUpEvent(0);
            }
        });
        mListener.assertLongPressDetected(1, 0, 0);
    }
}
|
{
"pile_set_name": "Github"
}
|
/*
* cblas_zswap.c
*
* The program is a C interface to zswap.
*
* Written by Keita Teranishi. 2/11/1998
*
*/
#include "cblas.h"
#include "cblas_f77.h"
/*
 * Swaps the elements of two double-complex vectors: X <-> Y.
 *
 *   N          - number of complex elements to swap
 *   X, Y       - the vectors (interleaved real/imaginary doubles)
 *   incX, incY - storage strides between consecutive elements
 *
 * Thin C wrapper that forwards to the Fortran ZSWAP routine.
 */
void cblas_zswap( const int N, void *X, const int incX, void *Y,
                       const int incY)
{
#ifdef F77_INT
   /* Fortran INTEGER differs from C int (e.g. 64-bit); copy into the
      Fortran-sized type before passing by reference. */
   F77_INT F77_N=N, F77_incX=incX, F77_incY=incY;
#else
   /* Fortran and C integers match: alias the C arguments directly. */
   #define F77_N N
   #define F77_incX incX
   #define F77_incY incY
#endif
   F77_zswap( &F77_N, X, &F77_incX, Y, &F77_incY);
}
|
{
"pile_set_name": "Github"
}
|
package com.gentics.mesh.test.context.helper;
import static com.gentics.mesh.core.rest.job.JobStatus.COMPLETED;
import static com.gentics.mesh.test.ClientHelper.call;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import com.gentics.mesh.cli.BootstrapInitializerImpl;
import com.gentics.mesh.core.rest.MeshEvent;
import com.gentics.mesh.core.rest.job.JobListResponse;
import com.gentics.mesh.core.rest.job.JobResponse;
import com.gentics.mesh.core.rest.job.JobStatus;
import com.gentics.mesh.parameter.client.PagingParametersImpl;
import com.gentics.mesh.search.verticle.ElasticsearchProcessVerticle;
import com.gentics.mesh.search.verticle.eventhandler.SyncEventHandler;
import com.gentics.mesh.test.context.ClientHandler;
import com.gentics.mesh.test.context.event.EventAsserter;
import com.gentics.mesh.test.context.event.EventAsserterChain;
import com.gentics.mesh.test.util.TestUtils;
import io.reactivex.Completable;
import io.reactivex.functions.Action;
import io.vertx.core.eventbus.MessageConsumer;
/**
 * Mixin of test helpers for waiting on Mesh eventbus events, job/migration
 * completion, and Elasticsearch index state.
 */
public interface EventHelper extends BaseHelper {

	/**
	 * Return the event asserter used to register and verify event expectations.
	 */
	EventAsserter eventAsserter();

	/**
	 * Drop all indices and create a new index using the current data.
	 *
	 * @throws Exception
	 */
	default void recreateIndices() throws Exception {
		// We potentially modified existing data thus we need to drop all indices and create them and reindex all data
		SyncEventHandler.invokeClearCompletable(meshApi()).blockingAwait(10, TimeUnit.SECONDS);
		SyncEventHandler.invokeSyncCompletable(meshApi()).blockingAwait(30, TimeUnit.SECONDS);
		refreshIndices();
	}

	/**
	 * Run the given action and wait (up to 10 seconds) until the given event
	 * address has received a message.
	 *
	 * @param address
	 * @param code
	 */
	default void waitForEvent(String address, Action code) {
		waitForEvent(address, code, 10_000);
	}

	/**
	 * Wait (up to timeoutMs) until the given event has been received; no action
	 * is triggered.
	 */
	default void waitForEvent(MeshEvent event, int timeoutMs) {
		waitForEvent(event.getAddress(), () -> {
		}, timeoutMs);
	}

	/**
	 * Register a consumer on the address, run the action once the consumer is
	 * registered, and block until a message arrives or the timeout elapses.
	 */
	default void waitForEvent(String address, Action code, int timeoutMs) {
		CountDownLatch latch = new CountDownLatch(1);
		MessageConsumer<Object> consumer = vertx().eventBus().consumer(address);
		consumer.handler(msg -> latch.countDown());
		// The completion handler will be invoked once the consumer has been registered
		consumer.completionHandler(res -> {
			if (res.failed()) {
				throw new RuntimeException("Could not listen to event", res.cause());
			}
			try {
				code.run();
			} catch (Exception e) {
				throw new RuntimeException(e);
			}
		});
		try {
			// NOTE(review): the boolean result of await() is ignored, so a timeout
			// passes silently instead of failing the test — confirm this is intended.
			latch.await(timeoutMs, TimeUnit.MILLISECONDS);
		} catch (InterruptedException e) {
			throw new RuntimeException(e);
		}
		consumer.unregister();
	}

	/** Block until the search subsystem reports itself idle. */
	default void waitForSearchIdleEvent() {
		getTestContext().waitForSearchIdleEvent();
	}

	/**
	 * Subscribe the completable, request a search flush when it completes, wait
	 * for the search-idle event, then refresh the indices.
	 */
	default void waitForSearchIdleEvent(Completable completable) {
		waitForEvent(MeshEvent.SEARCH_IDLE, () -> {
			// NOTE(review): subscribe() without an error handler — a failing
			// completable will not surface here; verify errors are handled elsewhere.
			completable.subscribe(() -> vertx().eventBus().publish(MeshEvent.SEARCH_FLUSH_REQUEST.address, null));
		});
		refreshIndices();
	}

	/** Variant of {@link #waitForSearchIdleEvent(Callable)} for actions without a result. */
	default void waitForSearchIdleEvent(Action action) {
		waitForSearchIdleEvent(() -> {
			action.run();
			return null;
		});
	}

	/**
	 * Run the action, request a search flush, wait for the search-idle event and
	 * an index refresh, then return the action's result.
	 */
	default <T> T waitForSearchIdleEvent(Callable<T> action) {
		try {
			AtomicReference<T> ref = new AtomicReference<>();
			waitForEvent(MeshEvent.SEARCH_IDLE, () -> {
				ref.set(action.call());
				vertx().eventBus().publish(MeshEvent.SEARCH_FLUSH_REQUEST.address, null);
			});
			refreshIndices();
			return ref.get();
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Run the given action and wait until the given event has been received.
	 *
	 * @param event
	 * @param code
	 */
	default void waitForEvent(MeshEvent event, Action code) {
		waitForEvent(event.address, code);
	}

	/**
	 * Wait until the given event has been received; no action is triggered.
	 *
	 * @param event
	 */
	default void waitForEvent(MeshEvent event) {
		waitForEvent(event.address, () -> {
		});
	}

	/** Wait (up to 20 seconds) for a plugin registration event. */
	default void waitForPluginRegistration() {
		waitForEvent(MeshEvent.PLUGIN_REGISTERED, 20_000);
	}

	/** Run the action and wait for exactly one job to reach COMPLETED. */
	default JobListResponse waitForJob(Runnable action) {
		return waitForJobs(action, COMPLETED, 1);
	}

	/**
	 * Run the given action with admin permissions enabled.
	 *
	 * @param action
	 * @return the action's result
	 */
	default <T> T runAsAdmin(Supplier<T> action) {
		boolean isAdmin = tx(() -> user().isAdmin());
		// Grant perms to check the job
		if (!isAdmin) {
			grantAdmin();
		}
		T t = action.get();
		// Restore the original permission state afterwards.
		if (!isAdmin) {
			revokeAdmin();
		}
		return t;
	}

	/** Variant of {@link #runAsAdmin(Supplier)} for actions without a result. */
	default void runAsAdmin(Runnable action) {
		boolean isAdmin = tx(() -> user().isAdmin());
		// Grant perms to check the job
		if (!isAdmin) {
			grantAdmin();
		}
		action.run();
		if (!isAdmin) {
			revokeAdmin();
		}
	}

	/**
	 * Execute the action and check that the jobs are executed and yields the given status.
	 *
	 * @param action
	 *            Action to be invoked. This action should trigger the migrations
	 * @param status
	 *            Expected job status for all migrations. No assertion will be performed when the status is null
	 * @param expectedJobs
	 *            Amount of expected jobs
	 * @return Migration status
	 */
	default JobListResponse waitForJobs(Runnable action, JobStatus status, int expectedJobs) {
		// Load a status just before the action
		JobListResponse before = runAsAdmin(() -> {
			return call(() -> client().findJobs());
		});

		// Invoke the action
		action.run();

		// Now poll the migration status and check the response
		final int MAX_WAIT = 120;
		for (int i = 0; i < MAX_WAIT; i++) {
			JobListResponse response = runAsAdmin(() -> call(() -> client().findJobs()));

			// Wait for the expected number of new jobs to appear, then for all
			// jobs to reach the expected status.
			if (response.getMetainfo().getTotalCount() == before.getMetainfo().getTotalCount() + expectedJobs) {
				if (status != null) {
					boolean allMatching = true;
					for (JobResponse info : response.getData()) {
						if (!status.equals(info.getStatus())) {
							allMatching = false;
						}
					}
					if (allMatching) {
						return response;
					}
				}
			}
			// Dump the job list after 30 seconds of polling to aid debugging.
			if (i > 30) {
				System.out.println(response.toJson());
			}
			if (i == MAX_WAIT - 1) {
				throw new RuntimeException("Migration did not complete within " + MAX_WAIT + " seconds");
			}
			sleep(1000);
		}
		return null;
	}

	/** Run the action and wait for exactly one new job to reach COMPLETED. */
	default void waitForLatestJob(Runnable action) {
		waitForLatestJob(action, JobStatus.COMPLETED);
	}

	/**
	 * Run the action and wait until exactly one new job (compared to the job list
	 * before the action) reaches the given status. Fails if more than one new job
	 * appears or the timeout elapses.
	 */
	default void waitForLatestJob(Runnable action, JobStatus status) {
		// Load a status just before the action
		JobListResponse before = runAsAdmin(() -> call(() -> client().findJobs()));

		// Invoke the action
		action.run();

		// Now poll the migration status and check the response
		final int MAX_WAIT = 120;
		for (int i = 0; i < MAX_WAIT; i++) {
			JobListResponse response = runAsAdmin(() -> call(() -> client().findJobs()));
			List<JobResponse> diff = TestUtils.difference(response.getData(), before.getData(), JobResponse::getUuid);
			if (diff.size() > 1) {
				System.out.println(response.toJson());
				throw new RuntimeException("More jobs than expected");
			}
			if (diff.size() == 1) {
				JobResponse newJob = diff.get(0);
				if (newJob.getStatus().equals(status)) {
					return;
				}
			}
			if (i > 2) {
				System.out.println(response.toJson());
			}
			if (i == MAX_WAIT - 1) {
				throw new RuntimeException("Migration did not complete within " + MAX_WAIT + " seconds");
			}
			sleep(1000);
		}
	}

	/**
	 * Execute the action and check that the migration is executed and yields the given status.
	 *
	 * @param action
	 *            Action to be invoked. This action should trigger the jobs
	 * @param status
	 *            Expected job status
	 * @return Job status
	 */
	default JobResponse waitForJob(Runnable action, String jobUuid, JobStatus status) {
		// Invoke the action
		action.run();

		// Now poll the migration status and check the response
		final int MAX_WAIT = 120;
		for (int i = 0; i < MAX_WAIT; i++) {
			JobResponse response = runAsAdmin(() -> call(() -> client().findJobByUuid(jobUuid)));
			if (response.getStatus().equals(status)) {
				return response;
			}
			if (i > 30) {
				System.out.println(response.toJson());
			}
			if (i == MAX_WAIT - 1) {
				throw new RuntimeException("Job did not complete within " + MAX_WAIT + " seconds");
			}
			sleep(1000);
		}
		return null;
	}

	/**
	 * Inform the job worker that new jobs have been enqueued and block until all jobs complete or the timeout has been reached.
	 *
	 * @param jobUuid
	 *            Uuid of the job we should wait for
	 *
	 */
	default JobListResponse triggerAndWaitForJob(String jobUuid) {
		return triggerAndWaitForJob(jobUuid, COMPLETED);
	}

	/**
	 * Inform the job worker that new jobs are enqueued and check the migration status. This method will block until the migration finishes or a timeout has
	 * been reached.
	 *
	 * @param jobUuid
	 *            Uuid of the job we should wait for
	 * @param status
	 *            Expected status for all jobs
	 */
	default JobListResponse triggerAndWaitForJob(String jobUuid, JobStatus status) {
		waitForJob(() -> {
			MeshEvent.triggerJobWorker(meshApi());
		}, jobUuid, status);
		return runAsAdmin(() -> call(() -> client().findJobs()));
	}

	/**
	 * Trigger the job worker and poll until every listed job has reached the
	 * expected status or the timeout elapses.
	 */
	default void triggerAndWaitForAllJobs(JobStatus expectedStatus) {
		MeshEvent.triggerJobWorker(meshApi());

		// Now poll the migration status and check the response
		final int MAX_WAIT = 120;
		for (int i = 0; i < MAX_WAIT; i++) {
			JobListResponse response = runAsAdmin(() -> call(() -> client().findJobs(new PagingParametersImpl().setPerPage(200L))));

			boolean allDone = true;
			for (JobResponse info : response.getData()) {
				if (!info.getStatus().equals(expectedStatus)) {
					allDone = false;
				}
			}
			if (allDone) {
				break;
			}

			if (i > 30) {
				System.out.println(response.toJson());
			}
			if (i == MAX_WAIT - 1) {
				throw new RuntimeException("Job did not complete within " + MAX_WAIT + " seconds");
			}
			sleep(1000);
		}
	}

	/**
	 * Call the given handler, latch for the future and assert success. Waits for search to be idle, then returns the result.
	 *
	 * @param handler
	 *            handler
	 * @param <T>
	 *            type of the returned object
	 * @return result of the future
	 */
	default <T> T callAndWait(ClientHandler<T> handler) {
		try {
			return waitForSearchIdleEvent(() -> handler.handle().blockingGet());
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/** Refresh the Elasticsearch indices, waiting up to 15 seconds. */
	default void refreshIndices() {
		getSearchVerticle().refresh().blockingAwait(15, TimeUnit.SECONDS);
	}

	/** Look up the search verticle from the bootstrap initializer. */
	default ElasticsearchProcessVerticle getSearchVerticle() {
		return ((BootstrapInitializerImpl) boot()).loader.get().getSearchVerticle();
	}

	/**
	 * Register an expectation for the given event on the event asserter.
	 *
	 * @return chain for refining the expectation
	 */
	default EventAsserterChain expect(MeshEvent event) {
		return eventAsserter().expect(event);
	}

	/** Block until all registered event expectations have been satisfied. */
	default void awaitEvents() {
		eventAsserter().await();
	}
}
|
{
"pile_set_name": "Github"
}
|
blender = add
color = [255, 128, 255]
alpha = 150
gravity = 0
col_layer = -1
render_layer = 6
on ground_collision()
remove()
on timer(0)
remove()
|
{
"pile_set_name": "Github"
}
|
# Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
trigger:
branches:
include:
- master
- stable/*
tags:
include:
- '*'
pr:
autoCancel: true
branches:
include:
- '*' # must quote since "*" is a YAML reserved character; we want a string
stages:
- stage: 'Wheel_Builds'
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags')
jobs:
- job: 'linux'
pool: {vmImage: 'Ubuntu-16.04'}
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags')
variables:
python.version: '3.7'
CIBW_BEFORE_BUILD: pip install -U Cython
CIBW_SKIP: cp27-* cp34-* cp35-* pp*
TWINE_USERNAME: qiskit
CIBW_TEST_COMMAND: python {project}/examples/python/stochastic_swap.py
steps:
- task: UsePythonVersion@0
- bash: |
set -e
python -m pip install --upgrade pip
pip install cibuildwheel==1.3.0
pip install -U twine
cibuildwheel --output-dir wheelhouse .
- task: PublishBuildArtifacts@1
inputs: {pathtoPublish: 'wheelhouse'}
condition: succeededOrFailed()
- bash: |
twine upload wheelhouse/*
env:
TWINE_PASSWORD: $(TWINE_PASSWORD)
- job: 'sdist'
pool: {vmImage: 'Ubuntu-16.04'}
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags')
variables:
TWINE_USERNAME: qiskit
python.version: '3.7'
steps:
- task: UsePythonVersion@0
- bash: |
set -e
python -m pip install --upgrade pip
pip install -U twine
python setup.py sdist
- task: PublishBuildArtifacts@1
inputs: {pathtoPublish: 'dist'}
condition: succeededOrFailed()
- bash: |
twine upload dist/*
env:
TWINE_PASSWORD: $(TWINE_PASSWORD)
- job: 'macos'
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags')
pool: {vmImage: 'macOS-10.15'}
variables:
python.version: '3.7'
CIBW_BEFORE_BUILD: pip install -U Cython
CIBW_SKIP: cp27-* cp34-* cp35-* pp*
TWINE_USERNAME: qiskit
CIBW_TEST_COMMAND: python {project}/examples/python/stochastic_swap.py
steps:
- task: UsePythonVersion@0
- bash: |
set -e
python -m pip install --upgrade pip
pip install cibuildwheel==1.3.0
pip install -U twine
cibuildwheel --output-dir wheelhouse .
- task: PublishBuildArtifacts@1
inputs: {pathtoPublish: 'wheelhouse'}
condition: succeededOrFailed()
- bash: |
twine upload wheelhouse/*
env:
TWINE_PASSWORD: $(TWINE_PASSWORD)
- job: 'Windows'
pool: {vmImage: 'vs2017-win2016'}
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags')
variables:
CIBW_BEFORE_BUILD: pip install -U Cython
CIBW_SKIP: cp27-* cp34-* cp35-* pp*
TWINE_USERNAME: qiskit
CIBW_TEST_COMMAND: python {project}\examples\python\stochastic_swap.py
steps:
- {task: UsePythonVersion@0, inputs: {versionSpec: '3.6', architecture: x86}}
- {task: UsePythonVersion@0, inputs: {versionSpec: '3.6', architecture: x64}}
- {task: UsePythonVersion@0, inputs: {versionSpec: '3.7', architecture: x86}}
- {task: UsePythonVersion@0, inputs: {versionSpec: '3.7', architecture: x64}}
- {task: UsePythonVersion@0, inputs: {versionSpec: '3.8', architecture: x86}}
- {task: UsePythonVersion@0, inputs: {versionSpec: '3.8', architecture: x64}}
- script: choco install vcpython27 -f -y
displayName: Install Visual C++ for Python 2.7
- bash: |
set -e
python -m pip install --upgrade pip
pip install cibuildwheel==1.3.0
pip install -U twine
cibuildwheel --output-dir wheelhouse
- task: PublishBuildArtifacts@1
inputs: {pathtoPublish: 'wheelhouse'}
condition: succeededOrFailed()
- script: |
twine upload wheelhouse\*
env:
TWINE_PASSWORD: $(TWINE_PASSWORD)
- stage: 'Lint_and_Tests'
dependsOn: []
jobs:
- job: 'Linux_Tests'
pool: {vmImage: 'ubuntu-16.04'}
strategy:
matrix:
Python37:
python.version: '3.7'
variables:
QISKIT_SUPPRESS_PACKAGING_WARNINGS: Y
PIP_CACHE_DIR: $(Pipeline.Workspace)/.pip
QISKIT_TEST_CAPTURE_STREAMS: 1
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '$(python.version)'
displayName: 'Use Python $(python.version)'
- task: Cache@2
inputs:
key: 'pip | "$(Agent.OS)" | "$(python.version)" |"$(Build.BuildNumber)"'
restoreKeys: |
pip | "$(Agent.OS)" | "$(python.version)"
pip | "$(Agent.OS)"
pip
path: $(PIP_CACHE_DIR)
displayName: Cache pip
- bash: |
set -e
python -m pip install --upgrade pip setuptools wheel virtualenv
virtualenv test-job
source test-job/bin/activate
pip install -U -r requirements.txt -r requirements-dev.txt -c constraints.txt
pip install -c constraints.txt .
pip install "qiskit-ibmq-provider" "qiskit-aer" "z3-solver" -c constraints.txt
python setup.py build_ext --inplace
sudo apt install -y graphviz
pip check
displayName: 'Install dependencies'
- bash: |
set -e
source test-job/bin/activate
export PYTHONHASHSEED=$(python -S -c "import random; print(random.randint(1, 4294967295))")
echo "PYTHONHASHSEED=$PYTHONHASHSEED"
stestr run
displayName: 'Run tests'
- task: CopyFiles@2
condition: failed()
displayName: 'Copy images'
inputs:
contents: '**/*.png'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishBuildArtifacts@1
condition: failed()
displayName: 'Publish images'
inputs:
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
artifactName: 'drop_linux'
- bash: |
set -e
source test-job/bin/activate
pip install -U junitxml
mkdir -p junit
stestr last --subunit | tools/subunit_to_junit.py -o junit/test-results.xml
condition: succeededOrFailed()
displayName: 'Generate results'
- task: PublishTestResults@2
condition: succeededOrFailed()
inputs:
testResultsFiles: '**/test-*.xml'
testRunTitle: 'Test results for Linux Python $(python.version)'
- job: 'Lint'
pool: {vmImage: 'ubuntu-16.04'}
strategy:
matrix:
Python37:
python.version: '3.7'
variables:
PIP_CACHE_DIR: $(Pipeline.Workspace)/.pip
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '$(python.version)'
displayName: 'Use Python $(python.version)'
- task: Cache@2
inputs:
key: 'pip | "$(Agent.OS)" | "$(python.version)" | "$(Build.BuildNumber)"'
restoreKeys: |
pip | "$(Agent.OS)" | "$(python.version)"
pip | "$(Agent.OS)"
pip
path: $(PIP_CACHE_DIR)
displayName: Cache pip
- bash: |
set -e
python -m pip install --upgrade pip setuptools wheel virtualenv
virtualenv test-job
source test-job/bin/activate
pip install -U -r requirements.txt -r requirements-dev.txt -c constraints.txt
pip install -c constraints.txt -e .
pip install "qiskit-ibmq-provider" "qiskit-aer" -c constraints.txt
python setup.py build_ext --inplace
displayName: 'Install dependencies'
- bash: |
set -e
source test-job/bin/activate
pycodestyle --max-line-length=100 qiskit test
pylint -rn qiskit test
tools/verify_headers.py qiskit test
reno lint
displayName: 'Style and lint'
- job: 'Docs'
pool: {vmImage: 'ubuntu-16.04'}
strategy:
matrix:
Python37:
python.version: '3.7'
variables:
PIP_CACHE_DIR: $(Pipeline.Workspace)/.pip
steps:
- checkout: self
- task: UsePythonVersion@0
inputs:
versionSpec: '$(python.version)'
displayName: 'Use Python $(python.version)'
- task: Cache@2
inputs:
key: 'pip | "$(Agent.OS)" | "$(python.version)" | "$(Build.BuildNumber)"'
restoreKeys: |
pip | "$(Agent.OS)" | "$(python.version)"
pip | "$(Agent.OS)"
pip
path: $(PIP_CACHE_DIR)
displayName: Cache pip
- bash: |
set -e
python -m pip install --upgrade pip setuptools wheel
pip install -U tox
python setup.py build_ext --inplace
sudo apt install -y graphviz
displayName: 'Install dependencies'
- bash: |
tox -edocs
displayName: 'Run Docs build'
- task: ArchiveFiles@2
inputs:
rootFolderOrFile: 'docs/_build/html'
archiveType: tar
archiveFile: '$(Build.ArtifactStagingDirectory)/html_docs.tar.gz'
verbose: true
- task: PublishBuildArtifacts@1
displayName: 'Publish docs'
inputs:
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
artifactName: 'html_docs'
Parallel: true
ParallelCount: 8
- job: 'MacOS_Catalina_Tests'
pool: {vmImage: 'macOS-10.15'}
strategy:
matrix:
Python37:
python.version: '3.7'
variables:
QISKIT_SUPPRESS_PACKAGING_WARNINGS: Y
PIP_CACHE_DIR: $(Pipeline.Workspace)/.pip
QISKIT_TEST_CAPTURE_STREAMS: 1
steps:
- task: UsePythonVersion@0
inputs:
versionSpec: '$(python.version)'
displayName: 'Use Python $(python.version)'
- task: Cache@2
inputs:
key: 'pip | "$(Agent.OS)" | "$(python.version)" | "$(Build.BuildNumber)"'
restoreKeys: |
pip | "$(Agent.OS)" | "$(python.version)"
pip | "$(Agent.OS)"
pip
path: $(PIP_CACHE_DIR)
displayName: Cache pip
- bash: |
set -e
python -m pip install --upgrade pip setuptools wheel virtualenv
virtualenv test-job
source test-job/bin/activate
pip install -U -r requirements.txt -r requirements-dev.txt -c constraints.txt
pip install -c constraints.txt -e .
pip install "qiskit-ibmq-provider" "qiskit-aer" -c constraints.txt
python setup.py build_ext --inplace
pip check
displayName: 'Install dependencies'
- bash: |
set -e
source test-job/bin/activate
export PYTHONHASHSEED=$(python -S -c "import random; print(random.randint(1, 4294967295))")
echo "PYTHONHASHSEED=$PYTHONHASHSEED"
stestr run
displayName: 'Run tests'
- task: CopyFiles@2
condition: failed()
displayName: 'Copy images'
inputs:
contents: '**/*.png'
targetFolder: $(Build.ArtifactStagingDirectory)
- task: PublishBuildArtifacts@1
condition: failed()
displayName: 'Publish images'
inputs:
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
artifactName: 'drop_macos'
- bash: |
set -e
source test-job/bin/activate
pip install -U junitxml
mkdir -p junit
stestr last --subunit | tools/subunit_to_junit.py -o junit/test-results.xml
condition: succeededOrFailed()
displayName: 'Generate results'
- task: PublishTestResults@2
condition: succeededOrFailed()
inputs:
testResultsFiles: '**/test-*.xml'
testRunTitle: 'Test results for macOS Python $(python.version)'
# Full cross-platform test stage. Runs only when the first-stage lint/tests
# succeeded and the build is not a tag build.
- stage: 'Python_Tests'
  condition: and(succeeded('Lint_and_Tests'), not(startsWith(variables['Build.SourceBranch'], 'refs/tags')))
  jobs:
    # Windows test matrix (Python 3.6 - 3.8).
    - job: 'Windows_Tests'
      pool: {vmImage: 'vs2017-win2016'}
      strategy:
        matrix:
          Python36:
            python.version: '3.6'
          Python37:
            python.version: '3.7'
          Python38:
            python.version: '3.8'
      variables:
        QISKIT_SUPPRESS_PACKAGING_WARNINGS: Y
        PIP_CACHE_DIR: $(Pipeline.Workspace)/.pip
        QISKIT_TEST_CAPTURE_STREAMS: 1
      steps:
        - task: UsePythonVersion@0
          inputs:
            versionSpec: '$(python.version)'
          displayName: 'Use Python $(python.version)'
        - task: Cache@2
          inputs:
            key: 'pip | "$(Agent.OS)" | "$(python.version)" | "$(Build.BuildNumber)"'
            restoreKeys: |
              pip | "$(Agent.OS)" | "$(python.version)"
              pip | "$(Agent.OS)"
              pip
            path: $(PIP_CACHE_DIR)
          displayName: Cache pip
        - bash: |
            set -e
            python -m pip install --upgrade pip setuptools wheel virtualenv
            virtualenv test-job
            source test-job/Scripts/activate
            pip install -U -r requirements.txt -r requirements-dev.txt -c constraints.txt
            pip install -c constraints.txt -e .
            pip install "qiskit-ibmq-provider" "qiskit-aer" "z3-solver" -c constraints.txt
            python setup.py build_ext --inplace
            pip check
          displayName: 'Install dependencies'
        - bash: |
            set -e
            source test-job/Scripts/activate
            # NOTE(review): the seed upper bound (1024) differs from the
            # 4294967295 used in the Linux/macOS jobs -- confirm the smaller
            # range is intentional on Windows.
            export PYTHONHASHSEED=$(python -S -c "import random; print(random.randint(1, 1024))")
            echo "PYTHONHASHSEED=$PYTHONHASHSEED"
            stestr run
          displayName: 'Run tests'
        - task: CopyFiles@2
          condition: failed()
          displayName: 'Copy images'
          inputs:
            contents: '**/*.png'
            targetFolder: $(Build.ArtifactStagingDirectory)
        - task: PublishBuildArtifacts@1
          condition: failed()
          displayName: 'Publish images'
          inputs:
            pathtoPublish: '$(Build.ArtifactStagingDirectory)'
            artifactName: 'drop_windows'
        - bash: |
            set -e
            source test-job/Scripts/activate
            pip install -U junitxml
            mkdir -p junit
            stestr last --subunit | python tools/subunit_to_junit.py -o junit/test-results.xml
          condition: succeededOrFailed()
          displayName: 'Generate results'
        - task: PublishTestResults@2
          condition: succeededOrFailed()
          inputs:
            testResultsFiles: '**/test-*.xml'
            testRunTitle: 'Test results for Windows Python $(python.version)'
    # Linux test matrix (oldest and newest supported Python only).
    - job: 'Linux_Tests'
      pool: {vmImage: 'ubuntu-16.04'}
      strategy:
        matrix:
          Python36:
            python.version: '3.6'
          Python38:
            python.version: '3.8'
      variables:
        QISKIT_SUPPRESS_PACKAGING_WARNINGS: Y
        PIP_CACHE_DIR: $(Pipeline.Workspace)/.pip
        QISKIT_TEST_CAPTURE_STREAMS: 1
      steps:
        - task: UsePythonVersion@0
          inputs:
            versionSpec: '$(python.version)'
          displayName: 'Use Python $(python.version)'
        - task: Cache@2
          inputs:
            key: 'pip | "$(Agent.OS)" | "$(python.version)" | "$(Build.BuildNumber)"'
            restoreKeys: |
              pip | "$(Agent.OS)" | "$(python.version)"
              pip | "$(Agent.OS)"
              pip
            path: $(PIP_CACHE_DIR)
          displayName: Cache pip
        - bash: |
            set -e
            python -m pip install --upgrade pip setuptools wheel virtualenv
            virtualenv test-job
            source test-job/bin/activate
            pip install -U -r requirements.txt -r requirements-dev.txt -c constraints.txt
            pip install -c constraints.txt -e .
            pip install "qiskit-ibmq-provider" "qiskit-aer" "z3-solver" -c constraints.txt
            python setup.py build_ext --inplace
            sudo apt install -y graphviz
            pip check
          displayName: 'Install dependencies'
        - bash: |
            set -e
            source test-job/bin/activate
            export PYTHONHASHSEED=$(python -S -c "import random; print(random.randint(1, 4294967295))")
            echo "PYTHONHASHSEED=$PYTHONHASHSEED"
            stestr run
          displayName: 'Run tests'
        - task: CopyFiles@2
          condition: failed()
          displayName: 'Copy images'
          inputs:
            contents: '**/*.png'
            targetFolder: $(Build.ArtifactStagingDirectory)
        - task: PublishBuildArtifacts@1
          condition: failed()
          displayName: 'Publish images'
          inputs:
            pathtoPublish: '$(Build.ArtifactStagingDirectory)'
            artifactName: 'drop_linux'
        - bash: |
            set -e
            source test-job/bin/activate
            pip install -U junitxml
            mkdir -p junit
            stestr last --subunit | tools/subunit_to_junit.py -o junit/test-results.xml
          condition: succeededOrFailed()
          displayName: 'Generate results'
        - task: PublishTestResults@2
          condition: succeededOrFailed()
          inputs:
            testResultsFiles: '**/test-*.xml'
            testRunTitle: 'Test results for Linux Python $(python.version)'
    # macOS Catalina matrix; tests run at reduced concurrency.
    - job: 'MacOS_Catalina_Tests'
      pool: {vmImage: 'macOS-10.15'}
      strategy:
        matrix:
          Python36:
            python.version: '3.6'
          Python38:
            python.version: '3.8'
      variables:
        QISKIT_SUPPRESS_PACKAGING_WARNINGS: Y
        PIP_CACHE_DIR: $(Pipeline.Workspace)/.pip
        QISKIT_TEST_CAPTURE_STREAMS: 1
      steps:
        - task: UsePythonVersion@0
          inputs:
            versionSpec: '$(python.version)'
          displayName: 'Use Python $(python.version)'
        - task: Cache@2
          inputs:
            key: 'pip | "$(Agent.OS)" | "$(python.version)" | "$(Build.BuildNumber)"'
            restoreKeys: |
              pip | "$(Agent.OS)" | "$(python.version)"
              pip | "$(Agent.OS)"
              pip
            path: $(PIP_CACHE_DIR)
          displayName: Cache pip
        - bash: |
            set -e
            python -m pip install --upgrade pip setuptools wheel virtualenv
            virtualenv test-job
            source test-job/bin/activate
            pip install -U -r requirements.txt -r requirements-dev.txt -c constraints.txt
            pip install -c constraints.txt -e .
            pip install "qiskit-ibmq-provider" "qiskit-aer" -c constraints.txt
            python setup.py build_ext --inplace
            pip check
          displayName: 'Install dependencies'
        - bash: |
            set -e
            source test-job/bin/activate
            export PYTHONHASHSEED=$(python -S -c "import random; print(random.randint(1, 4294967295))")
            echo "PYTHONHASHSEED=$PYTHONHASHSEED"
            stestr run --concurrency 2
          displayName: 'Run tests'
        - task: CopyFiles@2
          condition: failed()
          displayName: 'Copy images'
          inputs:
            contents: '**/*.png'
            targetFolder: $(Build.ArtifactStagingDirectory)
        - task: PublishBuildArtifacts@1
          condition: failed()
          displayName: 'Publish images'
          inputs:
            pathtoPublish: '$(Build.ArtifactStagingDirectory)'
            artifactName: 'drop_macos'
        - bash: |
            set -e
            source test-job/bin/activate
            pip install -U junitxml
            mkdir -p junit
            stestr last --subunit | tools/subunit_to_junit.py -o junit/test-results.xml
          condition: succeededOrFailed()
          displayName: 'Generate results'
        - task: PublishTestResults@2
          condition: succeededOrFailed()
          inputs:
            testResultsFiles: '**/test-*.xml'
            testRunTitle: 'Test results for macOS Python $(python.version)'
    # Builds the qiskit-tutorials notebooks as HTML and publishes them.
    - job: 'Tutorials'
      pool: {vmImage: 'ubuntu-latest'}
      strategy:
        matrix:
          Python38:
            python.version: '3.8'
      variables:
        QISKIT_SUPPRESS_PACKAGING_WARNINGS: Y
        PIP_CACHE_DIR: $(Pipeline.Workspace)/.pip
      steps:
        - task: UsePythonVersion@0
          inputs:
            versionSpec: '$(python.version)'
          displayName: 'Use Python $(python.version)'
        - task: Cache@2
          inputs:
            key: 'pip | "$(Agent.OS)" | "$(python.version)"'
            restoreKeys: |
              pip | "$(Agent.OS)"
              pip
            path: $(PIP_CACHE_DIR)
          displayName: Cache pip
        - bash: |
            set -e
            git clone https://github.com/Qiskit/qiskit-tutorials --depth=1
            python -m pip install --upgrade pip
            pip install -U -r requirements.txt -r requirements-dev.txt -c constraints.txt
            pip install -c constraints.txt -e .
            pip install "qiskit-ibmq-provider" "qiskit-aer" "z3-solver" "qiskit-ignis" "qiskit-aqua" "pyscf<1.7.4" "matplotlib<3.3.0" sphinx nbsphinx sphinx_rtd_theme cvxpy -c constraints.txt
            python setup.py build_ext --inplace
            sudo apt install -y graphviz pandoc
            pip check
          displayName: 'Install dependencies'
        - bash: |
            set -e
            cd qiskit-tutorials
            sphinx-build -b html . _build/html
          displayName: 'Build tutorials'
        - task: ArchiveFiles@2
          inputs:
            rootFolderOrFile: 'qiskit-tutorials/_build/html'
            archiveType: tar
            archiveFile: '$(Build.ArtifactStagingDirectory)/html_tutorials.tar.gz'
            verbose: true
        - task: PublishBuildArtifacts@1
          displayName: 'Publish docs'
          inputs:
            pathtoPublish: '$(Build.ArtifactStagingDirectory)'
            artifactName: 'html_tutorials'
            Parallel: true
            ParallelCount: 8
    # Compares generated matplotlib circuit images against references.
    - job: 'Image_tests'
      pool: {vmImage: 'ubuntu-latest'}
      strategy:
        matrix:
          Python38:
            python.version: '3.8'
      variables:
        QISKIT_SUPPRESS_PACKAGING_WARNINGS: Y
        PIP_CACHE_DIR: $(Pipeline.Workspace)/.pip
      steps:
        - task: UsePythonVersion@0
          inputs:
            versionSpec: '$(python.version)'
          displayName: 'Use Python $(python.version)'
        - task: Cache@2
          inputs:
            key: 'pip | "$(Agent.OS)" | "$(python.version)"'
            restoreKeys: |
              pip | "$(Agent.OS)"
              pip
            path: $(PIP_CACHE_DIR)
          displayName: Cache pip
        - bash: |
            set -e
            python -m pip install --upgrade pip
            pip install -U -r requirements.txt -c constraints.txt
            pip install -c constraints.txt -e .
            pip install "matplotlib<3.3.0" pylatexenc pillow
            python setup.py build_ext --inplace
            sudo apt install -y graphviz pandoc
            pip check
          displayName: 'Install dependencies'
        - bash: python -m unittest discover -v test/ipynb
          displayName: 'Run image test'
|
{
"pile_set_name": "Github"
}
|
// Pingus - A free Lemmings clone
// Copyright (C) 2000 Ingo Ruhnke <grumbel@gmail.com>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#ifndef HEADER_PINGUS_ENGINE_SOUND_SOUND_HPP
#define HEADER_PINGUS_ENGINE_SOUND_SOUND_HPP
#include <memory>
#include <string>
#include "engine/sound/sound_impl.hpp"
namespace Sound {
class PingusSoundImpl;
/** Static facade over the sound/music backend.
    All state lives in the single PingusSoundImpl installed via init();
    the static member functions below forward to it. Instances of this
    class carry no state of their own. */
class PingusSound
{
private:
  // The active backend; empty until init() installs one.
  static std::unique_ptr<PingusSoundImpl> sound;

public:
  PingusSound () { }
  ~PingusSound () { }

  // Non-copyable: all meaningful state is static, so copying an instance
  // would be pointless. (Modernized from the private-declaration idiom.)
  PingusSound (const PingusSound&) = delete;
  PingusSound& operator= (const PingusSound&) = delete;

  /** Install the backend implementation.
      NOTE(review): the default argument is an empty pointer; init()
      presumably substitutes a concrete backend in that case -- confirm
      in the implementation file. */
  static void init(std::unique_ptr<PingusSoundImpl> s = {});

  /** Tear down the current backend. */
  static void deinit();

  /** Load a sound file and play it immediately.
      @param name Name of the sound, aka 'ohno'
      @param volume volume
      @param panning panning */
  static void play_sound(const std::string & name, float volume = 1.0f, float panning = 0.0f);

  /** Play a music track, optionally looping. */
  static void play_music(const std::string & name, float volume = 1.0f, bool loop = true);

  /** Stop the currently playing music. */
  static void stop_music();

  // Volume control, forwarded to the backend.
  static void set_sound_volume(float volume);
  static void set_music_volume(float volume);
  static void set_master_volume(float volume);

  static float get_sound_volume();
  static float get_music_volume();
  static float get_master_volume();
};
} // namespace Sound
#endif
/* EOF */
|
{
"pile_set_name": "Github"
}
|
/* Copyright 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.codegen.discogapic.transformer;
import com.google.api.codegen.discovery.Method;
import com.google.api.codegen.transformer.SurfaceNamer;
import com.google.api.codegen.util.Name;
import com.google.api.codegen.util.TypeName;
import com.google.api.codegen.util.TypeNameConverter;
import com.google.api.codegen.util.java.JavaNameFormatter;
import com.google.api.codegen.util.java.JavaTypeTable;
/** Provides language-specific names for variables and classes of Discovery-Document models. */
public class DiscoGapicNamer {

  /* Create a DiscoGapicNamer for a Discovery-based API. */
  public DiscoGapicNamer() {}

  /** Returns the resource getter method name for a resource field. */
  public String getResourceGetterName(String fieldName, SurfaceNamer languageNamer) {
    Name getterName = Name.anyCamel("get").join(DiscoGapicParser.stringToName(fieldName));
    return languageNamer.publicMethodName(getterName);
  }

  /** Returns the resource setter method name for a resource field. */
  public String getResourceSetterName(
      String fieldName, SurfaceNamer.Cardinality isRepeated, SurfaceNamer languageNamer) {
    Name fieldAsName = DiscoGapicParser.stringToName(fieldName);
    // Repeated fields get an "addAll*"-style setter; everything else a plain "set*".
    if (isRepeated == SurfaceNamer.Cardinality.IS_REPEATED) {
      return languageNamer.publicMethodName(Name.from("add", "all").join(fieldAsName));
    }
    return languageNamer.publicMethodName(Name.from("set").join(fieldAsName));
  }

  /** Get the language-independent request message name from a method. */
  public String getRequestMessageFullName(Method method, String defaultPackageName) {
    // TODO remove reference to Java formatting - it is accidental that the fully-qualified
    // message type matches the fully-qualified Java type
    JavaNameFormatter formatter = new JavaNameFormatter();
    String requestClassName =
        formatter.publicClassName(DiscoGapicParser.getRequestName(method));
    TypeNameConverter converter = new JavaTypeTable(defaultPackageName);
    return converter.getTypeNameInImplicitPackage(requestClassName).getFullName();
  }

  /** Get the language-specific request type name from a method. */
  public TypeName getRequestTypeName(Method method, SurfaceNamer languageNamer) {
    String requestClassName =
        languageNamer.publicClassName(DiscoGapicParser.getRequestName(method));
    return languageNamer.getTypeNameConverter().getTypeNameInImplicitPackage(requestClassName);
  }
}
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="utf-8"?>
<!-- Animates the target's custom "cube" property as a float from 1 down
     to 0 over 1000 ms. The target view must expose a setCube(float)
     setter for the animator to drive. -->
<set xmlns:android="http://schemas.android.com/apk/res/android" >

    <objectAnimator
        android:duration="1000"
        android:propertyName="cube"
        android:valueFrom="1"
        android:valueTo="0"
        android:valueType="floatType" />

</set>
|
{
"pile_set_name": "Github"
}
|
---
external help file: d365fo.tools-help.xml
Module Name: d365fo.tools
online version:
schema: 2.0.0
---
# Set-D365RsatTier2Crypto
## SYNOPSIS
Sets the configuration needed to work against Tier2+ environments
## SYNTAX
```
Set-D365RsatTier2Crypto [<CommonParameters>]
```
## DESCRIPTION
Sets the registry settings needed when running RSAT against a Tier2+ environment
## EXAMPLES
### EXAMPLE 1
```
Set-D365RsatTier2Crypto
```
This will configure the registry to support RSAT against a Tier2+ environment.
## PARAMETERS
### CommonParameters
This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see [about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216).
## INPUTS
## OUTPUTS
## NOTES
Tags: RSAT, Testing, Regression Suite Automation Test, Regression, Test, Automation, Configuration
Author: Mötz Jensen (@Splaxi)
## RELATED LINKS
|
{
"pile_set_name": "Github"
}
|
diff --git a/node_modules/xmlhttprequest/lib/XMLHttpRequest.js b/node_modules/xmlhttprequest/lib/XMLHttpRequest.js
index 4893913..0543f38 100644
--- a/node_modules/xmlhttprequest/lib/XMLHttpRequest.js
+++ b/node_modules/xmlhttprequest/lib/XMLHttpRequest.js
@@ -12,7 +12,7 @@
*/
var Url = require("url");
-var spawn = require("child_process").spawn;
+//var spawn= require("child_process").spawn;
var fs = require("fs");
exports.XMLHttpRequest = function() {
|
{
"pile_set_name": "Github"
}
|
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
namespace System.Fabric.FabricDeployer
{
using System.Xml.Serialization;
/// <summary>
/// Serialization type describing an IP subnet: its address prefix and the
/// IP addresses it contains. The shape matches the XML consumed by the
/// deployer (see the Xml* attributes below).
/// </summary>
public class IPSubnet
{
    /// <summary>The IP addresses belonging to this subnet.</summary>
    [XmlElementAttribute("IPAddress", Form = System.Xml.Schema.XmlSchemaForm.Unqualified)]
    public IPAddressDetails[] IPAddress { get; set; }

    /// <summary>The subnet prefix, serialized as an XML attribute.</summary>
    [XmlAttributeAttribute()]
    public string Prefix { get; set; }
}
}
|
{
"pile_set_name": "Github"
}
|
using System;
using System.Collections;
using System.Drawing;
using System.Windows.Forms;
namespace EVEMon.Controls
{
/// <summary>
/// A ListView whose rows can be reordered by dragging. Drags that carry a
/// skill TreeNode from elsewhere are passed straight to the base class.
/// </summary>
public class DraggableListView : ListView
{
    public DraggableListView()
        : base()
    {
        DraggableInit();
    }

    #region Draggable stuff
    // Marker payload placed on the drag-data object so our own row-reorder
    // drags can be distinguished from drags originating elsewhere.
    private const string REORDER = "Reorder";

    private bool allowRowReorder = true;

    /// <summary>Gets or sets whether rows may be reordered by dragging.
    /// Also toggles AllowDrop accordingly.</summary>
    public bool AllowRowReorder
    {
        get { return this.allowRowReorder; }
        set
        {
            this.allowRowReorder = value;
            base.AllowDrop = value;
        }
    }

    // Sorting is pinned to None: automatic sorting would destroy the
    // manually established row order.
    public new SortOrder Sorting
    {
        get { return SortOrder.None; }
        set { base.Sorting = SortOrder.None; }
    }

    private void DraggableInit()
    {
        this.AllowRowReorder = true;
    }

    /// <summary>Raised before rows are moved; handlers may cancel the move.</summary>
    public event EventHandler<ListViewDragEventArgs> ListViewItemsDragging;

    /// <summary>Raised after rows have been moved.</summary>
    public event EventHandler<EventArgs> ListViewItemsDragged;

    private bool m_dragging = false;

    // Returns the skill carried by a TreeNode drag, or null when the drag
    // does not originate from a skill tree.
    private EVEMon.Common.Skill GetDraggingSkill(DragEventArgs e)
    {
        if (e.Data.GetDataPresent("System.Windows.Forms.TreeNode"))
        {
            return (EVEMon.Common.Skill)((TreeNode)e.Data.GetData("System.Windows.Forms.TreeNode")).Tag;
        }
        return null;
    }

    protected override void OnDragDrop(DragEventArgs e)
    {
        // Skill drags are handled entirely by the base class.
        EVEMon.Common.Skill dragSkill = GetDraggingSkill(e);
        if (dragSkill != null)
        {
            base.OnDragDrop(e);
            return;
        }
        base.OnDragDrop(e);
        m_dragging = false;
        ClearDropMarker();
        if (!this.AllowRowReorder)
        {
            return;
        }
        if (base.SelectedItems.Count == 0)
        {
            return;
        }
        Point cp = base.PointToClient(new Point(e.X, e.Y));
        ListViewItem dragToItem = base.GetItemAt(cp.X, cp.Y);
        if (dragToItem == null)
        {
            return;
        }
        // When moving downwards, insert AFTER the hovered item.
        int dropIndex = dragToItem.Index;
        if (dropIndex > base.SelectedItems[0].Index)
        {
            dropIndex++;
        }
        if (ListViewItemsDragging != null)
        {
            ListViewDragEventArgs args = new ListViewDragEventArgs(base.SelectedItems[0].Index,
                base.SelectedItems.Count, dropIndex);
            ListViewItemsDragging(this, args);
            if (args.Cancel)
            {
                return;
            }
        }
        ArrayList insertItems = new ArrayList(base.SelectedItems.Count);
        // Make a copy of all the selected items
        foreach (ListViewItem item in base.SelectedItems)
        {
            insertItems.Add(item.Clone());
        }
        // insert the copied items in reverse order at the drop index so
        // they appear in the right order after they've all been inserted
        for (int i = insertItems.Count - 1; i >= 0; i--)
        {
            base.Items.Insert(dropIndex, (ListViewItem)insertItems[i]);
        }
        // Remove the originals. Snapshot the selection first: removing an
        // item mutates base.SelectedItems, and the collection must not
        // change while it is being enumerated.
        ArrayList removeItems = new ArrayList(base.SelectedItems);
        foreach (ListViewItem item in removeItems)
        {
            // must clear the items icon index or an exception is thrown when it is removed
            item.StateImageIndex = -1;
            base.Items.Remove(item);
        }
        if (ListViewItemsDragged != null)
        {
            ListViewItemsDragged(this, new EventArgs());
        }
        // if the item was dragged to the end of the plan.
        if (dropIndex >= Items.Count)
        {
            base.EnsureVisible(Items.Count - 1);
        }
        else
        {
            base.EnsureVisible(dropIndex);
        }
    }

    protected override void OnDragOver(DragEventArgs e)
    {
        EVEMon.Common.Skill dragSkill = GetDraggingSkill(e);
        if (dragSkill != null)
        {
            base.OnDragOver(e);
            return;
        }
        if (!this.AllowRowReorder)
        {
            e.Effect = DragDropEffects.None;
            ClearDropMarker();
            return;
        }
        if (!e.Data.GetDataPresent(DataFormats.Text))
        {
            e.Effect = DragDropEffects.None;
            ClearDropMarker();
            return;
        }
        Point cp = base.PointToClient(new Point(e.X, e.Y));
        ListViewItem hoverItem = base.GetItemAt(cp.X, cp.Y);
        if (hoverItem == null)
        {
            e.Effect = DragDropEffects.None;
            ClearDropMarker();
            return;
        }
        // Hovering over one of the dragged rows themselves: nothing to do.
        foreach (ListViewItem moveItem in base.SelectedItems)
        {
            if (moveItem.Index == hoverItem.Index)
            {
                e.Effect = DragDropEffects.None;
                hoverItem.EnsureVisible();
                ClearDropMarker();
                return;
            }
        }
        base.OnDragOver(e);
        String text = (String)e.Data.GetData(REORDER.GetType());
        if (text.CompareTo(REORDER) == 0)
        {
            e.Effect = DragDropEffects.Move;
            hoverItem.EnsureVisible();
            // Draw the marker above or below the hovered row depending on
            // which half of the row the cursor is in.
            Rectangle hoverBounds = hoverItem.GetBounds(ItemBoundsPortion.ItemOnly);
            DrawDropMarker(hoverItem.Index, (cp.Y > (hoverBounds.Top + (hoverBounds.Height / 2))));
        }
        else
        {
            e.Effect = DragDropEffects.None;
            ClearDropMarker();
        }
    }

    protected override void OnDragEnter(DragEventArgs e)
    {
        EVEMon.Common.Skill dragSkill = GetDraggingSkill(e);
        if (dragSkill != null)
        {
            base.OnDragEnter(e);
            return;
        }
        base.OnDragEnter(e);
        if (!this.AllowRowReorder)
        {
            e.Effect = DragDropEffects.None;
            ClearDropMarker();
            return;
        }
        if (!e.Data.GetDataPresent(DataFormats.Text))
        {
            e.Effect = DragDropEffects.None;
            ClearDropMarker();
            return;
        }
        // (fix) a redundant second base.OnDragEnter(e) call was removed here.
        String text = (String)e.Data.GetData(REORDER.GetType());
        if (text.CompareTo(REORDER) == 0)
        {
            e.Effect = DragDropEffects.Move;
        }
        else
        {
            e.Effect = DragDropEffects.None;
            ClearDropMarker();
        }
    }

    // Index of the row the drop marker is currently drawn on, -1 for none,
    // and whether it sits below (true) or above (false) that row.
    private int m_dropMarkerOn = -1;
    private bool m_dropMarkerBelow = false;

    /// <summary>Erases the drop marker, if one is shown. The marker is drawn
    /// with a reversible line, so redrawing it erases it.</summary>
    public void ClearDropMarker()
    {
        if (m_dropMarkerOn != -1)
        {
            this.RestrictedPaint();
        }
        m_dropMarkerOn = -1;
    }

    /// <summary>Draws the drop marker above or below the given row.</summary>
    public void DrawDropMarker(int index, bool below)
    {
        if (m_dropMarkerOn != -1 && (m_dropMarkerOn != index || m_dropMarkerBelow != below))
        {
            ClearDropMarker();
        }
        if (m_dropMarkerOn != index)
        {
            m_dropMarkerOn = index;
            m_dropMarkerBelow = below;
            this.RestrictedPaint();
        }
    }

    // Draws (or, being reversible, erases) the marker line for the row
    // recorded in m_dropMarkerOn.
    private void RestrictedPaint()
    {
        Rectangle itemRect = base.GetItemRect(m_dropMarkerOn, ItemBoundsPortion.ItemOnly);
        Point start;
        Point end;
        start = new Point(itemRect.Left, (m_dropMarkerBelow ? itemRect.Bottom : itemRect.Top));
        end = new Point((this.Width < itemRect.Right ? this.Width : itemRect.Right), (m_dropMarkerBelow ? itemRect.Bottom : itemRect.Top));
        start = this.PointToScreen(start);
        end = this.PointToScreen(end);
        ControlPaint.DrawReversibleLine(start, end, SystemColors.Window);
    }

    protected override void OnPaint(PaintEventArgs e)
    {
        base.OnPaint(e);
        if (m_dragging)
        {
            RestrictedPaint();
        }
    }

    protected override void OnItemDrag(ItemDragEventArgs e)
    {
        base.OnItemDrag(e);
        if (!this.AllowRowReorder)
        {
            return;
        }
        base.DoDragDrop(REORDER, DragDropEffects.Move);
        m_dragging = true;
    }
    #endregion
}
/// <summary>
/// Event data for a pending row-reorder drag: which rows are about to be
/// moved, how many, and where to; handlers may cancel the operation.
/// </summary>
public class ListViewDragEventArgs : EventArgs
{
    private readonly int sourceIndex;
    private readonly int itemCount;
    private readonly int targetIndex;
    private bool cancelled = false;

    internal ListViewDragEventArgs(int from, int count, int to)
    {
        this.sourceIndex = from;
        this.itemCount = count;
        this.targetIndex = to;
    }

    /// <summary>Index of the first row being moved.</summary>
    public int MovingFrom
    {
        get { return this.sourceIndex; }
    }

    /// <summary>Number of rows being moved.</summary>
    public int MovingCount
    {
        get { return this.itemCount; }
    }

    /// <summary>Insertion index the rows are being moved to.</summary>
    public int MovingTo
    {
        get { return this.targetIndex; }
    }

    /// <summary>Set to true by a handler to cancel the move.</summary>
    public bool Cancel
    {
        get { return this.cancelled; }
        set { this.cancelled = value; }
    }
}
|
{
"pile_set_name": "Github"
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core.construction;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.beam.model.pipeline.v1.RunnerApi;
import org.apache.beam.model.pipeline.v1.RunnerApi.Components;
import org.apache.beam.model.pipeline.v1.RunnerApi.Environment;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PortablePipelineOptions;
import org.apache.beam.sdk.runners.AppliedPTransform;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.util.NameUtils;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.WindowingStrategy;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.BiMap;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.HashBiMap;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
import org.checkerframework.checker.nullness.qual.Nullable;
/** SDK objects that will be represented at some later point within a {@link Components} object. */
public class SdkComponents {
private final String newIdPrefix;
private final RunnerApi.Components.Builder componentsBuilder = RunnerApi.Components.newBuilder();
private final BiMap<AppliedPTransform<?, ?, ?>, String> transformIds = HashBiMap.create();
private final BiMap<PCollection<?>, String> pCollectionIds = HashBiMap.create();
private final BiMap<WindowingStrategy<?, ?>, String> windowingStrategyIds = HashBiMap.create();
private final BiMap<Coder<?>, String> coderIds = HashBiMap.create();
private final BiMap<Environment, String> environmentIds = HashBiMap.create();
private final Set<String> requirements;
private final Set<String> reservedIds = new HashSet<>();
private String defaultEnvironmentId;
/** Create a new {@link SdkComponents} with no components. */
public static SdkComponents create() {
return new SdkComponents(RunnerApi.Components.getDefaultInstance(), null, "");
}
/**
* Create new {@link SdkComponents} importing all items from provided {@link Components} object.
*
* <p>WARNING: This action might cause some of duplicate items created.
*/
public static SdkComponents create(
RunnerApi.Components components, Collection<String> requirements) {
return new SdkComponents(components, requirements, "");
}
/*package*/ static SdkComponents create(
RunnerApi.Components components,
Map<String, AppliedPTransform<?, ?, ?>> transforms,
Map<String, PCollection<?>> pCollections,
Map<String, WindowingStrategy<?, ?>> windowingStrategies,
Map<String, Coder<?>> coders,
Map<String, Environment> environments,
Collection<String> requirements) {
SdkComponents sdkComponents = SdkComponents.create(components, requirements);
sdkComponents.transformIds.inverse().putAll(transforms);
sdkComponents.pCollectionIds.inverse().putAll(pCollections);
sdkComponents.windowingStrategyIds.inverse().putAll(windowingStrategies);
sdkComponents.coderIds.inverse().putAll(coders);
sdkComponents.environmentIds.inverse().putAll(environments);
return sdkComponents;
}
public static SdkComponents create(PipelineOptions options) {
SdkComponents sdkComponents =
new SdkComponents(RunnerApi.Components.getDefaultInstance(), null, "");
PortablePipelineOptions portablePipelineOptions = options.as(PortablePipelineOptions.class);
sdkComponents.registerEnvironment(
Environments.createOrGetDefaultEnvironment(portablePipelineOptions));
return sdkComponents;
}
private SdkComponents(
@Nullable Components components,
@Nullable Collection<String> requirements,
String newIdPrefix) {
this.newIdPrefix = newIdPrefix;
this.requirements = new HashSet<>();
if (components == null) {
if (requirements != null) {
this.requirements.addAll(requirements);
}
} else {
mergeFrom(components, requirements);
}
}
/** Merge Components proto into this SdkComponents instance. */
public void mergeFrom(
RunnerApi.Components components, @Nullable Collection<String> requirements) {
reservedIds.addAll(components.getTransformsMap().keySet());
reservedIds.addAll(components.getPcollectionsMap().keySet());
reservedIds.addAll(components.getWindowingStrategiesMap().keySet());
reservedIds.addAll(components.getCodersMap().keySet());
reservedIds.addAll(components.getEnvironmentsMap().keySet());
components.getEnvironmentsMap().forEach(environmentIds.inverse()::forcePut);
if (requirements != null) {
this.requirements.addAll(requirements);
}
componentsBuilder.mergeFrom(components);
}
/**
* Returns an SdkComponents like this one, but which will prefix all newly generated ids with the
* given string.
*
* <p>Useful for ensuring independently-constructed components have non-overlapping ids.
*/
public SdkComponents withNewIdPrefix(String newIdPrefix) {
SdkComponents sdkComponents =
new SdkComponents(componentsBuilder.build(), requirements, newIdPrefix);
sdkComponents.transformIds.putAll(transformIds);
sdkComponents.pCollectionIds.putAll(pCollectionIds);
sdkComponents.windowingStrategyIds.putAll(windowingStrategyIds);
sdkComponents.coderIds.putAll(coderIds);
sdkComponents.environmentIds.putAll(environmentIds);
return sdkComponents;
}
/**
* Registers the provided {@link AppliedPTransform} into this {@link SdkComponents}, returning a
* unique ID for the {@link AppliedPTransform}. Multiple registrations of the same {@link
* AppliedPTransform} will return the same unique ID.
*
* <p>All of the children must already be registered within this {@link SdkComponents}.
*/
public String registerPTransform(
AppliedPTransform<?, ?, ?> appliedPTransform, List<AppliedPTransform<?, ?, ?>> children)
throws IOException {
String name = getApplicationName(appliedPTransform);
// If this transform is present in the components, nothing to do. return the existing name.
// Otherwise the transform must be translated and added to the components.
if (componentsBuilder.getTransformsOrDefault(name, null) != null) {
return name;
}
checkNotNull(children, "child nodes may not be null");
componentsBuilder.putTransforms(
name, PTransformTranslation.toProto(appliedPTransform, children, this));
return name;
}
/**
* Gets the ID for the provided {@link AppliedPTransform}. The provided {@link AppliedPTransform}
* will not be added to the components produced by this {@link SdkComponents} until it is
* translated via {@link #registerPTransform(AppliedPTransform, List)}.
*/
private String getApplicationName(AppliedPTransform<?, ?, ?> appliedPTransform) {
String existing = transformIds.get(appliedPTransform);
if (existing != null) {
return existing;
}
String name = appliedPTransform.getFullName();
if (name.isEmpty()) {
name = "unnamed-ptransform";
}
name = uniqify(name, transformIds.values());
transformIds.put(appliedPTransform, name);
return name;
}
String getExistingPTransformId(AppliedPTransform<?, ?, ?> appliedPTransform) {
checkArgument(
transformIds.containsKey(appliedPTransform),
"%s %s has not been previously registered",
AppliedPTransform.class.getSimpleName(),
appliedPTransform);
return transformIds.get(appliedPTransform);
}
public String getPTransformIdOrThrow(AppliedPTransform<?, ?, ?> appliedPTransform) {
String existing = transformIds.get(appliedPTransform);
checkArgument(existing != null, "PTransform id not found for: %s", appliedPTransform);
return existing;
}
/**
* Registers the provided {@link PCollection} into this {@link SdkComponents}, returning a unique
* ID for the {@link PCollection}. Multiple registrations of the same {@link PCollection} will
* return the same unique ID.
*/
public String registerPCollection(PCollection<?> pCollection) throws IOException {
String existing = pCollectionIds.get(pCollection);
if (existing != null) {
return existing;
}
String uniqueName = uniqify(pCollection.getName(), pCollectionIds.values());
pCollectionIds.put(pCollection, uniqueName);
componentsBuilder.putPcollections(
uniqueName, PCollectionTranslation.toProto(pCollection, this));
return uniqueName;
}
/**
* Registers the provided {@link WindowingStrategy} into this {@link SdkComponents}, returning a
* unique ID for the {@link WindowingStrategy}. Multiple registrations of the same {@link
* WindowingStrategy} will return the same unique ID.
*/
public String registerWindowingStrategy(WindowingStrategy<?, ?> windowingStrategy)
throws IOException {
String existing = windowingStrategyIds.get(windowingStrategy);
if (existing != null) {
return existing;
}
String baseName =
String.format(
"%s(%s)",
NameUtils.approximateSimpleName(windowingStrategy),
NameUtils.approximateSimpleName(windowingStrategy.getWindowFn()));
String name = uniqify(baseName, windowingStrategyIds.values());
windowingStrategyIds.put(windowingStrategy, name);
RunnerApi.WindowingStrategy windowingStrategyProto =
WindowingStrategyTranslation.toProto(windowingStrategy, this);
componentsBuilder.putWindowingStrategies(name, windowingStrategyProto);
return name;
}
/**
* Registers the provided {@link Coder} into this {@link SdkComponents}, returning a unique ID for
* the {@link Coder}. Multiple registrations of the same {@link Coder} will return the same unique
* ID.
*
* <p>Coders are stored by identity to ensure that coders with implementations of {@link
* #equals(Object)} and {@link #hashCode()} but incompatible binary formats are not considered the
* same coder.
*/
public String registerCoder(Coder<?> coder) throws IOException {
String existing = coderIds.get(coder);
if (existing != null) {
return existing;
}
String baseName = NameUtils.approximateSimpleName(coder);
String name = uniqify(baseName, coderIds.values());
coderIds.put(coder, name);
RunnerApi.Coder coderProto = CoderTranslation.toProto(coder, this);
componentsBuilder.putCoders(name, coderProto);
return name;
}
/**
* Registers the provided {@link Environment} into this {@link SdkComponents}, returning a unique
* ID for the {@link Environment}. Multiple registrations of the same {@link Environment} will
* return the same unique ID.
*/
public String registerEnvironment(Environment env) {
String environmentId;
String existing = environmentIds.get(env);
if (existing != null) {
environmentId = existing;
} else {
String name = uniqify(env.getUrn(), environmentIds.values());
environmentIds.put(env, name);
componentsBuilder.putEnvironments(name, env);
environmentId = name;
}
if (defaultEnvironmentId == null) {
defaultEnvironmentId = environmentId;
}
return environmentId;
}
public String getOnlyEnvironmentId() {
// TODO Support multiple environments. The environment should be decided by the translation.
if (defaultEnvironmentId != null) {
return defaultEnvironmentId;
} else {
return Iterables.getOnlyElement(componentsBuilder.getEnvironmentsMap().keySet());
}
}
  /**
   * Records the given requirement URN for this pipeline; duplicates are collapsed when the
   * requirements are read back via {@link #requirements()}.
   */
  public void addRequirement(String urn) {
    requirements.add(urn);
  }
private String uniqify(String baseName, Set<String> existing) {
String name = newIdPrefix + baseName;
int increment = 1;
while (existing.contains(name) || reservedIds.contains(name)) {
name = newIdPrefix + baseName + Integer.toString(increment);
increment++;
}
return name;
}
  /**
   * Convert this {@link SdkComponents} into a {@link RunnerApi.Components}, including all of the
   * contained {@link Coder coders}, {@link WindowingStrategy windowing strategies}, {@link
   * PCollection PCollections}, and {@link PTransform PTransforms}.
   *
   * <p>Returns a snapshot of whatever has been registered so far; later registrations are not
   * reflected in the returned proto.
   */
  public RunnerApi.Components toComponents() {
    return componentsBuilder.build();
  }
  /** Returns an immutable snapshot of the requirement URNs registered so far (de-duplicated). */
  public Collection<String> requirements() {
    return ImmutableSet.copyOf(requirements);
  }
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (c) 2000 Apple Computer, Inc. All rights reserved.
*
* @APPLE_LICENSE_HEADER_START@
*
* The contents of this file constitute Original Code as defined in and
* are subject to the Apple Public Source License Version 1.1 (the
* "License"). You may not use this file except in compliance with the
* License. Please obtain a copy of the License at
* http://www.apple.com/publicsource and read it before using this file.
*
* This Original Code and all software distributed under the License are
* distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, EITHER
* EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
* INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT. Please see the
* License for the specific language governing rights and limitations
* under the License.
*
* @APPLE_LICENSE_HEADER_END@
*/
/*
* @OSF_COPYRIGHT@
*/
/*
* HISTORY
*
* Revision 1.1.1.1 1998/09/22 21:05:48 wsanchez
* Import of Mac OS X kernel (~semeria)
*
* Revision 1.1.1.1 1998/03/07 02:26:09 wsanchez
* Import of OSF Mach kernel (~mburg)
*
* Revision 1.1.11.2 1995/01/06 19:11:11 devrcs
* mk6 CR668 - 1.3b26 merge
* Add padding for alpha, make n_other unsigned,
* fix erroneous def of N_FN.
* [1994/10/14 03:40:03 dwm]
*
* Revision 1.1.11.1 1994/09/23 01:23:37 ezf
* change marker to not FREE
* [1994/09/22 21:11:49 ezf]
*
* Revision 1.1.4.3 1993/07/27 18:28:42 elliston
* Add ANSI prototypes. CR #9523.
* [1993/07/27 18:13:44 elliston]
*
* Revision 1.1.4.2 1993/06/02 23:13:34 jeffc
* Added to OSF/1 R1.3 from NMK15.0.
* [1993/06/02 20:58:08 jeffc]
*
* Revision 1.1 1992/09/30 02:24:29 robert
* Initial revision
*
* $EndLog$
*/
/* CMU_HIST */
/*
* Revision 2.4 91/05/14 15:38:20 mrt
* Correcting copyright
*
* Revision 2.3 91/02/05 17:07:42 mrt
* Changed to new Mach copyright
* [91/01/31 16:20:26 mrt]
*
* 11-Aug-88 David Golub (dbg) at Carnegie-Mellon University
* Added n_un, n_strx definitions for kernel debugger (from
* a.out.h).
*
*/
/* CMU_ENDHIST */
/*
* Mach Operating System
* Copyright (c) 1991 Carnegie Mellon University
* All Rights Reserved.
*
* Permission to use, copy, modify and distribute this software and its
* documentation is hereby granted, provided that both the copyright
* notice and this permission notice appear in all copies of the
* software, derivative works or modified versions, and any portions
* thereof, and that both notices appear in supporting documentation.
*
* CARNEGIE MELLON ALLOWS FREE USE OF THIS SOFTWARE IN ITS "AS IS"
* CONDITION. CARNEGIE MELLON DISCLAIMS ANY LIABILITY OF ANY KIND FOR
* ANY DAMAGES WHATSOEVER RESULTING FROM THE USE OF THIS SOFTWARE.
*
* Carnegie Mellon requests users of this software to return to
*
* Software Distribution Coordinator or Software.Distribution@CS.CMU.EDU
* School of Computer Science
* Carnegie Mellon University
* Pittsburgh PA 15213-3890
*
* any improvements or extensions that they make and grant Carnegie Mellon
* the rights to redistribute these changes.
*/
/*
*/
/*
* nlist.h - symbol table entry structure for an a.out file
* derived from FSF's a.out.gnu.h
*
*/
#ifndef _DDB_NLIST_H_
#define _DDB_NLIST_H_

/* Symbol table entry for an a.out file (see the header comment above). */
struct nlist {
	union n_un {
		char *n_name;	/* symbol name (in-core form) */
		long n_strx;	/* index into file string table (on-disk form) */
	} n_un;
	unsigned char n_type;	/* type flag, i.e. N_TEXT etc; see below */
	unsigned char n_other;	/* unused */
	short n_desc;		/* see <stab.h> */
#if defined(__alpha)
	int n_pad;		/* alignment, used to carry framesize info */
#endif
	vm_offset_t n_value;	/* value of this symbol (or sdb offset) */
};

/*
 * Simple values for n_type.
 */
#define N_UNDF 0 /* undefined */
#define N_ABS 2 /* absolute */
#define N_TEXT 4 /* text */
#define N_DATA 6 /* data */
#define N_BSS 8 /* bss */
#define N_FN 0x1e /* file name symbol; note: same value as the N_TYPE mask below */

#define N_EXT 1 /* external bit, or'ed in */
#define N_TYPE 0x1e /* mask for all the type bits */

#define N_STAB 0xe0 /* if any of these bits set, a SDB entry */

#endif /* !_DDB_NLIST_H_ */
|
{
"pile_set_name": "Github"
}
|
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="14.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{96563750-9265-4ACC-8E9E-61930A208A4D}</ProjectGuid>
<OutputType>WinExe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>Optimizer</RootNamespace>
<AssemblyName>Optimizer</AssemblyName>
<TargetFrameworkVersion>v4.5.2</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<AutoGenerateBindingRedirects>true</AutoGenerateBindingRedirects>
<IsWebBootstrapper>false</IsWebBootstrapper>
<PublishUrl>publish\</PublishUrl>
<Install>true</Install>
<InstallFrom>Disk</InstallFrom>
<UpdateEnabled>false</UpdateEnabled>
<UpdateMode>Foreground</UpdateMode>
<UpdateInterval>7</UpdateInterval>
<UpdateIntervalUnits>Days</UpdateIntervalUnits>
<UpdatePeriodically>false</UpdatePeriodically>
<UpdateRequired>false</UpdateRequired>
<MapFileExtensions>true</MapFileExtensions>
<ApplicationRevision>0</ApplicationRevision>
<ApplicationVersion>1.0.0.%2a</ApplicationVersion>
<UseApplicationTrust>false</UseApplicationTrust>
<BootstrapperEnabled>true</BootstrapperEnabled>
<TargetFrameworkProfile />
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>..\bin\debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<Prefer32Bit>false</Prefer32Bit>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>..\bin\release\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<Prefer32Bit>false</Prefer32Bit>
</PropertyGroup>
<PropertyGroup>
<ApplicationManifest>app.manifest</ApplicationManifest>
</PropertyGroup>
<PropertyGroup>
<ApplicationIcon>optimizer.ico</ApplicationIcon>
</PropertyGroup>
<ItemGroup>
<Reference Include="Microsoft.VisualBasic" />
<Reference Include="Newtonsoft.Json">
<HintPath>.\Newtonsoft.Json.dll</HintPath>
</Reference>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.ServiceProcess" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Deployment" />
<Reference Include="System.Drawing" />
<Reference Include="System.Net.Http" />
<Reference Include="System.Windows.Forms" />
<Reference Include="System.Xml" />
<Reference Include="System.Management.Automation" />
</ItemGroup>
<ItemGroup>
<Compile Include="AboutForm.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="AboutForm.Designer.cs">
<DependentUpon>AboutForm.cs</DependentUpon>
</Compile>
<Compile Include="CleanHelper.cs" />
<Compile Include="EdgeForm.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="EdgeForm.Designer.cs">
<DependentUpon>EdgeForm.cs</DependentUpon>
</Compile>
<Compile Include="EmbeddedAssembly.cs" />
<Compile Include="Enums.cs" />
<Compile Include="HostsEditorForm.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="HostsEditorForm.Designer.cs">
<DependentUpon>HostsEditorForm.cs</DependentUpon>
</Compile>
<Compile Include="HostsHelper.cs" />
<Compile Include="InfoForm.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="InfoForm.Designer.cs">
<DependentUpon>InfoForm.cs</DependentUpon>
</Compile>
<Compile Include="Integrator.cs" />
<Compile Include="ListViewColumnSorter.cs" />
<Compile Include="MainForm.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="MainForm.Designer.cs">
<DependentUpon>MainForm.cs</DependentUpon>
</Compile>
<Compile Include="Optimize.cs" />
<Compile Include="Options.cs" />
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="HelperForm.cs">
<SubType>Form</SubType>
</Compile>
<Compile Include="HelperForm.Designer.cs">
<DependentUpon>HelperForm.cs</DependentUpon>
</Compile>
<Compile Include="Required.cs" />
<Compile Include="SilentConfig.cs" />
<Compile Include="SilentOps.cs" />
<Compile Include="StartupItem.cs" />
<Compile Include="ToggleSwitch\ImageHelper.cs" />
<Compile Include="ToggleSwitch\ToggleSwitch.cs">
<SubType>Component</SubType>
</Compile>
<Compile Include="ToggleSwitch\ToggleSwitchRenderer.cs" />
<Compile Include="ToggleSwitch\ToggleSwitchRendererBase.cs" />
<Compile Include="Utilities.cs" />
<EmbeddedResource Include="AboutForm.resx">
<DependentUpon>AboutForm.cs</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Include="EdgeForm.resx">
<DependentUpon>EdgeForm.cs</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Include="HostsEditorForm.resx">
<DependentUpon>HostsEditorForm.cs</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Include="InfoForm.resx">
<DependentUpon>InfoForm.cs</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Include="MainForm.resx">
<DependentUpon>MainForm.cs</DependentUpon>
</EmbeddedResource>
<EmbeddedResource Include="Properties\Resources.resx">
<Generator>ResXFileCodeGenerator</Generator>
<LastGenOutput>Resources.Designer.cs</LastGenOutput>
<SubType>Designer</SubType>
</EmbeddedResource>
<Compile Include="Properties\Resources.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Resources.resx</DependentUpon>
<DesignTime>True</DesignTime>
</Compile>
<EmbeddedResource Include="HelperForm.resx">
<DependentUpon>HelperForm.cs</DependentUpon>
</EmbeddedResource>
<None Include="app.manifest">
<SubType>Designer</SubType>
</None>
<None Include="packages.config" />
<None Include="Properties\Settings.settings">
<Generator>SettingsSingleFileGenerator</Generator>
<LastGenOutput>Settings.Designer.cs</LastGenOutput>
</None>
<Compile Include="Properties\Settings.Designer.cs">
<AutoGen>True</AutoGen>
<DependentUpon>Settings.settings</DependentUpon>
<DesignTimeSharedInput>True</DesignTimeSharedInput>
</Compile>
<None Include="Resources\DesktopShortcuts.reg" />
<None Include="Resources\DisableOfficeTelemetry.reg" />
<None Include="Resources\EnableOfficeTelemetry.reg" />
<None Include="Resources\EnableOfficeTelemetryTasks.bat" />
<None Include="Resources\EnableTelemetryTasks.bat" />
<None Include="Resources\EnableXboxTasks.bat" />
<None Include="Resources\hosts" />
<None Include="Resources\InstallTakeOwnership.reg" />
<None Include="Resources\PowerMenu.reg" />
<None Include="Resources\RemoveTakeOwnership.reg" />
<None Include="Resources\SystemShortcuts.reg" />
<None Include="Resources\SystemTools.reg" />
<None Include="Resources\WindowsApps.reg" />
</ItemGroup>
<ItemGroup>
<None Include="App.config" />
</ItemGroup>
<ItemGroup>
<None Include="Resources\DisableOfficeTelemetryTasks.bat" />
<None Include="Resources\DisableTelemetryTasks.bat" />
<None Include="Resources\DisableXboxTasks.bat" />
<None Include="Resources\OneDrive_Uninstaller.bin" />
</ItemGroup>
<ItemGroup>
<BootstrapperPackage Include=".NETFramework,Version=v4.5.2">
<Visible>False</Visible>
<ProductName>Microsoft .NET Framework 4.5.2 %28x86 and x64%29</ProductName>
<Install>true</Install>
</BootstrapperPackage>
<BootstrapperPackage Include="Microsoft.Net.Framework.3.5.SP1">
<Visible>False</Visible>
<ProductName>.NET Framework 3.5 SP1</ProductName>
<Install>false</Install>
</BootstrapperPackage>
</ItemGroup>
<ItemGroup>
<COMReference Include="Shell32">
<Guid>{50A7E9B0-70EF-11D1-B75A-00A0C90564FE}</Guid>
<VersionMajor>1</VersionMajor>
<VersionMinor>0</VersionMinor>
<Lcid>0</Lcid>
<WrapperTool>tlbimp</WrapperTool>
<Isolated>False</Isolated>
<EmbedInteropTypes>True</EmbedInteropTypes>
</COMReference>
</ItemGroup>
<ItemGroup>
<None Include="Resources\optimizer.png" />
</ItemGroup>
<ItemGroup>
<None Include="Resources\optimizer-ico.ico" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Newtonsoft.Json.dll" />
<Content Include="optimizer.ico" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
|
{
"pile_set_name": "Github"
}
|
# 599 Onatchiway
# County Title
title = c_onatchiway
# Settlements
max_settlements = 2
b_la_zec_QC = castle
# Misc
culture = innu
religion = ursuline
terrain = hills
|
{
"pile_set_name": "Github"
}
|
package conf

// Connection settings for the sample MySQL database.
//
// NOTE(review): credentials and host details are hard-coded in source —
// consider loading them from the environment or a secrets store instead.
const (
	USER string = "root"
	PASSWORD string = "mysql01"
	DB string = "sample"
	HOST string = "192.168.99.100"
	PORT string = "32769"
)
|
{
"pile_set_name": "Github"
}
|
diff -u -r ../otp-OTP-22.2.1/erts/emulator/nifs/common/socket_util.c ./erts/emulator/nifs/common/socket_util.c
--- ../otp-OTP-22.2.1/erts/emulator/nifs/common/socket_util.c 2019-12-18 16:48:36.000000000 +0000
+++ ./erts/emulator/nifs/common/socket_util.c 2020-01-02 21:53:42.278551000 +0000
@@ -1490,6 +1490,7 @@
break;
#endif
+#ifndef __ANDROID__
#if defined(PACKET_USER)
case PACKET_USER:
*ePktType = esock_atom_user;
@@ -1501,6 +1502,7 @@
*ePktType = esock_atom_kernel;
break;
#endif
+#endif
#if defined(PACKET_FASTROUTE)
case PACKET_FASTROUTE:
|
{
"pile_set_name": "Github"
}
|
import { AnnotationCommand } from '../editor'
import { $$ } from '../dom'
import LinkModal from './LinkModal'
/**
 * Toolbar command that creates a new link annotation over the current
 * property selection. On execution it requests a LinkModal to collect the
 * target URL, then inserts a 'link' annotation with that href.
 */
export default class CreateLinkCommand extends AnnotationCommand {
  // TODO: GDocs enables the tool even if over a link
  // but not creating a new link, but opening the editor for the existing link
  getCommandState (params, context) {
    const selection = params.selection
    // Guard: only enabled for a non-null property selection.
    if (!selection || selection.isNull() || !selection.isPropertySelection()) {
      return { disabled: true }
    }
    const existingLinks = params.selectionState.annosByType.get('link') || []
    const enabled = super.canCreate(existingLinks, selection, context)
    return { disabled: !enabled }
  }

  execute (params, context) {
    const openModal = () => $$(LinkModal, { mode: 'create' })
    context.editorSession.getRootComponent().send('requestModal', openModal).then((modal) => {
      // The modal resolves falsy when dismissed without confirming.
      if (!modal) return
      const href = modal.refs.href.val()
      context.api.insertAnnotation('link', { href })
    })
  }
}
|
{
"pile_set_name": "Github"
}
|
/*
* Copyright (C) 2013-2015 RoboVM AB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.robovm.apple.gamekit;
/*<imports>*/
import java.io.*;
import java.nio.*;
import java.util.*;
import org.robovm.objc.*;
import org.robovm.objc.annotation.*;
import org.robovm.objc.block.*;
import org.robovm.rt.*;
import org.robovm.rt.annotation.*;
import org.robovm.rt.bro.*;
import org.robovm.rt.bro.annotation.*;
import org.robovm.rt.bro.ptr.*;
import org.robovm.apple.foundation.*;
import org.robovm.apple.uikit.*;
/*</imports>*/
/*<javadoc>*/
/*</javadoc>*/
/*<annotations>*/@Library("GameKit") @NativeClass/*</annotations>*/
// NOTE: the /*<...>*/ markers delimit sections maintained by the RoboVM binding
// generator; do not edit the marked regions by hand.
/*<visibility>*/public/*</visibility>*/ class /*<name>*/GKScoreChallenge/*</name>*/ 
    extends /*<extends>*/GKChallenge/*</extends>*/ 
    /*<implements>*//*</implements>*/ {

    /*<ptr>*/public static class GKScoreChallengePtr extends Ptr<GKScoreChallenge, GKScoreChallengePtr> {}/*</ptr>*/
    /*<bind>*/static { ObjCRuntime.bind(GKScoreChallenge.class); }/*</bind>*/
    /*<constants>*//*</constants>*/
    /*<constructors>*/
    public GKScoreChallenge() {}
    protected GKScoreChallenge(Handle h, long handle) { super(h, handle); }
    protected GKScoreChallenge(SkipInit skipInit) { super(skipInit); }
    /*</constructors>*/
    /*<properties>*/
    /** The {@link GKScore} associated with this challenge (bound to the native {@code score} property). */
    @Property(selector = "score")
    public native GKScore getScore();
    /*</properties>*/
    /*<members>*//*</members>*/
    /*<methods>*/
    
    /*</methods>*/
}
|
{
"pile_set_name": "Github"
}
|
# Build the gfid-access translator as a GlusterFS xlator (loadable module).
xlator_LTLIBRARIES = gfid-access.la
# Modules install under the per-version "features" xlator directory.
xlatordir = $(libdir)/glusterfs/$(PACKAGE_VERSION)/xlator/features
gfid_access_la_LDFLAGS = -module $(GF_XLATOR_DEFAULT_LDFLAGS)
gfid_access_la_SOURCES = gfid-access.c
gfid_access_la_LIBADD = $(top_builddir)/libglusterfs/src/libglusterfs.la
# Private headers: compiled against, never installed.
noinst_HEADERS = gfid-access.h gfid-access-mem-types.h
AM_CPPFLAGS = $(GF_CPPFLAGS) -I$(top_srcdir)/libglusterfs/src \
	-I$(top_srcdir)/rpc/xdr/src -I$(top_builddir)/rpc/xdr/src
AM_CFLAGS = -Wall $(GF_CFLAGS)
CLEANFILES =
|
{
"pile_set_name": "Github"
}
|
updating
updateUrl: aUrl
	"Override this method to modify the WAUrl object that will be used as the base URL
	while rendering. This method is usually called by a WAUpdateUrlVisitor.
	If you are using Painters within a tree of Presenters/Components, this method will
	not be called unless the Painter is included in the Presenter's list of children."
|
{
"pile_set_name": "Github"
}
|
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<DebugType>Full</DebugType>
<Platforms>AnyCPU;x64;x86</Platforms>
<Version>5.8.0</Version>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\Wexflow.Core\Wexflow.Core.csproj" />
</ItemGroup>
</Project>
|
{
"pile_set_name": "Github"
}
|
package com.concurnas.compiler.ast;
import com.concurnas.compiler.ast.interfaces.Expression;
import com.concurnas.compiler.visitors.Visitor;
/**
 * An {@link Assign} node wrapping a bare expression that appears as a statement —
 * e.g. a function call whose result is discarded. No variable is actually assigned;
 * the wrapped expression is simply evaluated (see the constructor comment).
 */
public class DuffAssign extends Assign {

	/** The wrapped expression; evaluated for its effects, its value is discarded. */
	public Expression e;

	public DuffAssign(int line, int col, Expression e) {
		super(line, col, false);
		this.e = e; //stupid, just like a function call etc
	}

	/** Convenience constructor taking position information from the expression itself. */
	public DuffAssign(Expression e) {
		this(e.getLine(), e.getColumn(), e);
	}

	/** A duff assign may stand alone on a line only if its wrapped expression may. */
	@Override
	public boolean getCanBeOnItsOwnLine(){
		return e.getCanBeOnItsOwnLine();
	}

	@Override
	public Object accept(Visitor visitor) {
		visitor.setLastLineVisited(super.getLine());
		return visitor.visit(this);
	}

	/** Not supported: there is no assignment target to mark as "insist new". */
	@Override
	public void setInsistNew(boolean b) {
		throw new RuntimeException("setInsistNew not implemented on DuffAssign");
	}

	@Override
	public Node copyTypeSpecific() {
		return new DuffAssign(super.line, super.column, (Expression)e.copy());
	}

	/** Delegates stack-preservation to the wrapped expression node. */
	public void setShouldBePresevedOnStack(boolean should)
	{
		((Node) e).setShouldBePresevedOnStack(should);
	}

	@Override
	public boolean isInsistNew() {
		return false;
	}

	/** Duff assigns carry no annotations; setter is a no-op. */
	@Override
	public void setAnnotations(Annotations annotations) {
		//nop
	}

	/** Duff assigns carry no annotations; always returns null. */
	@Override
	public Annotations getAnnotations() {
		//nop
		return null;
	}

	/**
	 * Valid at class level when the wrapped expression is really a disguised
	 * assignment/invocation with a resolved {@code astRedirect}; otherwise falls
	 * back to the inherited {@code isValidAtClassLevel} flag.
	 */
	@Override
	public boolean getIsValidAtClassLevel(){
		if(e instanceof ExpressionList){
			if(((ExpressionList)e).astRedirect instanceof AssignNew){
				return true;
			}
		}
		if(e instanceof FuncInvoke) {
			return ((FuncInvoke)e).astRedirect != null;
		}
		if(e instanceof AsyncRefRef) {
			return ((AsyncRefRef)e).astRedirect != null;
		}
		return isValidAtClassLevel;
	}

	@Override
	public Expression getRHSExpression() {
		return e;
	}

	/**
	 * NOTE(review): intentionally does not replace the wrapped expression — the
	 * mutation below is commented out — and always returns the current one.
	 * Confirm this is the intended contract before relying on it.
	 */
	@Override
	public Expression setRHSExpression(Expression what) {
		//Expression before = e;
		//this.e = what;
		return e;
	}

	/** No-op: assignment style is meaningless for a duff assign. */
	@Override
	public void setAssignStyleEnum(AssignStyleEnum to) {
	}
}
|
{
"pile_set_name": "Github"
}
|
import styled from 'styled-components'
import { ReactComponent as MainLogo } from 'images/logo-react-zzaria.svg'
// Application logo: the imported SVG fixed to a 200x50 box, with its path
// fill and line stroke overridden to the theme's common white.
const Logo = styled(MainLogo)`
  height: 50px;
  width: 200px;
  & path {
    fill: ${({ theme }) => theme.palette.common.white};
  }
  & line {
    stroke: ${({ theme }) => theme.palette.common.white};
  }
`

export default Logo
|
{
"pile_set_name": "Github"
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.