repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
smgoller/geode | geode-gfsh/src/integrationTest/java/org/apache/geode/management/internal/cli/functions/Geode3544JUnitTest.java | 4524 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.functions;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.Serializable;
import java.util.List;
import java.util.Objects;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionFactory;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.management.internal.cli.domain.DataCommandResult;
public class Geode3544JUnitTest {

    private static Cache cache;
    private static final String PARTITIONED_REGION = "emp_region";
    // JSON form of the EmpData key written by setUp(); used to locate the entry
    // through the gfsh data-command path in the test below.
    private static String emp_key;

    /**
     * Serializable base class carrying a single long payload; parent of the
     * region key type.
     */
    static class EmpProfile implements Serializable {
        private static final long serialVersionUID = 1L;

        private long data;

        public EmpProfile() {
        }

        public EmpProfile(long in_data) {
            this.data = in_data;
        }

        public long getData() {
            return data;
        }

        public void setData(long data) {
            this.data = data;
        }
    }

    /**
     * Region key type. Equality is deliberately based on empId alone, and
     * hashCode is kept consistent with that definition of equality.
     */
    public static class EmpData extends EmpProfile {
        private static final long serialVersionUID = 1L;

        private short empId;
        private Integer empNumber;
        private long empAccount;

        public EmpData() {
            super();
        }

        public EmpData(long in_data, short in_empId, Integer in_empNumber, long in_empAccount) {
            super(in_data);
            this.empId = in_empId;
            this.empNumber = in_empNumber;
            this.empAccount = in_empAccount;
        }

        @Override
        public boolean equals(Object other) {
            if (this == other) {
                return true;
            }
            if (other instanceof EmpData) {
                return this.getEmpId() == ((EmpData) other).getEmpId();
            }
            // Fixed: the previous implementation returned true here, making every
            // non-EmpData object "equal" to this one and violating the equals()
            // contract.
            return false;
        }

        @Override
        public String toString() {
            return "data:" + getData() + "," + "empId" + getEmpId();
        }

        public short getEmpId() {
            return empId;
        }

        public void setEmpId(short empId) {
            this.empId = empId;
        }

        public Integer getEmpNumber() {
            return empNumber;
        }

        public void setEmpNumber(Integer empNumber) {
            this.empNumber = empNumber;
        }

        public long getEmpAccount() {
            return empAccount;
        }

        public void setEmpAccount(long empAccount) {
            this.empAccount = empAccount;
        }

        @Override
        public int hashCode() {
            // Hash only empId so that objects equal per equals() share a hash code.
            // Previously empAccount and empNumber were hashed as well, which broke
            // the equals/hashCode contract (equal objects could hash differently).
            return Objects.hash(empId);
        }
    }

    /** Creates the cache, populates the partitioned region, and captures the key as JSON. */
    @BeforeClass
    public static void setUp() throws Exception {
        cache = new CacheFactory().set(MCAST_PORT, "0").create();
        RegionFactory<EmpData, String> factory = cache.createRegionFactory(RegionShortcut.PARTITION);
        Region<EmpData, String> region1 = factory.create(PARTITIONED_REGION);
        EmpData emp_data_key = new EmpData(1, (short) 1, 1, 1);
        region1.put(emp_data_key, "value_1");
        ObjectMapper mapper = new ObjectMapper();
        emp_key = mapper.writeValueAsString(emp_data_key);
    }

    @AfterClass
    public static void tearDown() {
        cache.close();
        cache = null;
    }

    /*
     * This test addresses GEODE-3544
     */
    @Test
    public void testLocateKeyIsObject() {
        DataCommandFunction dataCmdFn = new DataCommandFunction();
        DataCommandResult result = dataCmdFn.locateEntry(emp_key, EmpData.class.getName(),
                String.class.getName(), PARTITIONED_REGION, false, (InternalCache) cache);
        assertNotNull(result);
        result.aggregate(null);
        List<DataCommandResult.KeyInfo> keyInfos = result.getLocateEntryLocations();
        assertEquals(1, keyInfos.size());
    }
}
| apache-2.0 |
Pushjet/Pushjet-Android | gradle/wrapper/dists/gradle-1.12-all/4ff8jj5a73a7zgj5nnzv1ubq0/gradle-1.12/src/core-impl/org/gradle/api/internal/artifacts/ivyservice/dynamicversions/ModuleVersionsCacheEntry.java | 1104 | /*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.ivyservice.dynamicversions;
import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.ModuleVersionListing;
/**
 * Simple value holder cached per dynamic-version lookup: the listing of module
 * versions that was resolved, together with the time at which it was created
 * (used for cache-expiry decisions).
 */
class ModuleVersionsCacheEntry {
    public ModuleVersionListing moduleVersionListing;
    public long createTimestamp;

    ModuleVersionsCacheEntry(ModuleVersionListing listing, long timestamp) {
        moduleVersionListing = listing;
        createTimestamp = timestamp;
    }
}
| bsd-2-clause |
ekollof/DarkUniverse | lib/Freemarker/source/src/test/java/freemarker/ext/jsp/taglibmembers/EnclosingClass.java | 892 | /*
* Copyright 2014 Attila Szegedi, Daniel Dekany, Jonathan Revusky
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package freemarker.ext.jsp.taglibmembers;
public class EnclosingClass {
    public static class NestedClass {
        /**
         * Returns the length of the hypotenuse of a right triangle with legs
         * {@code a} and {@code b}.
         *
         * <p>Uses {@link Math#hypot(double, double)} rather than
         * {@code Math.sqrt(a * a + b * b)}: the latter overflows to infinity for
         * legs near {@code Double.MAX_VALUE} and loses precision for very small
         * legs, while {@code hypot} computes the result without intermediate
         * overflow or underflow.
         *
         * @param a one leg of the triangle
         * @param b the other leg of the triangle
         * @return sqrt(a&sup2; + b&sup2;), computed robustly
         */
        public static double hypotenuse(double a, double b) {
            return Math.hypot(a, b);
        }
    }
}
| bsd-2-clause |
codeaudit/Foundry | Components/LearningCore/Source/gov/sandia/cognition/statistics/distribution/NormalInverseGammaDistribution.java | 11067 | /*
* File: NormalInverseGammaDistribution.java
* Authors: Kevin R. Dixon
* Company: Sandia National Laboratories
* Project: Cognitive Foundry
*
* Copyright Mar 16, 2010, Sandia Corporation.
* Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
* license for use of this work by or on behalf of the U.S. Government.
* Export of this program may require a license from the United States
* Government. See CopyrightHistory.txt for complete details.
*
*/
package gov.sandia.cognition.statistics.distribution;
import gov.sandia.cognition.annotation.PublicationReference;
import gov.sandia.cognition.annotation.PublicationReferences;
import gov.sandia.cognition.annotation.PublicationType;
import gov.sandia.cognition.math.matrix.Vector;
import gov.sandia.cognition.math.matrix.VectorFactory;
import gov.sandia.cognition.math.matrix.VectorInputEvaluator;
import gov.sandia.cognition.statistics.AbstractDistribution;
import gov.sandia.cognition.statistics.ClosedFormComputableDistribution;
import gov.sandia.cognition.statistics.ProbabilityDensityFunction;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Random;
/**
* The normal inverse-gamma distribution is the product of a univariate
* Gaussian distribution with an inverse-gamma distribution. It is the
* conjugate prior of a univariate Gaussian with unknown mean and unknown
* variance. (As far as I know, it has no other purpose.)
* @author Kevin R. Dixon
* @since 3.0
*/
@PublicationReferences(
references={
@PublicationReference(
author="Christopher M. Bishop",
title="Pattern Recognition and Machine Learning",
type=PublicationType.Book,
year=2006,
pages={101}
)
,
@PublicationReference(
author="Wikipedia",
title="Normal-scaled inverse gamma distribution",
type=PublicationType.WebPage,
year=2010,
url="http://en.wikipedia.org/wiki/Normal-scaled_inverse_gamma_distribution"
)
}
)
public class NormalInverseGammaDistribution
    extends AbstractDistribution<Vector>
    implements ClosedFormComputableDistribution<Vector>
{

    /**
     * Default location, {@value}.
     */
    public static final double DEFAULT_LOCATION = 0.0;

    /**
     * Default precision, {@value}.
     */
    public static final double DEFAULT_PRECISION = 1.0;

    /**
     * Default shape, {@value}.
     */
    public static final double DEFAULT_SHAPE = InverseGammaDistribution.DEFAULT_SHAPE;

    /**
     * Default scale, {@value}.
     */
    public static final double DEFAULT_SCALE = InverseGammaDistribution.DEFAULT_SCALE;

    /**
     * Location of the Gaussian kernel.
     */
    private double location;

    /**
     * Precision of the Gaussian kernel, must be greater than zero.
     */
    private double precision;

    /**
     * Shape parameter of the Inverse Gamma kernel, must be greater than zero.
     */
    private double shape;

    /**
     * Scale parameter of the Inverse Gamma kernel, must be greater than zero.
     */
    private double scale;

    /**
     * Creates a new instance of NormalInverseGammaDistribution
     */
    public NormalInverseGammaDistribution()
    {
        this( DEFAULT_LOCATION, DEFAULT_PRECISION, DEFAULT_SHAPE, DEFAULT_SCALE );
    }

    /**
     * Creates a new instance of NormalInverseGammaDistribution
     * @param location
     * Location of the Gaussian kernel.
     * @param precision
     * Precision of the Gaussian kernel, must be greater than zero.
     * @param shape
     * Shape parameter of the Inverse Gamma kernel, must be greater than zero.
     * @param scale
     * Scale parameter of the Inverse Gamma kernel, must be greater than zero.
     */
    public NormalInverseGammaDistribution(
        double location,
        double precision,
        double shape,
        double scale)
    {
        // The setters validate that precision, shape, and scale are > 0.
        this.setLocation(location);
        this.setPrecision(precision);
        this.setShape(shape);
        this.setScale(scale);
    }

    /**
     * Copy constructor
     * @param other
     * NormalInverseGammaDistribution to copy
     */
    public NormalInverseGammaDistribution(
        NormalInverseGammaDistribution other )
    {
        this( other.getLocation(), other.getPrecision(),
            other.getShape(), other.getScale() );
    }

    @Override
    public NormalInverseGammaDistribution clone()
    {
        return (NormalInverseGammaDistribution) super.clone();
    }

    /**
     * Gets the mean as the 2-dimensional vector
     * (location, scale/(shape-1)).  The mean of the variance component only
     * exists when shape is greater than 1.0.
     *
     * @return
     * Two-element vector: mean of the Gaussian mean and mean of the variance.
     * @throws IllegalArgumentException if shape is not greater than 1.0.
     */
    public Vector getMean()
    {
        if( this.shape > 1.0 )
        {
            return VectorFactory.getDefault().copyValues(
                this.location, this.scale/(this.shape-1.0) );
        }
        else
        {
            throw new IllegalArgumentException(
                "Shape must be > 1.0 for a mean" );
        }
    }

    @Override
    public void sampleInto(
        final Random random,
        final int sampleCount,
        final Collection<? super Vector> output)
    {
        InverseGammaDistribution.CDF inverseGamma =
            new InverseGammaDistribution.CDF(this.shape, this.scale);
        // The initial variance (1/precision) is a placeholder; it is overwritten
        // for each sample in the loop below.
        UnivariateGaussian.CDF gaussian =
            new UnivariateGaussian.CDF(this.location, 1.0 / this.precision);
        // Ancestral sampling: draw variance from the inverse-gamma marginal,
        // then draw the mean conditioned on that variance,
        // mean ~ N(location, variance/precision).  Each output sample is the
        // 2-vector (mean, variance).
        final double[] variances = inverseGamma.sampleAsDoubles(random, sampleCount);
        for (double variance : variances)
        {
            gaussian.setVariance(variance / this.precision);
            double mean = gaussian.sample(random);
            output.add(VectorFactory.getDefault().copyValues(mean, variance));
        }
    }

    /**
     * Converts the parameters into the 4-dimensional vector
     * (location, precision, shape, scale) — the same ordering expected by
     * {@link #convertFromVector}.
     */
    public Vector convertToVector()
    {
        return VectorFactory.getDefault().copyValues(
            this.getLocation(), this.getPrecision(),
            this.getShape(), this.getScale() );
    }

    /**
     * Sets the parameters from the 4-dimensional vector
     * (location, precision, shape, scale).
     */
    public void convertFromVector(
        Vector parameters)
    {
        parameters.assertDimensionalityEquals(4);
        this.setLocation( parameters.getElement(0) );
        this.setPrecision( parameters.getElement(1) );
        this.setShape( parameters.getElement(2) );
        this.setScale( parameters.getElement(3) );
    }

    public NormalInverseGammaDistribution.PDF getProbabilityFunction()
    {
        return new NormalInverseGammaDistribution.PDF( this );
    }

    /**
     * Getter for location.
     * @return
     * Location of the Gaussian kernel.
     */
    public double getLocation()
    {
        return this.location;
    }

    /**
     * Setter for location.
     * @param location
     * Location of the Gaussian kernel.
     */
    public void setLocation(
        double location)
    {
        this.location = location;
    }

    /**
     * Getter for precision
     * @return
     * Precision of the Gaussian kernel, must be greater than zero.
     */
    public double getPrecision()
    {
        return this.precision;
    }

    /**
     * Setter for precision.
     * @param precision
     * Precision of the Gaussian kernel, must be greater than zero.
     * @throws IllegalArgumentException if precision is not greater than zero.
     */
    public void setPrecision(
        double precision)
    {
        if( precision <= 0.0 )
        {
            throw new IllegalArgumentException( "Precision must be > 0.0" );
        }
        this.precision = precision;
    }

    /**
     * Getter for shape
     * @return
     * Shape parameter of the Inverse Gamma kernel, must be greater than zero.
     */
    public double getShape()
    {
        return this.shape;
    }

    /**
     * Setter for shape
     * @param shape
     * Shape parameter of the Inverse Gamma kernel, must be greater than zero.
     * @throws IllegalArgumentException if shape is not greater than zero.
     */
    public void setShape(
        double shape)
    {
        if( shape <= 0.0 )
        {
            throw new IllegalArgumentException( "Shape must be > 0.0" );
        }
        this.shape = shape;
    }

    /**
     * Getter for scale
     * @return
     * Scale parameter of the Inverse Gamma kernel, must be greater than zero.
     */
    public double getScale()
    {
        return this.scale;
    }

    /**
     * Setter for scale
     * @param scale
     * Scale parameter of the Inverse Gamma kernel, must be greater than zero.
     * @throws IllegalArgumentException if scale is not greater than zero.
     */
    public void setScale(
        double scale)
    {
        if( scale <= 0.0 )
        {
            throw new IllegalArgumentException( "Scale must be > 0.0" );
        }
        this.scale = scale;
    }

    @Override
    public String toString()
    {
        return "Location: " + this.getLocation() + ", Precision: " + this.getPrecision()
            + ", Shape: " + this.getShape() + ", Scale: " + this.getScale();
    }

    /**
     * PDF of the NormalInverseGammaDistribution
     */
    public static class PDF
        extends NormalInverseGammaDistribution
        implements ProbabilityDensityFunction<Vector>,
        VectorInputEvaluator<Vector,Double>
    {

        /**
         * Creates a new instance of NormalInverseGammaDistribution
         */
        public PDF()
        {
            super();
        }

        /**
         * Creates a new instance of NormalInverseGammaDistribution
         * @param location
         * Location of the Gaussian kernel.
         * @param precision
         * Precision of the Gaussian kernel, must be greater than zero.
         * @param shape
         * Shape parameter of the Inverse Gamma kernel, must be greater than zero.
         * @param scale
         * Scale parameter of the Inverse Gamma kernel, must be greater than zero.
         */
        public PDF(
            double location,
            double precision,
            double shape,
            double scale)
        {
            super( location, precision, shape, scale);
        }

        /**
         * Copy constructor
         * @param other
         * NormalInverseGammaDistribution to copy
         */
        public PDF(
            NormalInverseGammaDistribution other )
        {
            super( other );
        }

        @Override
        public NormalInverseGammaDistribution.PDF getProbabilityFunction()
        {
            return this;
        }

        /**
         * Computes the log-density at the 2-dimensional input
         * (mean, variance).  The density factors as the product of the
         * inverse-gamma density of the variance and the Gaussian density of
         * the mean given that variance, so the log-density is their sum.
         */
        public double logEvaluate(
            Vector input)
        {
            input.assertDimensionalityEquals(2);
            double mean = input.getElement(0);
            double variance = input.getElement(1);
            InverseGammaDistribution.PDF inverseGamma =
                new InverseGammaDistribution.PDF( this.getShape(), this.getScale() );
            UnivariateGaussian.PDF gaussian = new UnivariateGaussian.PDF(
                this.getLocation(), variance / this.getPrecision() );
            double logInverseGamma = inverseGamma.logEvaluate(variance);
            double logGaussian = gaussian.logEvaluate(mean);
            return logGaussian + logInverseGamma;
        }

        /**
         * Computes the density at the 2-dimensional input (mean, variance)
         * by exponentiating {@link #logEvaluate}.
         */
        public Double evaluate(
            Vector input)
        {
            return Math.exp( this.logEvaluate(input) );
        }

        // Inputs are always the 2-vector (mean, variance).
        public int getInputDimensionality()
        {
            return 2;
        }

    }

}
| bsd-3-clause |
sailajaa/CONNECT | Product/Production/Services/PatientDiscoveryCore/src/main/java/gov/hhs/fha/nhinc/patientdiscovery/nhin/deferred/response/proxy/NhinPatientDiscoveryDeferredRespProxyNoOpImpl.java | 2810 | /*
* Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.patientdiscovery.nhin.deferred.response.proxy;
import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType;
import gov.hhs.fha.nhinc.common.nhinccommon.NhinTargetSystemType;
import gov.hhs.fha.nhinc.aspect.NwhinInvocationEvent;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.PRPAIN201306UV02EventDescriptionBuilder;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.MCCIIN000002UV01EventDescriptionBuilder;
import org.hl7.v3.MCCIIN000002UV01;
import org.hl7.v3.PRPAIN201306UV02;
/**
*
* @author JHOPPESC
*/
public class NhinPatientDiscoveryDeferredRespProxyNoOpImpl implements NhinPatientDiscoveryDeferredRespProxy {
@NwhinInvocationEvent(beforeBuilder = PRPAIN201306UV02EventDescriptionBuilder.class,
afterReturningBuilder = MCCIIN000002UV01EventDescriptionBuilder.class,
serviceType = "Patient Discovery Deferred Response",
version = "1.0")
public MCCIIN000002UV01 respondingGatewayPRPAIN201306UV02(PRPAIN201306UV02 body, AssertionType assertion,
NhinTargetSystemType target) {
return new MCCIIN000002UV01();
}
}
| bsd-3-clause |
xzy256/grpc-java-mips64 | benchmarks/src/main/java/io/grpc/benchmarks/qps/ClientConfiguration.java | 10781 | /*
* Copyright 2015, Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.grpc.benchmarks.qps;
import static io.grpc.benchmarks.Utils.parseBoolean;
import static java.lang.Integer.parseInt;
import static java.util.Arrays.asList;
import io.grpc.ManagedChannel;
import io.grpc.benchmarks.Transport;
import io.grpc.benchmarks.Utils;
import io.grpc.benchmarks.proto.Control.RpcType;
import io.grpc.benchmarks.proto.Messages;
import io.grpc.benchmarks.proto.Messages.PayloadType;
import io.grpc.testing.TestUtils;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Set;
/**
* Configuration options for benchmark clients.
*/
public class ClientConfiguration implements Configuration {
  private static final ClientConfiguration DEFAULT = new ClientConfiguration();

  Transport transport = Transport.NETTY_NIO;
  boolean tls;
  boolean testca;
  String authorityOverride = TestUtils.TEST_SERVER_HOST;
  boolean useDefaultCiphers;
  boolean directExecutor;
  SocketAddress address;
  int channels = 4;
  int outstandingRpcsPerChannel = 10;
  int serverPayload;
  int clientPayload;
  int flowControlWindow = Utils.DEFAULT_FLOW_CONTROL_WINDOW;
  // seconds
  int duration = 60;
  // seconds
  int warmupDuration = 10;
  int targetQps;
  String histogramFile;
  RpcType rpcType = RpcType.UNARY;
  PayloadType payloadType = PayloadType.COMPRESSABLE;

  private ClientConfiguration() {
  }

  /**
   * Creates a new channel to the configured address using the configured
   * transport, TLS, and flow-control options.
   */
  public ManagedChannel newChannel() throws IOException {
    return Utils.newClientChannel(transport, address, tls, testca, authorityOverride,
        useDefaultCiphers, flowControlWindow, directExecutor);
  }

  /**
   * Builds a benchmark request using the configured payload type and the
   * configured client/server payload sizes.
   */
  public Messages.SimpleRequest newRequest() {
    return Utils.makeRequest(payloadType, clientPayload, serverPayload);
  }

  /**
   * Constructs a builder for configuring a client application with supported parameters. If no
   * parameters are provided, all parameters are assumed to be supported.
   */
  static Builder newBuilder(ClientParam... supportedParams) {
    return new Builder(supportedParams);
  }

  static final class Builder extends AbstractConfigurationBuilder<ClientConfiguration> {
    private final Collection<Param> supportedParams;

    private Builder(ClientParam... supportedParams) {
      this.supportedParams = supportedOptionsSet(supportedParams);
    }

    @Override
    protected ClientConfiguration newConfiguration() {
      return new ClientConfiguration();
    }

    @Override
    protected Collection<Param> getParams() {
      return supportedParams;
    }

    @Override
    protected ClientConfiguration build0(ClientConfiguration config) {
      if (config.tls) {
        if (!config.transport.tlsSupported) {
          // Locale.ROOT avoids locale-sensitive case mapping (e.g. the Turkish
          // dotless-i problem) when lower-casing enum names.
          throw new IllegalArgumentException(
              "Transport " + config.transport.name().toLowerCase(Locale.ROOT)
              + " does not support TLS.");
        }

        if (config.transport != Transport.OK_HTTP
            && config.testca && config.address instanceof InetSocketAddress) {
          // Override the socket address with the host from the testca.
          InetSocketAddress address = (InetSocketAddress) config.address;
          config.address = TestUtils.testServerAddress(address.getHostName(),
              address.getPort());
        }
      }
      // Verify that the address type is correct for the transport type.
      config.transport.validateSocketAddress(config.address);
      return config;
    }

    private static Set<Param> supportedOptionsSet(ClientParam... supportedParams) {
      if (supportedParams.length == 0) {
        // If no options are supplied, default to including all options.
        supportedParams = ClientParam.values();
      }
      return Collections.unmodifiableSet(new LinkedHashSet<Param>(asList(supportedParams)));
    }
  }

  enum ClientParam implements AbstractConfigurationBuilder.Param {
    ADDRESS("STR", "Socket address (host:port) or Unix Domain Socket file name "
        + "(unix:///path/to/file), depending on the transport selected.", null, true) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.address = Utils.parseSocketAddress(value);
      }
    },
    CHANNELS("INT", "Number of Channels.", "" + DEFAULT.channels) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.channels = parseInt(value);
      }
    },
    OUTSTANDING_RPCS("INT", "Number of outstanding RPCs per Channel.",
        "" + DEFAULT.outstandingRpcsPerChannel) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.outstandingRpcsPerChannel = parseInt(value);
      }
    },
    CLIENT_PAYLOAD("BYTES", "Payload Size of the Request.", "" + DEFAULT.clientPayload) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.clientPayload = parseInt(value);
      }
    },
    SERVER_PAYLOAD("BYTES", "Payload Size of the Response.", "" + DEFAULT.serverPayload) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.serverPayload = parseInt(value);
      }
    },
    TLS("", "Enable TLS.", "" + DEFAULT.tls) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.tls = parseBoolean(value);
      }
    },
    TESTCA("", "Use the provided Test Certificate for TLS.", "" + DEFAULT.testca) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.testca = parseBoolean(value);
      }
    },
    USE_DEFAULT_CIPHERS("", "Use the default JDK ciphers for TLS (Used to support Java 7).",
        "" + DEFAULT.useDefaultCiphers) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.useDefaultCiphers = parseBoolean(value);
      }
    },
    TRANSPORT("STR", Transport.getDescriptionString(),
        DEFAULT.transport.name().toLowerCase(Locale.ROOT)) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        // Locale.ROOT keeps enum-name lookup stable regardless of the default locale.
        config.transport = Transport.valueOf(value.toUpperCase(Locale.ROOT));
      }
    },
    DURATION("SECONDS", "Duration of the benchmark.", "" + DEFAULT.duration) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.duration = parseInt(value);
      }
    },
    WARMUP_DURATION("SECONDS", "Warmup Duration of the benchmark.", "" + DEFAULT.warmupDuration) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.warmupDuration = parseInt(value);
      }
    },
    DIRECTEXECUTOR("",
        "Don't use a threadpool for RPC calls, instead execute calls directly "
        + "in the transport thread.", "" + DEFAULT.directExecutor) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.directExecutor = parseBoolean(value);
      }
    },
    SAVE_HISTOGRAM("FILE", "Write the histogram with the latency recordings to file.", null) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.histogramFile = value;
      }
    },
    STREAMING_RPCS("", "Use Streaming RPCs.", "false") {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.rpcType = RpcType.STREAMING;
      }
    },
    FLOW_CONTROL_WINDOW("BYTES", "The HTTP/2 flow control window.",
        "" + DEFAULT.flowControlWindow) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.flowControlWindow = parseInt(value);
      }
    },
    TARGET_QPS("INT", "Average number of QPS to shoot for.", "" + DEFAULT.targetQps, true) {
      @Override
      protected void setClientValue(ClientConfiguration config, String value) {
        config.targetQps = parseInt(value);
      }
    };

    private final String type;
    private final String description;
    private final String defaultValue;
    private final boolean required;

    ClientParam(String type, String description, String defaultValue) {
      this(type, description, defaultValue, false);
    }

    ClientParam(String type, String description, String defaultValue, boolean required) {
      this.type = type;
      this.description = description;
      this.defaultValue = defaultValue;
      this.required = required;
    }

    @Override
    public String getName() {
      // Locale.ROOT avoids locale-sensitive case mapping of the enum name.
      return name().toLowerCase(Locale.ROOT);
    }

    @Override
    public String getType() {
      return type;
    }

    @Override
    public String getDescription() {
      return description;
    }

    @Override
    public String getDefaultValue() {
      return defaultValue;
    }

    @Override
    public boolean isRequired() {
      return required;
    }

    @Override
    public void setValue(Configuration config, String value) {
      setClientValue((ClientConfiguration) config, value);
    }

    protected abstract void setClientValue(ClientConfiguration config, String value);
  }
}
| bsd-3-clause |
sailajaa/CONNECT | Product/Production/Common/CONNECTCoreLib/src/main/java/gov/hhs/fha/nhinc/gateway/executorservice/NhinTaskExecutor.java | 7263 | /*
* Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.gateway.executorservice;
import gov.hhs.fha.nhinc.orchestration.OutboundOrchestratableMessage;
import gov.hhs.fha.nhinc.orchestration.OutboundResponseProcessor;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.Future;
import org.apache.log4j.Logger;
import com.google.common.base.Optional;
/**
* Main unit of execution Executes a DQ or PD request currently, but could be used to execute any of the Nhin
* transaction requests (such as DR or DeferredPD)
*
* Uses generics for CumulativeResponse (which represents final object that is returned). Each IndividualResponse
* returned from executed NhinCallableRequest contains the OutboundResponseProcessor for the IndividualResponse
*
* Constructs with the java.util.concurrent.ExecutorService to use to execute the requests and a List of
* NhinCallableRequest to be submitted to ExecutorService
*
* Uses an ExecutorCompletionService, and executeTask will return only when all CallableRequest have completed/returned.
* Once executeTask has returned, call getFinalResponse to get the final cumulative/aggregated/processed response which
* contains all the responses from the individual NhinCallableRequest
*
* @author paul.eftis
*/
public class NhinTaskExecutor<CumulativeResponse extends OutboundOrchestratableMessage, IndividualResponse extends OutboundOrchestratableMessage> {
private static final Logger LOG = Logger.getLogger(NhinTaskExecutor.class);
private CumulativeResponse cumulativeResponse = null;
private Executor executor = null;
private String transactionId = null;
private List<NhinCallableRequest<IndividualResponse>> callableList = new ArrayList<NhinCallableRequest<IndividualResponse>>();
/**
 * Creates a task executor for a set of NHIN callable requests.
 *
 * @param e executor used to run the submitted callable requests
 * @param list requests that will be submitted for execution by executeTask
 * @param id transaction id associated with this batch of requests
 */
public NhinTaskExecutor(Executor e, List<NhinCallableRequest<IndividualResponse>> list, String id) {
    transactionId = id;
    executor = e;
    callableList = list;
}
/**
 * Called when TaskExecutor is complete to retrieve the final result.
 *
 * @return Response which contains all the responses from the individual CallableRequest
 *         aggregated into a single response, or {@code null} if executeTask has not yet
 *         completed (or produced no processed responses)
 */
public CumulativeResponse getFinalResponse() {
    return cumulativeResponse;
}
@SuppressWarnings("static-access")
public void executeTask() throws InterruptedException, ExecutionException {
LOG.debug("NhinTaskExecutor::executeTask begin");
try {
CompletionService<IndividualResponse> executorCompletionService = new ExecutorCompletionService<IndividualResponse>(
executor);
// loop through the callableList and submit the callable requests for execution
for (NhinCallableRequest<IndividualResponse> c : callableList) {
executorCompletionService.submit(c);
}
// the executor completion service puts the callable responses on a
// blocking queue where you retrieve <Future> responses off queue using
// take(), when they become available
int count = 0;
for (NhinCallableRequest<IndividualResponse> c : callableList) {
Future<IndividualResponse> future = executorCompletionService.take();
// for debug
count++;
LOG.debug("NhinTaskExecutor::executeTask::take received response count=" + count);
if (future != null) {
try {
IndividualResponse r = (IndividualResponse) future.get();
if (r != null) {
// process response
Optional<OutboundResponseProcessor> optionalProcessor = r.getResponseProcessor();
if (!optionalProcessor.isPresent()) {
throw new IllegalArgumentException(
"IndividualResponse.getResponseProcessor returned null");
}
OutboundResponseProcessor processor = optionalProcessor.get();
cumulativeResponse = (CumulativeResponse) processor.processNhinResponse(r,
cumulativeResponse);
} else {
// shouldn't ever get here, but if we do all we can do is log and skip it
LOG.error("NhinTaskExecutor::executeTask (count=" + count + ") received null response!!!!!");
}
} catch (Exception e) {
// shouldn't ever get here
LOG.error("NhinTaskExecutor processResponse EXCEPTION!!!");
ExecutorServiceHelper.getInstance().outputCompleteException(e);
}
} else {
// shouldn't ever get here
LOG.error("NhinTaskExecutor::executeTask received null future from queue (i.e. take)!!!!!");
}
}
LOG.debug("NhinTaskExecutor::executeTask done");
} catch (Exception e) {
// shouldn't ever get here
LOG.error("NhinTaskExecutor EXCEPTION!!!");
ExecutorServiceHelper.getInstance().outputCompleteException(e);
}
}
}
| bsd-3-clause |
dhis2/dhis2-core | dhis-2/dhis-services/dhis-service-core/src/test/java/org/hisp/dhis/period/PeriodStoreTest.java | 20914 | /*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.period;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.hisp.dhis.DhisSpringTest;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Integration tests for {@link PeriodStore} CRUD and date-range query operations.
 *
 * @author Torgeir Lorange Ostby
 * @version $Id: PeriodStoreTest.java 5983 2008-10-17 17:42:44Z larshelg $
 */
class PeriodStoreTest extends DhisSpringTest
{
    @Autowired
    private PeriodStore periodStore;

    // -------------------------------------------------------------------------
    // Period
    // -------------------------------------------------------------------------

    /** Persisted periods can be read back by id with period type and dates intact. */
    @Test
    void testAddPeriod()
    {
        List<PeriodType> periodTypes = periodStore.getAllPeriodTypes();
        Iterator<PeriodType> it = periodTypes.iterator();
        PeriodType periodTypeA = it.next();
        PeriodType periodTypeB = it.next();

        Period periodA = new Period( periodTypeA, getDay( 1 ), getDay( 2 ) );
        Period periodB = new Period( periodTypeA, getDay( 2 ), getDay( 3 ) );
        Period periodC = new Period( periodTypeB, getDay( 2 ), getDay( 3 ) );

        periodStore.addPeriod( periodA );
        long idA = periodA.getId();
        periodStore.addPeriod( periodB );
        long idB = periodB.getId();
        periodStore.addPeriod( periodC );
        long idC = periodC.getId();

        periodA = periodStore.get( idA );
        assertNotNull( periodA );
        assertEquals( idA, periodA.getId() );
        assertEquals( periodTypeA, periodA.getPeriodType() );
        assertEquals( getDay( 1 ), periodA.getStartDate() );
        assertEquals( getDay( 2 ), periodA.getEndDate() );

        periodB = periodStore.get( idB );
        assertNotNull( periodB );
        assertEquals( idB, periodB.getId() );
        assertEquals( periodTypeA, periodB.getPeriodType() );
        assertEquals( getDay( 2 ), periodB.getStartDate() );
        assertEquals( getDay( 3 ), periodB.getEndDate() );

        periodC = periodStore.get( idC );
        assertNotNull( periodC );
        assertEquals( idC, periodC.getId() );
        assertEquals( periodTypeB, periodC.getPeriodType() );
        assertEquals( getDay( 2 ), periodC.getStartDate() );
        assertEquals( getDay( 3 ), periodC.getEndDate() );
    }

    /** Deleting a period removes only that period; the others remain retrievable. */
    @Test
    void testDeleteAndGetPeriod()
    {
        List<PeriodType> periodTypes = periodStore.getAllPeriodTypes();
        Iterator<PeriodType> it = periodTypes.iterator();
        PeriodType periodTypeA = it.next();
        PeriodType periodTypeB = it.next();

        Period periodA = new Period( periodTypeA, getDay( 1 ), getDay( 2 ) );
        Period periodB = new Period( periodTypeA, getDay( 2 ), getDay( 3 ) );
        Period periodC = new Period( periodTypeB, getDay( 2 ), getDay( 3 ) );
        Period periodD = new Period( periodTypeB, getDay( 3 ), getDay( 4 ) );

        periodStore.addPeriod( periodA );
        long idA = periodA.getId();
        periodStore.addPeriod( periodB );
        long idB = periodB.getId();
        periodStore.addPeriod( periodC );
        long idC = periodC.getId();
        periodStore.addPeriod( periodD );
        long idD = periodD.getId();

        assertNotNull( periodStore.get( idA ) );
        assertNotNull( periodStore.get( idB ) );
        assertNotNull( periodStore.get( idC ) );
        assertNotNull( periodStore.get( idD ) );

        periodStore.delete( periodA );
        assertNull( periodStore.get( idA ) );
        assertNotNull( periodStore.get( idB ) );
        assertNotNull( periodStore.get( idC ) );
        assertNotNull( periodStore.get( idD ) );

        periodStore.delete( periodB );
        assertNull( periodStore.get( idA ) );
        assertNull( periodStore.get( idB ) );
        assertNotNull( periodStore.get( idC ) );
        assertNotNull( periodStore.get( idD ) );

        periodStore.delete( periodC );
        assertNull( periodStore.get( idA ) );
        assertNull( periodStore.get( idB ) );
        assertNull( periodStore.get( idC ) );
        assertNotNull( periodStore.get( idD ) );

        periodStore.delete( periodD );
        assertNull( periodStore.get( idA ) );
        assertNull( periodStore.get( idB ) );
        assertNull( periodStore.get( idC ) );
        assertNull( periodStore.get( idD ) );
    }

    /** Lookup by (startDate, endDate, periodType) returns exactly the matching period, or null. */
    @Test
    void testGetPeriod()
    {
        List<PeriodType> periodTypes = periodStore.getAllPeriodTypes();
        Iterator<PeriodType> it = periodTypes.iterator();
        PeriodType periodTypeA = it.next();
        PeriodType periodTypeB = it.next();

        Period periodA = new Period( periodTypeA, getDay( 1 ), getDay( 2 ) );
        Period periodB = new Period( periodTypeA, getDay( 2 ), getDay( 3 ) );
        Period periodC = new Period( periodTypeB, getDay( 2 ), getDay( 3 ) );
        Period periodD = new Period( periodTypeB, getDay( 3 ), getDay( 4 ) );
        Period periodE = new Period( periodTypeA, getDay( 3 ), getDay( 4 ) );

        periodStore.addPeriod( periodA );
        long idA = periodA.getId();
        periodStore.addPeriod( periodB );
        long idB = periodB.getId();
        periodStore.addPeriod( periodC );
        long idC = periodC.getId();
        periodStore.addPeriod( periodD );
        long idD = periodD.getId();
        periodStore.addPeriod( periodE );
        long idE = periodE.getId();

        periodA = periodStore.getPeriod( getDay( 1 ), getDay( 2 ), periodTypeA );
        assertNotNull( periodA );
        assertEquals( idA, periodA.getId() );
        assertEquals( periodTypeA, periodA.getPeriodType() );
        assertEquals( getDay( 1 ), periodA.getStartDate() );
        assertEquals( getDay( 2 ), periodA.getEndDate() );

        periodB = periodStore.getPeriod( getDay( 2 ), getDay( 3 ), periodTypeA );
        assertNotNull( periodB );
        assertEquals( idB, periodB.getId() );
        assertEquals( periodTypeA, periodB.getPeriodType() );
        assertEquals( getDay( 2 ), periodB.getStartDate() );
        assertEquals( getDay( 3 ), periodB.getEndDate() );

        periodC = periodStore.getPeriod( getDay( 2 ), getDay( 3 ), periodTypeB );
        assertNotNull( periodC );
        assertEquals( idC, periodC.getId() );
        assertEquals( periodTypeB, periodC.getPeriodType() );
        assertEquals( getDay( 2 ), periodC.getStartDate() );
        assertEquals( getDay( 3 ), periodC.getEndDate() );

        periodD = periodStore.getPeriod( getDay( 3 ), getDay( 4 ), periodTypeB );
        assertNotNull( periodD );
        assertEquals( idD, periodD.getId() );
        assertEquals( periodTypeB, periodD.getPeriodType() );
        assertEquals( getDay( 3 ), periodD.getStartDate() );
        assertEquals( getDay( 4 ), periodD.getEndDate() );

        periodE = periodStore.getPeriod( getDay( 3 ), getDay( 4 ), periodTypeA );
        assertNotNull( periodE );
        assertEquals( idE, periodE.getId() );
        assertEquals( periodTypeA, periodE.getPeriodType() );
        assertEquals( getDay( 3 ), periodE.getStartDate() );
        assertEquals( getDay( 4 ), periodE.getEndDate() );

        // No period matches these date/type combinations.
        assertNull( periodStore.getPeriod( getDay( 1 ), getDay( 2 ), periodTypeB ) );
        assertNull( periodStore.getPeriod( getDay( 1 ), getDay( 3 ), periodTypeA ) );
        assertNull( periodStore.getPeriod( getDay( 1 ), getDay( 5 ), periodTypeB ) );
        assertNull( periodStore.getPeriod( getDay( 4 ), getDay( 3 ), periodTypeB ) );
        assertNull( periodStore.getPeriod( getDay( 5 ), getDay( 6 ), periodTypeA ) );
    }

    /** getAll returns every persisted period. */
    @Test
    void testGetAllPeriods()
    {
        PeriodType periodType = periodStore.getAllPeriodTypes().iterator().next();

        Period periodA = new Period( periodType, getDay( 1 ), getDay( 1 ) );
        Period periodB = new Period( periodType, getDay( 1 ), getDay( 2 ) );
        Period periodC = new Period( periodType, getDay( 2 ), getDay( 3 ) );

        periodStore.addPeriod( periodA );
        periodStore.addPeriod( periodB );
        periodStore.addPeriod( periodC );

        List<Period> periods = periodStore.getAll();
        assertNotNull( periods );
        assertEquals( 3, periods.size() );
        assertTrue( periods.contains( periodA ) );
        assertTrue( periods.contains( periodB ) );
        assertTrue( periods.contains( periodC ) );
    }

    /** Returns periods fully contained within the given date interval. */
    @Test
    void testGetPeriodsBetweenDates()
    {
        List<PeriodType> periodTypes = periodStore.getAllPeriodTypes();
        Iterator<PeriodType> it = periodTypes.iterator();
        PeriodType periodTypeA = it.next();
        PeriodType periodTypeB = it.next();

        Period periodA = new Period( periodTypeA, getDay( 1 ), getDay( 2 ) );
        Period periodB = new Period( periodTypeA, getDay( 2 ), getDay( 3 ) );
        Period periodC = new Period( periodTypeB, getDay( 2 ), getDay( 3 ) );
        Period periodD = new Period( periodTypeB, getDay( 3 ), getDay( 4 ) );

        periodStore.addPeriod( periodA );
        periodStore.addPeriod( periodB );
        periodStore.addPeriod( periodC );
        periodStore.addPeriod( periodD );

        List<Period> periods = periodStore.getPeriodsBetweenDates( getDay( 1 ), getDay( 1 ) );
        assertNotNull( periods );
        assertEquals( 0, periods.size() );

        periods = periodStore.getPeriodsBetweenDates( getDay( 1 ), getDay( 2 ) );
        assertNotNull( periods );
        assertEquals( 1, periods.size() );
        assertEquals( periodA, periods.iterator().next() );

        periods = periodStore.getPeriodsBetweenDates( getDay( 2 ), getDay( 4 ) );
        assertNotNull( periods );
        assertEquals( 3, periods.size() );
        assertTrue( periods.contains( periodB ) );
        assertTrue( periods.contains( periodC ) );
        assertTrue( periods.contains( periodD ) );

        periods = periodStore.getPeriodsBetweenDates( getDay( 1 ), getDay( 5 ) );
        assertNotNull( periods );
        assertEquals( 4, periods.size() );
        assertTrue( periods.contains( periodA ) );
        assertTrue( periods.contains( periodB ) );
        assertTrue( periods.contains( periodC ) );
        assertTrue( periods.contains( periodD ) );
    }

    /** Returns periods contained within the interval plus periods spanning it entirely. */
    @Test
    void testGetPeriodsBetweenOrSpanningDates()
    {
        List<PeriodType> periodTypes = periodStore.getAllPeriodTypes();
        Iterator<PeriodType> it = periodTypes.iterator();
        PeriodType periodTypeA = it.next();
        PeriodType periodTypeB = it.next();

        Period periodA = new Period( periodTypeA, getDay( 1 ), getDay( 2 ) );
        Period periodB = new Period( periodTypeA, getDay( 2 ), getDay( 3 ) );
        Period periodC = new Period( periodTypeB, getDay( 2 ), getDay( 3 ) );
        Period periodD = new Period( periodTypeB, getDay( 3 ), getDay( 4 ) );
        Period periodE = new Period( periodTypeB, getDay( 1 ), getDay( 4 ) );

        periodStore.addPeriod( periodA );
        periodStore.addPeriod( periodB );
        periodStore.addPeriod( periodC );
        periodStore.addPeriod( periodD );
        periodStore.addPeriod( periodE );

        List<Period> periods = periodStore.getPeriodsBetweenOrSpanningDates( getDay( 1 ), getDay( 1 ) );
        assertNotNull( periods );
        assertEquals( 2, periods.size() );
        assertTrue( periods.contains( periodA ) );
        assertTrue( periods.contains( periodE ) );

        periods = periodStore.getPeriodsBetweenOrSpanningDates( getDay( 1 ), getDay( 2 ) );
        assertNotNull( periods );
        assertEquals( 2, periods.size() );
        assertTrue( periods.contains( periodA ) );
        assertTrue( periods.contains( periodE ) );

        periods = periodStore.getPeriodsBetweenOrSpanningDates( getDay( 2 ), getDay( 3 ) );
        assertNotNull( periods );
        assertEquals( 3, periods.size() );
        assertTrue( periods.contains( periodB ) );
        assertTrue( periods.contains( periodC ) );
        assertTrue( periods.contains( periodE ) );

        periods = periodStore.getPeriodsBetweenOrSpanningDates( getDay( 2 ), getDay( 4 ) );
        assertNotNull( periods );
        assertEquals( 4, periods.size() );
        assertTrue( periods.contains( periodB ) );
        assertTrue( periods.contains( periodC ) );
        assertTrue( periods.contains( periodD ) );
        assertTrue( periods.contains( periodE ) );
    }

    /** Intersecting-period lookup is filtered by period type (yearly vs monthly). */
    @Test
    void testGetIntersectingPeriodsByPeriodType()
    {
        PeriodType ypt = PeriodType.getPeriodTypeByName( YearlyPeriodType.NAME );

        Date jan2006 = getDate( 2006, 1, 1 );
        Date dec2006 = getDate( 2006, 12, 31 );
        Date jan2007 = getDate( 2007, 1, 1 );
        Date dec2007 = getDate( 2007, 12, 31 );

        Period periodA = new Period( ypt, jan2006, dec2006 );
        Period periodB = new Period( ypt, jan2007, dec2007 );
        periodStore.addPeriod( periodA );
        periodStore.addPeriod( periodB );

        PeriodType mpt = PeriodType.getPeriodTypeByName( MonthlyPeriodType.NAME );

        Date janstart = getDate( 2006, 1, 1 );
        Date janend = getDate( 2006, 1, 31 );
        Date febstart = getDate( 2006, 2, 1 );
        Date febend = getDate( 2006, 2, 28 );
        Date marstart = getDate( 2006, 3, 1 );
        Date marend = getDate( 2006, 3, 31 );
        Date aprstart = getDate( 2006, 4, 1 );
        Date aprend = getDate( 2006, 4, 30 );
        Date maystart = getDate( 2006, 5, 1 );
        Date mayend = getDate( 2006, 5, 31 );
        Date junstart = getDate( 2006, 6, 1 );
        Date junend = getDate( 2006, 6, 30 );
        Date julstart = getDate( 2006, 7, 1 );
        Date julend = getDate( 2006, 7, 31 );
        Date augstart = getDate( 2006, 8, 1 );
        Date augend = getDate( 2006, 8, 31 );
        Date sepstart = getDate( 2006, 9, 1 );
        Date sepend = getDate( 2006, 9, 30 );
        Date octstart = getDate( 2006, 10, 1 );
        Date octend = getDate( 2006, 10, 31 );
        Date novstart = getDate( 2006, 11, 1 );
        Date novend = getDate( 2006, 11, 30 );
        Date decstart = getDate( 2006, 12, 1 );
        Date decend = getDate( 2006, 12, 31 );

        Period periodC = new Period( mpt, janstart, janend );
        Period periodD = new Period( mpt, febstart, febend );
        Period periodE = new Period( mpt, marstart, marend );
        Period periodF = new Period( mpt, aprstart, aprend );
        Period periodG = new Period( mpt, maystart, mayend );
        Period periodH = new Period( mpt, junstart, junend );
        Period periodI = new Period( mpt, julstart, julend );
        Period periodJ = new Period( mpt, augstart, augend );
        Period periodK = new Period( mpt, sepstart, sepend );
        Period periodL = new Period( mpt, octstart, octend );
        Period periodM = new Period( mpt, novstart, novend );
        Period periodN = new Period( mpt, decstart, decend );

        periodStore.addPeriod( periodC );
        periodStore.addPeriod( periodD );
        periodStore.addPeriod( periodE );
        periodStore.addPeriod( periodF );
        periodStore.addPeriod( periodG );
        periodStore.addPeriod( periodH );
        periodStore.addPeriod( periodI );
        periodStore.addPeriod( periodJ );
        periodStore.addPeriod( periodK );
        periodStore.addPeriod( periodL );
        periodStore.addPeriod( periodM );
        periodStore.addPeriod( periodN );

        // Jun-Nov 2006 intersects one yearly period (2006) ...
        List<Period> periodsA = periodStore.getIntersectingPeriodsByPeriodType( ypt, getDate( 2006, 6, 1 ),
            getDate( 2006, 11, 30 ) );
        assertNotNull( periodsA );
        assertEquals( 1, periodsA.size() );

        // ... and six monthly periods (Jun through Nov).
        List<Period> periodsB = periodStore.getIntersectingPeriodsByPeriodType( mpt, getDate( 2006, 6, 1 ),
            getDate( 2006, 11, 30 ) );
        assertNotNull( periodsB );
        assertEquals( 6, periodsB.size() );
    }

    /** Periods overlapping the interval in any way are returned, regardless of type. */
    @Test
    void testGetIntersectingPeriods()
    {
        PeriodType type = periodStore.getAllPeriodTypes().iterator().next();

        Period periodA = new Period( type, getDay( 1 ), getDay( 2 ) );
        Period periodB = new Period( type, getDay( 2 ), getDay( 4 ) );
        Period periodC = new Period( type, getDay( 4 ), getDay( 6 ) );
        Period periodD = new Period( type, getDay( 6 ), getDay( 8 ) );
        Period periodE = new Period( type, getDay( 8 ), getDay( 10 ) );
        Period periodF = new Period( type, getDay( 10 ), getDay( 12 ) );
        Period periodG = new Period( type, getDay( 12 ), getDay( 14 ) );
        Period periodH = new Period( type, getDay( 2 ), getDay( 6 ) );
        Period periodI = new Period( type, getDay( 8 ), getDay( 12 ) );
        Period periodJ = new Period( type, getDay( 2 ), getDay( 12 ) );

        periodStore.addPeriod( periodA );
        periodStore.addPeriod( periodB );
        periodStore.addPeriod( periodC );
        periodStore.addPeriod( periodD );
        periodStore.addPeriod( periodE );
        periodStore.addPeriod( periodF );
        periodStore.addPeriod( periodG );
        periodStore.addPeriod( periodH );
        periodStore.addPeriod( periodI );
        periodStore.addPeriod( periodJ );

        List<Period> periods = periodStore.getIntersectingPeriods( getDay( 4 ), getDay( 10 ) );
        // FIX: JUnit convention is assertEquals(expected, actual); arguments were reversed.
        assertEquals( 8, periods.size() );
        assertTrue( periods.contains( periodB ) );
        assertTrue( periods.contains( periodC ) );
        assertTrue( periods.contains( periodD ) );
        assertTrue( periods.contains( periodE ) );
        assertTrue( periods.contains( periodF ) );
        assertTrue( periods.contains( periodH ) );
        assertTrue( periods.contains( periodI ) );
        assertTrue( periods.contains( periodJ ) );
    }

    /** Lookup by period type returns only periods of that type. */
    @Test
    void testGetPeriodsByPeriodType()
    {
        List<PeriodType> periodTypes = periodStore.getAllPeriodTypes();
        Iterator<PeriodType> it = periodTypes.iterator();
        PeriodType periodTypeA = it.next();
        PeriodType periodTypeB = it.next();
        PeriodType periodTypeC = it.next();

        Period periodA = new Period( periodTypeA, getDay( 1 ), getDay( 2 ) );
        Period periodB = new Period( periodTypeA, getDay( 2 ), getDay( 3 ) );
        Period periodC = new Period( periodTypeA, getDay( 3 ), getDay( 4 ) );
        Period periodD = new Period( periodTypeB, getDay( 3 ), getDay( 4 ) );

        periodStore.addPeriod( periodA );
        periodStore.addPeriod( periodB );
        periodStore.addPeriod( periodC );
        periodStore.addPeriod( periodD );

        List<Period> periodsARef = new ArrayList<>();
        periodsARef.add( periodA );
        periodsARef.add( periodB );
        periodsARef.add( periodC );

        List<Period> periodsA = periodStore.getPeriodsByPeriodType( periodTypeA );
        assertNotNull( periodsA );
        assertEquals( periodsARef.size(), periodsA.size() );
        assertTrue( periodsA.containsAll( periodsARef ) );

        List<Period> periodsB = periodStore.getPeriodsByPeriodType( periodTypeB );
        assertNotNull( periodsB );
        assertEquals( 1, periodsB.size() );
        assertEquals( periodD, periodsB.iterator().next() );

        List<Period> periodsC = periodStore.getPeriodsByPeriodType( periodTypeC );
        assertNotNull( periodsC );
        assertEquals( 0, periodsC.size() );
    }
}
| bsd-3-clause |
mortenoh/dhis2-core | dhis-2/dhis-support/dhis-support-system/src/main/java/org/hisp/dhis/system/util/PredicateUtils.java | 3238 | package org.hisp.dhis.system.util;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.hisp.dhis.common.IdentifiableObject;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Collection;
import java.util.function.Predicate;
/**
 * Reusable reflection {@link Predicate}s for filtering {@link Field}s.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
public class PredicateUtils
{
    /**
     * Matches fields whose declared type is a {@link Collection} and whose
     * first generic type argument is {@link IdentifiableObject} or a subtype.
     */
    public static final Predicate<Field> idObjectCollections = new CollectionWithTypePredicate( IdentifiableObject.class );

    /**
     * Matches any field whose declared type is a {@link Collection}.
     */
    private static class CollectionPredicate
        implements Predicate<Field>
    {
        @Override
        public boolean test( Field field )
        {
            return Collection.class.isAssignableFrom( field.getType() );
        }
    }

    /**
     * Matches collection fields whose first generic type argument is
     * assignable to a given type. Raw collections, wildcards and nested
     * parameterized type arguments do not match.
     */
    private static class CollectionWithTypePredicate
        implements Predicate<Field>
    {
        private final CollectionPredicate collectionPredicate = new CollectionPredicate();

        private final Class<?> type;

        CollectionWithTypePredicate( Class<?> type )
        {
            this.type = type;
        }

        @Override
        public boolean test( Field field )
        {
            if ( !collectionPredicate.test( field ) )
            {
                return false;
            }

            Type genericType = field.getGenericType();

            // BUG FIX: a raw collection field (e.g. "List items") yields a plain Class
            // here; the previous unconditional cast to ParameterizedType threw
            // ClassCastException for such fields.
            if ( !( genericType instanceof ParameterizedType ) )
            {
                return false;
            }

            Type[] actualTypeArguments = ( (ParameterizedType) genericType ).getActualTypeArguments();

            if ( actualTypeArguments.length == 0 )
            {
                return false;
            }

            // BUG FIX: the type argument may be a wildcard, type variable or nested
            // parameterized type (e.g. List<List<String>>); only a plain Class can be
            // tested with isAssignableFrom, the previous cast could throw.
            Type elementType = actualTypeArguments[0];

            return elementType instanceof Class && type.isAssignableFrom( (Class<?>) elementType );
        }
    }
}
| bsd-3-clause |
andrerigon/andrerigon-mockito-fork | test/org/mockitousage/spies/SpyingOnInterfacesTest.java | 1363 | /*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockitousage.spies;
import static org.mockito.Mockito.*;
import java.util.List;
import org.junit.Test;
import org.mockito.exceptions.base.MockitoException;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.mockitoutil.TestBase;
@SuppressWarnings({"unchecked"})
public class SpyingOnInterfacesTest extends TestBase {
@Test
public void shouldFailFastWhenCallingRealMethodOnInterface() throws Exception {
List list = mock(List.class);
try {
//when
when(list.get(0)).thenCallRealMethod();
//then
fail();
} catch (MockitoException e) {}
}
@Test
public void shouldFailInRuntimeWhenCallingRealMethodOnInterface() throws Exception {
//given
List list = mock(List.class);
when(list.get(0)).thenAnswer(
new Answer() {
public Object answer(InvocationOnMock invocation) throws Throwable {
return invocation.callRealMethod();
}
}
);
try {
//when
list.get(0);
//then
fail();
} catch (MockitoException e) {}
}
} | mit |
stevenuray/XChange | xchange-ccex/src/main/java/org/knowm/xchange/ccex/dto/trade/CCEXOpenorder.java | 4881 | package org.knowm.xchange.ccex.dto.trade;
import java.math.BigDecimal;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * A single open order as reported by the C-CEX exchange API. Fields are bound
 * by Jackson from the JSON payload via the {@code @JsonProperty} constructor
 * parameters; mutable setters are provided for symmetry with the getters.
 */
public class CCEXOpenorder {

    private String orderUuid;
    private String exchange;
    private String orderType;
    private BigDecimal quantity;
    private BigDecimal quantityRemaining;
    private BigDecimal limit;
    private BigDecimal commissionPaid;
    private BigDecimal price;
    private BigDecimal pricePerUnit;
    private String opened;
    private String closed;
    private boolean cancelInitiated;
    private boolean immediateOrCancel;
    private boolean isConditional;
    private String condition;
    private String conditionTarget;

    public CCEXOpenorder(
        @JsonProperty("OrderUuid") String orderUuid,
        @JsonProperty("Exchange") String exchange,
        @JsonProperty("OrderType") String orderType,
        @JsonProperty("Quantity") BigDecimal quantity,
        @JsonProperty("QuantityRemaining") BigDecimal quantityRemaining,
        @JsonProperty("Limit") BigDecimal limit,
        @JsonProperty("CommissionPaid") BigDecimal commissionPaid,
        @JsonProperty("Price") BigDecimal price,
        @JsonProperty("PricePerUnit") BigDecimal pricePerUnit,
        @JsonProperty("Opened") String opened,
        @JsonProperty("Closed") String closed,
        @JsonProperty("CancelInitiated") boolean cancelInitiated,
        @JsonProperty("ImmediateOrCancel") boolean immediateOrCancel,
        @JsonProperty("IsConditional") boolean isConditional,
        @JsonProperty("Condition") String condition,
        @JsonProperty("ConditionTarget") String conditionTarget) {
      this.orderUuid = orderUuid;
      this.exchange = exchange;
      this.orderType = orderType;
      this.quantity = quantity;
      this.quantityRemaining = quantityRemaining;
      this.limit = limit;
      this.commissionPaid = commissionPaid;
      this.price = price;
      this.pricePerUnit = pricePerUnit;
      this.opened = opened;
      this.closed = closed;
      this.cancelInitiated = cancelInitiated;
      this.immediateOrCancel = immediateOrCancel;
      this.isConditional = isConditional;
      this.condition = condition;
      this.conditionTarget = conditionTarget;
    }

    public String getOrderUuid() {
      return orderUuid;
    }

    public void setOrderUuid(String orderUuid) {
      this.orderUuid = orderUuid;
    }

    public String getExchange() {
      return exchange;
    }

    public void setExchange(String exchange) {
      this.exchange = exchange;
    }

    public String getOrderType() {
      return orderType;
    }

    public void setOrderType(String orderType) {
      this.orderType = orderType;
    }

    public BigDecimal getQuantity() {
      return quantity;
    }

    public void setQuantity(BigDecimal quantity) {
      this.quantity = quantity;
    }

    public BigDecimal getQuantityRemaining() {
      return quantityRemaining;
    }

    public void setQuantityRemaining(BigDecimal quantityRemaining) {
      this.quantityRemaining = quantityRemaining;
    }

    public BigDecimal getLimit() {
      return limit;
    }

    public void setLimit(BigDecimal limit) {
      this.limit = limit;
    }

    public BigDecimal getCommissionPaid() {
      return commissionPaid;
    }

    public void setCommissionPaid(BigDecimal commissionPaid) {
      this.commissionPaid = commissionPaid;
    }

    public BigDecimal getPrice() {
      return price;
    }

    public void setPrice(BigDecimal price) {
      this.price = price;
    }

    public BigDecimal getPricePerUnit() {
      return pricePerUnit;
    }

    public void setPricePerUnit(BigDecimal pricePerUnit) {
      this.pricePerUnit = pricePerUnit;
    }

    public String getOpened() {
      return opened;
    }

    public void setOpened(String opened) {
      this.opened = opened;
    }

    public String getClosed() {
      return closed;
    }

    public void setClosed(String closed) {
      this.closed = closed;
    }

    public boolean isCancelInitiated() {
      return cancelInitiated;
    }

    public void setCancelInitiated(boolean cancelInitiated) {
      this.cancelInitiated = cancelInitiated;
    }

    public boolean isImmediateOrCancel() {
      return immediateOrCancel;
    }

    public void setImmediateOrCancel(boolean immediateOrCancel) {
      this.immediateOrCancel = immediateOrCancel;
    }

    public boolean isIsConditional() {
      return isConditional;
    }

    public void setIsConditional(boolean isConditional) {
      this.isConditional = isConditional;
    }

    public String getCondition() {
      return condition;
    }

    public void setCondition(String condition) {
      this.condition = condition;
    }

    public String getConditionTarget() {
      return conditionTarget;
    }

    public void setConditionTarget(String conditionTarget) {
      this.conditionTarget = conditionTarget;
    }

    @Override
    public String toString() {
      // The literal labels deliberately mirror the original API field names.
      return "CCEXOpenorder [OrderUuid=" + orderUuid + ", Exchange=" + exchange + ", OrderType=" + orderType
          + ", Quantity=" + quantity + ", QuantityRemaining=" + quantityRemaining + ", Limit=" + limit
          + ", CommissionPaid=" + commissionPaid + ", Price=" + price + ", PricePerUnit=" + pricePerUnit
          + ", Opened=" + opened + ", Closed=" + closed + ", CancelInitiated=" + cancelInitiated
          + ", ImmediateOrCancel=" + immediateOrCancel + ", IsConditional=" + isConditional + ", Condition="
          + condition + ", ConditionTarget=" + conditionTarget + "]";
    }
}
| mit |
pengzhao001/android-apps | StudentRegistration/src/com/findingsoft/studentregistration/ManageDepartment.java | 3266 | package com.findingsoft.studentregistration;
import gateways.DepartmentGateWay;
import java.util.ArrayList;
import utilities.Departments;
import adapters.DepartmentAdapter;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.Toast;
public class ManageDepartment extends Activity {
EditText dCodetxt, dNametxt;
Button btnCreateD, btnDeleteD, btnUpdateD;
ListView deptList;
DepartmentGateWay gatewayDept = new DepartmentGateWay(this);
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.manage_departments);
initialControls();
eventRegister();
proccessListView();
}
private void initialControls(){
dCodetxt = (EditText)findViewById(R.id.txtDCode);
dNametxt = (EditText)findViewById(R.id.txtDName);
btnCreateD = (Button)findViewById(R.id.btnCreateD);
btnDeleteD = (Button)findViewById(R.id.btnDeleteD);
btnUpdateD = (Button)findViewById(R.id.btnUpdateD);
deptList = (ListView)findViewById(R.id.deptListAll);
}
private void eventRegister(){
btnCreateD.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
saveDept();
}
});
btnDeleteD.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
deleteDept();
}
});
btnUpdateD.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
updateDept();
}
});
}
private void saveDept(){
if( !dCodetxt.getText().equals("") && !dNametxt.getText().equals("") ){
Departments aDept = new Departments();
aDept.setDeptCode(dCodetxt.getText().toString());
aDept.setDeptName(dNametxt.getText().toString());
String res = gatewayDept.save(aDept);
showToast(res);
proccessListView();
}
}
private void deleteDept(){
if( !dCodetxt.getText().equals("") ){
Departments aDept = new Departments();
aDept.setDeptCode(dCodetxt.getText().toString());
aDept.setDeptName(dNametxt.getText().toString());
String res = gatewayDept.deptDelete(dCodetxt.getText().toString());
showToast(res);
proccessListView();
}
}
private void updateDept(){
if( !dCodetxt.getText().equals("") && !dNametxt.getText().equals("") ){
Departments aDept = new Departments();
aDept.setDeptCode(dCodetxt.getText().toString());
aDept.setDeptName(dNametxt.getText().toString());
String res = gatewayDept.deptUpdate(aDept);
showToast(res);
proccessListView();
}
}
// Reloads all departments from the gateway and renders them in the ListView.
private void proccessListView() {
    loadListView(gatewayDept.getAll());
}
// Renders the given departments in the list via a DepartmentAdapter.
private void loadListView(ArrayList<Departments> aDept) {
    // deptList was already bound to R.id.deptListAll in initialControls(),
    // which runs before any caller of this method; reuse it instead of
    // performing a redundant second findViewById lookup of the same view.
    deptList.setAdapter(new DepartmentAdapter(this, aDept));
}
// Shows a short transient message to the user.
private void showToast(String msg) {
    // BUG FIX: Toast.makeText's third argument is a duration *flag*
    // (LENGTH_SHORT or LENGTH_LONG), not a millisecond value; the literal
    // 2000 is not a valid flag. LENGTH_SHORT (~2 seconds) matches the
    // original intent.
    Toast.makeText(getApplicationContext(), msg, Toast.LENGTH_SHORT).show();
}
}
| mit |
stachon/XChange | xchange-dsx/src/main/java/org/knowm/xchange/dsx/service/DsxTradeServiceRaw.java | 4449 | package org.knowm.xchange.dsx.service;
import java.io.IOException;
import java.math.BigDecimal;
import java.time.Instant;
import java.util.Date;
import java.util.List;
import java.util.Optional;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.dsx.DsxAdapters;
import org.knowm.xchange.dsx.dto.DsxBalance;
import org.knowm.xchange.dsx.dto.DsxMarketOrder;
import org.knowm.xchange.dsx.dto.DsxOrder;
import org.knowm.xchange.dsx.dto.DsxOwnTrade;
import org.knowm.xchange.dsx.dto.DsxSort;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.MarketOrder;
/** Raw (exchange-specific) trade operations for the DSX exchange. */
public class DsxTradeServiceRaw extends DsxBaseService {

  public DsxTradeServiceRaw(Exchange exchange) {
    super(exchange);
  }

  /** Fetches every order that is currently active on the exchange. */
  public List<DsxOrder> getOpenOrdersRaw() throws IOException {
    return dsx.getDsxActiveOrders();
  }

  /** Submits a market order, executed immediate-or-cancel with no limit price. */
  public DsxOrder placeMarketOrderRaw(MarketOrder marketOrder) throws IOException {
    String clientOrderId =
        marketOrder instanceof DsxMarketOrder
            ? ((DsxMarketOrder) marketOrder).getClientOrderId()
            : null;
    return dsx.postDsxNewOrder(
        clientOrderId,
        DsxAdapters.adaptCurrencyPair(marketOrder.getCurrencyPair()),
        DsxAdapters.getSide(marketOrder.getType()).toString(),
        null, // market orders carry no limit price
        marketOrder.getOriginalAmount(),
        DsxOrderType.market,
        DsxTimeInForce.IOC);
  }

  /** Submits a limit order with the supplied time-in-force. */
  public DsxOrder placeLimitOrderRaw(LimitOrder limitOrder, DsxTimeInForce timeInForce)
      throws IOException {
    return dsx.postDsxNewOrder(
        limitOrder.getUserReference(),
        DsxAdapters.adaptCurrencyPair(limitOrder.getCurrencyPair()),
        DsxAdapters.getSide(limitOrder.getType()).toString(),
        limitOrder.getLimitPrice(),
        limitOrder.getOriginalAmount(),
        DsxOrderType.limit,
        timeInForce);
  }

  /** Submits a good-till-cancelled limit order. */
  public DsxOrder placeLimitOrderRaw(LimitOrder limitOrder) throws IOException {
    return placeLimitOrderRaw(limitOrder, DsxTimeInForce.GTC);
  }

  /** Amends the quantity and, optionally, the price of an existing order. */
  public DsxOrder updateMarketOrderRaw(
      String clientOrderId, BigDecimal quantity, String requestClientId, Optional<BigDecimal> price)
      throws IOException {
    BigDecimal newPrice = price.orElse(null);
    return dsx.updateDsxOrder(clientOrderId, quantity, requestClientId, newPrice);
  }

  /** Cancels the single order identified by the given client order id. */
  public DsxOrder cancelOrderRaw(String clientOrderId) throws IOException {
    return dsx.cancelSingleOrder(clientOrderId);
  }

  /** Cancels every open order for the given symbol. */
  public List<DsxOrder> cancelAllOrdersRaw(String symbol) throws IOException {
    return dsx.cancelAllOrders(symbol);
  }

  /** Lists the historical trades that belong to one order. */
  public List<DsxOwnTrade> getHistorialTradesByOrder(String orderId) throws IOException {
    return dsx.getHistorialTradesByOrder(orderId);
  }

  /** Lists recently placed orders. */
  public List<DsxOrder> getDsxRecentOrders() throws IOException {
    return dsx.getDsxRecentOrders();
  }

  /** Returns a page of trade history with no sort or window filters. */
  public List<DsxOwnTrade> getTradeHistoryRaw(String symbol, Integer limit, long offset)
      throws IOException {
    return dsx.getDsxTrades(symbol, null, null, null, null, limit, offset);
  }

  /** Returns trade history restricted to a timestamp window, ISO-8601 encoded. */
  public List<DsxOwnTrade> getTradeHistoryRaw(
      String symbol, DsxSort sort, Date from, Date till, Integer limit, long offset)
      throws IOException {
    String sortParam = null;
    if (sort != null) {
      sortParam = sort.toString().toUpperCase();
    }
    String fromParam = null;
    if (from != null) {
      fromParam = Instant.ofEpochMilli(from.getTime()).toString();
    }
    String tillParam = null;
    if (till != null) {
      tillParam = Instant.ofEpochMilli(till.getTime()).toString();
    }
    return dsx.getDsxTrades(symbol, sortParam, "timestamp", fromParam, tillParam, limit, offset);
  }

  /** Returns trade history restricted to a trade-id window. */
  public List<DsxOwnTrade> getTradeHistoryRaw(
      String symbol, DsxSort sort, Long fromId, Long tillId, Integer limit, long offset)
      throws IOException {
    String sortParam = null;
    if (sort != null) {
      sortParam = sort.toString().toUpperCase();
    }
    String fromParam = null;
    if (fromId != null) {
      fromParam = fromId.toString();
    }
    String tillParam = null;
    if (tillId != null) {
      tillParam = tillId.toString();
    }
    return dsx.getDsxTrades(symbol, sortParam, "id", fromParam, tillParam, limit, offset);
  }

  /** Looks up a single order; returns null when the exchange reports none. */
  public DsxOrder getDsxOrder(String symbol, String clientOrderId) throws IOException {
    List<DsxOrder> matches = dsx.getDsxOrder(symbol, clientOrderId);
    if (matches == null || matches.isEmpty()) {
      return null;
    }
    return matches.get(0);
  }

  /** Returns the account's trading balances. */
  public List<DsxBalance> getTradingBalance() throws IOException {
    return dsx.getTradingBalance();
  }
}
| mit |
asposecells/Aspose_Cells_Cloud | SDKs/Aspose.Cells-Cloud-SDK-for-Java/src/main/java/com/aspose/cells/model/Columns.java | 1890 | package com.aspose.cells.model;
import java.util.ArrayList;
import java.util.List;
/** Bean describing the columns of a worksheet, as returned by the Cells API. */
public class Columns {

    // Index of the highest populated column.
    private Integer maxColumn = null;

    // Number of column entries in the collection.
    private Integer columnsCount = null;

    // Link elements describing the individual columns.
    private List<LinkElement> columnsList = new ArrayList<LinkElement>();

    // Hypermedia link for this resource itself.
    private Link link = null;

    /**
     * Returns the index of the highest populated column.
     *
     * @return MaxColumn
     */
    public Integer getMaxColumn() {
        return maxColumn;
    }

    /**
     * Sets the index of the highest populated column.
     *
     * @param maxColumn the new value
     */
    public void setMaxColumn(Integer maxColumn) {
        this.maxColumn = maxColumn;
    }

    /**
     * Returns the number of column entries in the collection.
     *
     * @return ColumnsCount
     */
    public Integer getColumnsCount() {
        return columnsCount;
    }

    /**
     * Sets the number of column entries in the collection.
     *
     * @param columnsCount the new value
     */
    public void setColumnsCount(Integer columnsCount) {
        this.columnsCount = columnsCount;
    }

    /**
     * Returns the link elements describing the individual columns.
     *
     * @return ColumnsList
     */
    public List<LinkElement> getColumnsList() {
        return columnsList;
    }

    /**
     * Sets the link elements describing the individual columns.
     *
     * @param columnsList the new value
     */
    public void setColumnsList(List<LinkElement> columnsList) {
        this.columnsList = columnsList;
    }

    /**
     * Returns the hypermedia link for this resource.
     *
     * @return link
     */
    public Link getLink() {
        return link;
    }

    /**
     * Sets the hypermedia link for this resource.
     *
     * @param link the new value
     */
    public void setLink(Link link) {
        this.link = link;
    }

    @Override
    public String toString() {
        // Simple multi-line dump of the bean state.
        StringBuilder text = new StringBuilder("class Columns {\n");
        text.append(" MaxColumn: ").append(maxColumn).append("\n");
        text.append(" ColumnsCount: ").append(columnsCount).append("\n");
        text.append(" ColumnsList: ").append(columnsList).append("\n");
        text.append(" link: ").append(link).append("\n");
        text.append("}\n");
        return text.toString();
    }
}
| mit |
dougkoellmer/swarm | tools/appengine-java-sdk/demos/jdoexamples/src/com/google/appengine/demos/jdoexamples/GuestbookServlet.java | 1159 | /* Copyright (c) 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.appengine.demos.jdoexamples;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/** Servlet that stores a guestbook entry and redirects back to the guestbook page. */
public class GuestbookServlet extends HttpServlet {

  @Override
  protected void doPost(HttpServletRequest req, HttpServletResponse resp)
      throws ServletException, IOException {
    // Persist the submitted entry, then redirect so a refresh cannot re-post it.
    String who = req.getParameter("who");
    String message = req.getParameter("message");
    GuestbookUtils.insert(who, message);
    resp.sendRedirect("/guestbook.jsp");
  }
}
| mit |
reinhapa/TweetwallFX | stepengine-api/src/main/java/org/tweetwallfx/stepengine/api/testcase/VisualizationFactoryLoadable.java | 1916 | /*
* The MIT License (MIT)
*
* Copyright (c) 2019 TweetWallFX
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.tweetwallfx.stepengine.api.testcase;
import java.util.ServiceLoader;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.tweetwallfx.util.testcase.RunnableTestCase;
import org.tweetwallfx.stepengine.api.Visualization;
/**
* Testcase checking that all registered {@link Visualization.Factory} instances
* are loadable.
*/
public class VisualizationFactoryLoadable implements RunnableTestCase {

    private static final Logger LOG = LogManager.getLogger(VisualizationFactoryLoadable.class);

    /**
     * Iterates all {@link Visualization.Factory} implementations registered via
     * {@link ServiceLoader} and logs each loaded class. A factory that cannot
     * be loaded makes the loader throw, which fails this test case.
     */
    @Override
    public void execute() throws Exception {
        for (final Visualization.Factory factory : ServiceLoader.load(Visualization.Factory.class)) {
            // Parameterized logging: no string concatenation when INFO is disabled.
            LOG.info("loaded {}", factory.getClass());
        }
    }
}
| mit |
andyandy1992/MyMOOCs | courses/Other/Nand2Tetris(Partial)/Code/SimulatorsGUIPackageSource/SimulatorsGUI/MemorySegmentsComponent.java | 6216 | /********************************************************************************
* The contents of this file are subject to the GNU General Public License *
* (GPL) Version 2 or later (the "License"); you may not use this file except *
* in compliance with the License. You may obtain a copy of the License at *
* http://www.gnu.org/copyleft/gpl.html *
* *
* Software distributed under the License is distributed on an "AS IS" basis, *
* without warranty of any kind, either expressed or implied. See the License *
* for the specific language governing rights and limitations under the *
* License. *
* *
* This file was originally developed as part of the software suite that *
* supports the book "The Elements of Computing Systems" by Nisan and Schocken, *
* MIT Press 2005. If you modify the contents of this file, please document and *
* mark your changes clearly, for the benefit of others. *
********************************************************************************/
package SimulatorsGUI;
import javax.swing.*;
import java.awt.*;
import HackGUI.*;
/**
* This Panel contains six MemorySegmentComponents: static, local, arg,
* this, that, and temp - and provides the split pane feature between
* them.
*/
public class MemorySegmentsComponent extends JPanel {

    // Nested split panes; pane 1 is the outermost and is what getSplitPane() returns.
    // Nesting order: static | (local | (arg | (this | (that | temp)))).
    private JSplitPane segmentsSplitPane1;
    private JSplitPane segmentsSplitPane2;
    private JSplitPane segmentsSplitPane3;
    private JSplitPane segmentsSplitPane4;
    private JSplitPane segmentsSplitPane5;

    // The six memory segments displayed by this component.
    private MemorySegmentComponent staticSegment;
    private MemorySegmentComponent localSegment;
    private MemorySegmentComponent argSegment;
    private MemorySegmentComponent thisSegment;
    private MemorySegmentComponent thatSegment;
    private MemorySegmentComponent tempSegment;

    /**
     * Constructs a new MemorySegmentsComponent: creates the six named segments
     * and stacks them vertically inside nested one-touch-expandable split panes.
     * The repetitive creation code of the original is factored into helpers.
     */
    public MemorySegmentsComponent() {
        // Create the segments and give them their display names.
        staticSegment = createSegment("Static");
        localSegment = createSegment("Local");
        argSegment = createSegment("Argument");
        thisSegment = createSegment("This");
        thatSegment = createSegment("That");
        tempSegment = createSegment("Temp");

        // Build the pane chain bottom-up so each pane nests the one below it.
        segmentsSplitPane5 = createSplitPane(thatSegment, tempSegment);
        segmentsSplitPane4 = createSplitPane(thisSegment, segmentsSplitPane5);
        segmentsSplitPane3 = createSplitPane(argSegment, segmentsSplitPane4);
        segmentsSplitPane2 = createSplitPane(localSegment, segmentsSplitPane3);
        segmentsSplitPane1 = createSplitPane(staticSegment, segmentsSplitPane2);

        // Remove the borders of the nested panes so only the outermost pane
        // draws one.
        segmentsSplitPane5.setBorder(null);
        segmentsSplitPane4.setBorder(null);
        segmentsSplitPane3.setBorder(null);
        segmentsSplitPane2.setBorder(null);

        // Initial divider positions: a 30-pixel header plus a number of table rows.
        segmentsSplitPane1.setDividerLocation(dividerLocation(staticSegment, 5));
        segmentsSplitPane2.setDividerLocation(dividerLocation(localSegment, 5));
        segmentsSplitPane3.setDividerLocation(dividerLocation(argSegment, 5));
        segmentsSplitPane4.setDividerLocation(dividerLocation(thisSegment, 5));
        segmentsSplitPane5.setDividerLocation(dividerLocation(thatSegment, 2));

        segmentsSplitPane1.setSize(new Dimension(195, 587));
        segmentsSplitPane1.setPreferredSize(new Dimension(195, 587));
    }

    // Creates a memory segment component carrying the given display name.
    private static MemorySegmentComponent createSegment(String name) {
        MemorySegmentComponent segment = new MemorySegmentComponent();
        segment.setSegmentName(name);
        return segment;
    }

    // Creates a vertical, one-touch-expandable split pane over the two components.
    private static JSplitPane createSplitPane(Component top, Component bottom) {
        JSplitPane pane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, top, bottom);
        pane.setOneTouchExpandable(true);
        return pane;
    }

    // Divider offset for a segment: 30 pixels of header plus the given row count.
    private static int dividerLocation(MemorySegmentComponent segment, int rows) {
        return 30 + segment.getTable().getRowHeight() * rows;
    }

    /**
     * Returns the outermost split pane, which contains all of the other split panes.
     */
    public JSplitPane getSplitPane() {
        return segmentsSplitPane1;
    }

    /**
     * Returns the static memory segment.
     */
    public MemorySegmentComponent getStaticSegment() {
        return staticSegment;
    }

    /**
     * Returns the local memory segment.
     */
    public MemorySegmentComponent getLocalSegment() {
        return localSegment;
    }

    /**
     * Returns the arg memory segment.
     */
    public MemorySegmentComponent getArgSegment() {
        return argSegment;
    }

    /**
     * Returns the this memory segment.
     */
    public MemorySegmentComponent getThisSegment() {
        return thisSegment;
    }

    /**
     * Returns the that memory segment.
     */
    public MemorySegmentComponent getThatSegment() {
        return thatSegment;
    }

    /**
     * Returns the temp memory segment.
     */
    public MemorySegmentComponent getTempSegment() {
        return tempSegment;
    }
}
| cc0-1.0 |
sguan-actuate/birt | viewer/org.eclipse.birt.report.viewer/birt/WEB-INF/classes/org/eclipse/birt/report/presentation/aggregation/dialog/PrintReportDialogFragment.java | 1404 | /*************************************************************************************
* Copyright (c) 2004 Actuate Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - Initial implementation.
************************************************************************************/
package org.eclipse.birt.report.presentation.aggregation.dialog;
import org.eclipse.birt.report.resource.BirtResources;
import org.eclipse.birt.report.resource.ResourceConstants;
/**
* Fragment help rendering print dialog in side bar.
* <p>
*
* @see BaseFragment
*/
public class PrintReportDialogFragment extends BaseDialogFragment {

    /**
     * Returns the unique id of the UI gesture rendered by this fragment.
     *
     * @return the client id
     */
    public String getClientId() {
        return "printReportDialog"; //$NON-NLS-1$
    }

    /**
     * Returns the display name of the UI gesture rendered by this fragment.
     *
     * @return the client name
     */
    public String getClientName() {
        return "Print Report"; //$NON-NLS-1$
    }

    /**
     * Returns the localized title for the generated HTML page.
     *
     * @return the page title
     */
    public String getTitle() {
        return BirtResources.getMessage(ResourceConstants.PRINT_REPORT_DIALOG_TITLE);
    }
}
| epl-1.0 |
drbgfc/mdht | hl7/plugins/org.openhealthtools.mdht.uml.hdf.tooling.rsm/src/org/openhealthtools/mdht/uml/hdf/tooling/rsm/providers/RIMEditPolicyProvider.java | 2440 | /*******************************************************************************
* Copyright (c) 2006, 2009 David A Carlson.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* David A Carlson (XMLmodeling.com) - initial API and implementation
*
* $Id$
*******************************************************************************/
package org.openhealthtools.mdht.uml.hdf.tooling.rsm.providers;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.gef.EditPart;
import org.eclipse.gmf.runtime.common.core.service.AbstractProvider;
import org.eclipse.gmf.runtime.common.core.service.IOperation;
import org.eclipse.gmf.runtime.diagram.ui.editparts.IGraphicalEditPart;
import org.eclipse.gmf.runtime.diagram.ui.services.editpolicy.CreateEditPoliciesOperation;
import org.eclipse.gmf.runtime.diagram.ui.services.editpolicy.IEditPolicyProvider;
import org.eclipse.gmf.runtime.emf.type.core.IElementType;
import org.eclipse.gmf.runtime.emf.type.core.ISpecializationType;
import org.openhealthtools.mdht.uml.hdf.tooling.rsm.types.RIMElementTypes;
/**
* @generated
*/
public class RIMEditPolicyProvider extends AbstractProvider implements IEditPolicyProvider {

    private final static String PROFILE_ASSOCIATIONS_SEMANTIC_ROLE = "ProfileAssociationsSemanticRole"; //$NON-NLS-1$

    /**
     * Installs the RIM association edit policy on the given edit part.
     *
     * @generated NOT
     */
    public void createEditPolicies(EditPart editPart) {
        editPart.installEditPolicy(PROFILE_ASSOCIATIONS_SEMANTIC_ROLE, new RIMAssociationEditPolicy());
    }

    /**
     * Provides for {@link CreateEditPoliciesOperation}s whose graphical edit
     * part resolves to a semantic element matched by one of the RIM node types.
     *
     * @generated NOT
     */
    public boolean provides(IOperation operation) {
        // Guard-clause form of the original nested checks; identical semantics.
        if (!(operation instanceof CreateEditPoliciesOperation)) {
            return false;
        }
        EditPart editPart = ((CreateEditPoliciesOperation) operation).getEditPart();
        if (!(editPart instanceof IGraphicalEditPart)) {
            return false;
        }
        EObject element = ((IGraphicalEditPart) editPart).getNotationView().getElement();
        if (element == null) {
            return false;
        }
        for (IElementType elementType : RIMElementTypes.NODE_TYPES) {
            if (elementType instanceof ISpecializationType
                    && ((ISpecializationType) elementType).getMatcher().matches(element)) {
                return true;
            }
        }
        return false;
    }
}
| epl-1.0 |
amitjoy/kura | kura/org.eclipse.kura.net.admin/src/main/java/org/eclipse/kura/net/admin/FirewallConfigurationServiceImpl.java | 16946 | /*******************************************************************************
* Copyright (c) 2016 Eurotech and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eurotech - initial API and implementation
*******************************************************************************/
package org.eclipse.kura.net.admin;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.eclipse.kura.KuraErrorCode;
import org.eclipse.kura.KuraException;
import org.eclipse.kura.configuration.ComponentConfiguration;
import org.eclipse.kura.configuration.SelfConfiguringComponent;
import org.eclipse.kura.core.configuration.ComponentConfigurationImpl;
import org.eclipse.kura.core.configuration.metatype.ObjectFactory;
import org.eclipse.kura.core.configuration.metatype.Tad;
import org.eclipse.kura.core.configuration.metatype.Tocd;
import org.eclipse.kura.core.configuration.metatype.Tscalar;
import org.eclipse.kura.core.net.FirewallConfiguration;
import org.eclipse.kura.linux.net.iptables.LinuxFirewall;
import org.eclipse.kura.linux.net.iptables.LocalRule;
import org.eclipse.kura.linux.net.iptables.NATRule;
import org.eclipse.kura.linux.net.iptables.PortForwardRule;
import org.eclipse.kura.net.IP4Address;
import org.eclipse.kura.net.IPAddress;
import org.eclipse.kura.net.NetProtocol;
import org.eclipse.kura.net.NetworkPair;
import org.eclipse.kura.net.admin.event.FirewallConfigurationChangeEvent;
import org.eclipse.kura.net.firewall.FirewallAutoNatConfig;
import org.eclipse.kura.net.firewall.FirewallNatConfig;
import org.eclipse.kura.net.firewall.FirewallOpenPortConfigIP;
import org.eclipse.kura.net.firewall.FirewallOpenPortConfigIP4;
import org.eclipse.kura.net.firewall.FirewallPortForwardConfigIP;
import org.eclipse.kura.net.firewall.FirewallPortForwardConfigIP4;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.event.EventAdmin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FirewallConfigurationServiceImpl implements FirewallConfigurationService, SelfConfiguringComponent {
private static final Logger s_logger = LoggerFactory.getLogger(FirewallConfigurationServiceImpl.class);
private EventAdmin m_eventAdmin;
public void setEventAdmin(EventAdmin eventAdmin) {
this.m_eventAdmin = eventAdmin;
}
public void unsetEventAdmin(EventAdmin eventAdmin) {
this.m_eventAdmin = null;
}
protected void activate(ComponentContext componentContext, Map<String, Object> properties) {
s_logger.debug("activate()");
// we are intentionally ignoring the properties from ConfigAdmin at startup
if (properties == null) {
s_logger.debug("activate() :: Got null properties...");
} else {
for (String key : properties.keySet()) {
s_logger.debug("activate() :: Props... {}={}", key, properties.get(key));
}
}
}
protected void deactivate(ComponentContext componentContext) {
s_logger.debug("deactivate()");
}
public synchronized void updated(Map<String, Object> properties) {
s_logger.debug("updated()");
for (String key : properties.keySet()) {
s_logger.debug("updated() :: Props... {}={}", key, properties.get(key));
}
FirewallConfiguration firewallConfiguration = new FirewallConfiguration(properties);
try {
setFirewallOpenPortConfiguration(firewallConfiguration.getOpenPortConfigs());
} catch (KuraException e) {
s_logger.error("Failed to set Firewall OPen Ports Configuration - {}", e);
}
try {
setFirewallPortForwardingConfiguration(firewallConfiguration.getPortForwardConfigs());
} catch (KuraException e) {
s_logger.error("Failed to set Firewall Port Forwarding Configuration - {}", e);
}
try {
setFirewallNatConfiguration(firewallConfiguration.getNatConfigs());
} catch (KuraException e) {
s_logger.error("Failed to set Firewall NAT Configuration - {}", e);
}
// raise the event because there was a change
this.m_eventAdmin.postEvent(new FirewallConfigurationChangeEvent(properties));
}
@Override
public FirewallConfiguration getFirewallConfiguration() throws KuraException {
s_logger.debug("getting the firewall configuration");
FirewallConfiguration firewallConfiguration = new FirewallConfiguration();
LinuxFirewall firewall = LinuxFirewall.getInstance();
Iterator<LocalRule> localRules = firewall.getLocalRules().iterator();
while (localRules.hasNext()) {
LocalRule localRule = localRules.next();
if (localRule.getPortRange() != null) {
s_logger.debug("getFirewallConfiguration() :: Adding local rule for {}", localRule.getPortRange());
firewallConfiguration.addConfig(new FirewallOpenPortConfigIP4(localRule.getPortRange(),
NetProtocol.valueOf(localRule.getProtocol()), localRule.getPermittedNetwork(),
localRule.getPermittedInterfaceName(), localRule.getUnpermittedInterfaceName(),
localRule.getPermittedMAC(), localRule.getSourcePortRange()));
} else {
s_logger.debug("getFirewallConfiguration() :: Adding local rule for {}", localRule.getPort());
firewallConfiguration.addConfig(new FirewallOpenPortConfigIP4(localRule.getPort(),
NetProtocol.valueOf(localRule.getProtocol()), localRule.getPermittedNetwork(),
localRule.getPermittedInterfaceName(), localRule.getUnpermittedInterfaceName(),
localRule.getPermittedMAC(), localRule.getSourcePortRange()));
}
}
Iterator<PortForwardRule> portForwardRules = firewall.getPortForwardRules().iterator();
while (portForwardRules.hasNext()) {
PortForwardRule portForwardRule = portForwardRules.next();
try {
s_logger.debug("getFirewallConfiguration() :: Adding port forwarding - inbound iface is {}",
portForwardRule.getInboundIface());
firewallConfiguration
.addConfig(
new FirewallPortForwardConfigIP4(portForwardRule.getInboundIface(),
portForwardRule.getOutboundIface(),
(IP4Address) IPAddress.parseHostAddress(portForwardRule.getAddress()),
NetProtocol.valueOf(portForwardRule.getProtocol()), portForwardRule.getInPort(),
portForwardRule.getOutPort(), portForwardRule.isMasquerade(),
new NetworkPair<IP4Address>(
(IP4Address) IPAddress
.parseHostAddress(portForwardRule.getPermittedNetwork()),
(short) portForwardRule.getPermittedNetworkMask()),
portForwardRule.getPermittedMAC(), portForwardRule.getSourcePortRange()));
} catch (UnknownHostException e) {
e.printStackTrace();
throw new KuraException(KuraErrorCode.INTERNAL_ERROR, e);
}
}
Iterator<NATRule> autoNatRules = firewall.getAutoNatRules().iterator();
while (autoNatRules.hasNext()) {
NATRule autoNatRule = autoNatRules.next();
s_logger.debug("getFirewallConfiguration() :: Adding auto NAT rules {}", autoNatRule.getSourceInterface());
firewallConfiguration.addConfig(new FirewallAutoNatConfig(autoNatRule.getSourceInterface(),
autoNatRule.getDestinationInterface(), autoNatRule.isMasquerade()));
}
Iterator<NATRule> natRules = firewall.getNatRules().iterator();
while (natRules.hasNext()) {
NATRule natRule = natRules.next();
s_logger.debug("getFirewallConfiguration() :: Adding NAT rules {}", natRule.getSourceInterface());
firewallConfiguration.addConfig(new FirewallNatConfig(natRule.getSourceInterface(),
natRule.getDestinationInterface(), natRule.getProtocol(), natRule.getSource(),
natRule.getDestination(), natRule.isMasquerade()));
}
return firewallConfiguration;
}
@Override
public ComponentConfiguration getConfiguration() throws KuraException {
s_logger.debug("getConfiguration()");
try {
FirewallConfiguration firewallConfiguration = getFirewallConfiguration();
return new ComponentConfigurationImpl(PID, getDefinition(),
firewallConfiguration.getConfigurationProperties());
} catch (Exception e) {
e.printStackTrace();
throw new KuraException(KuraErrorCode.INTERNAL_ERROR, e);
}
}
@Override
public void setFirewallOpenPortConfiguration(
List<FirewallOpenPortConfigIP<? extends IPAddress>> firewallConfiguration) throws KuraException {
s_logger.debug("setFirewallOpenPortConfiguration() :: Deleting local rules");
LinuxFirewall firewall = LinuxFirewall.getInstance();
firewall.deleteAllLocalRules();
ArrayList<LocalRule> localRules = new ArrayList<LocalRule>();
for (FirewallOpenPortConfigIP<? extends IPAddress> openPortEntry : firewallConfiguration) {
if (openPortEntry.getPermittedNetwork() == null
|| openPortEntry.getPermittedNetwork().getIpAddress() == null) {
try {
openPortEntry
.setPermittedNetwork(new NetworkPair(IPAddress.parseHostAddress("0.0.0.0"), (short) 0));
} catch (UnknownHostException e) {
e.printStackTrace();
}
}
try {
LocalRule localRule = null;
if (openPortEntry.getPortRange() != null) {
s_logger.debug("setFirewallOpenPortConfiguration() :: Adding local rule for: {}",
openPortEntry.getPortRange());
localRule = new LocalRule(openPortEntry.getPortRange(), openPortEntry.getProtocol().name(),
new NetworkPair(
IPAddress.parseHostAddress(
openPortEntry.getPermittedNetwork().getIpAddress().getHostAddress()),
openPortEntry.getPermittedNetwork().getPrefix()),
openPortEntry.getPermittedInterfaceName(), openPortEntry.getUnpermittedInterfaceName(),
openPortEntry.getPermittedMac(), openPortEntry.getSourcePortRange());
} else {
s_logger.debug("setFirewallOpenPortConfiguration() :: Adding local rule for: {}",
openPortEntry.getPort());
localRule = new LocalRule(openPortEntry.getPort(), openPortEntry.getProtocol().name(),
new NetworkPair(
IPAddress.parseHostAddress(
openPortEntry.getPermittedNetwork().getIpAddress().getHostAddress()),
openPortEntry.getPermittedNetwork().getPrefix()),
openPortEntry.getPermittedInterfaceName(), openPortEntry.getUnpermittedInterfaceName(),
openPortEntry.getPermittedMac(), openPortEntry.getSourcePortRange());
}
localRules.add(localRule);
} catch (Exception e) {
s_logger.error("setFirewallOpenPortConfiguration() :: Failed to add local rule for: {} - {}",
openPortEntry.getPort(), e);
}
}
firewall.addLocalRules(localRules);
}
@Override
public void setFirewallPortForwardingConfiguration(
List<FirewallPortForwardConfigIP<? extends IPAddress>> firewallConfiguration) throws KuraException {
s_logger.debug("setFirewallPortForwardingConfiguration() :: Deleting port forward rules");
LinuxFirewall firewall = LinuxFirewall.getInstance();
firewall.deleteAllPortForwardRules();
ArrayList<PortForwardRule> portForwardRules = new ArrayList<PortForwardRule>();
for (FirewallPortForwardConfigIP<? extends IPAddress> portForwardEntry : firewallConfiguration) {
s_logger.debug("setFirewallPortForwardingConfiguration() :: Adding port forward rule for: {}",
portForwardEntry.getInPort());
if (portForwardEntry.getPermittedNetwork() == null
|| portForwardEntry.getPermittedNetwork().getIpAddress() == null) {
try {
portForwardEntry
.setPermittedNetwork(new NetworkPair(IPAddress.parseHostAddress("0.0.0.0"), (short) 0));
} catch (UnknownHostException e) {
e.printStackTrace();
}
}
PortForwardRule portForwardRule = new PortForwardRule(portForwardEntry.getInboundInterface(),
portForwardEntry.getOutboundInterface(), portForwardEntry.getAddress().getHostAddress(),
portForwardEntry.getProtocol().name(), portForwardEntry.getInPort(), portForwardEntry.getOutPort(),
portForwardEntry.isMasquerade(),
portForwardEntry.getPermittedNetwork().getIpAddress().getHostAddress(),
portForwardEntry.getPermittedNetwork().getPrefix(), portForwardEntry.getPermittedMac(),
portForwardEntry.getSourcePortRange());
portForwardRules.add(portForwardRule);
}
firewall.addPortForwardRules(portForwardRules);
}
@Override
public void setFirewallNatConfiguration(List<FirewallNatConfig> natConfigs) throws KuraException {
LinuxFirewall firewall = LinuxFirewall.getInstance();
firewall.deleteAllNatRules();
ArrayList<NATRule> natRules = new ArrayList<NATRule>();
for (FirewallNatConfig natConfig : natConfigs) {
NATRule natRule = new NATRule(natConfig.getSourceInterface(), natConfig.getDestinationInterface(),
natConfig.getProtocol(), natConfig.getSource(), natConfig.getDestination(),
natConfig.isMasquerade());
natRules.add(natRule);
}
firewall.addNatRules(natRules);
}
private Tocd getDefinition() throws KuraException {
    // Builds the OCD (object class definition) describing this service's
    // configuration: three string-typed attributes (open ports, port
    // forwarding, NAT) that differ only in their id/name and default value,
    // so the repeated Tad construction is delegated to a helper.
    final ObjectFactory objectFactory = new ObjectFactory();

    final Tocd tocd = objectFactory.createTocd();
    tocd.setName("FirewallConfigurationService");
    tocd.setId("org.eclipse.kura.net.admin.FirewallConfigurationService");
    tocd.setDescription("Firewall Configuration Service");

    tocd.addAD(createStringAttribute(objectFactory, FirewallConfiguration.OPEN_PORTS_PROP_NAME,
            FirewallConfiguration.DFLT_OPEN_PORTS_VALUE));
    tocd.addAD(createStringAttribute(objectFactory, FirewallConfiguration.PORT_FORWARDING_PROP_NAME,
            FirewallConfiguration.DFLT_PORT_FORWARDING_VALUE));
    tocd.addAD(createStringAttribute(objectFactory, FirewallConfiguration.NAT_PROP_NAME,
            FirewallConfiguration.DFLT_NAT_VALUE));

    return tocd;
}

/**
 * Creates one attribute definition for the firewall OCD. Every property
 * shares the same shape: a required STRING with cardinality 10000 whose id
 * and name are identical, differing only in the supplied default value.
 */
private static Tad createStringAttribute(ObjectFactory objectFactory, String idAndName, String defaultValue) {
    final Tad tad = objectFactory.createTad();
    tad.setId(idAndName);
    tad.setName(idAndName);
    tad.setType(Tscalar.STRING);
    tad.setCardinality(10000);
    tad.setRequired(true);
    tad.setDefault(defaultValue);
    tad.setDescription(NetworkAdminConfigurationMessages.getMessage(NetworkAdminConfiguration.PLATFORM_INTERFACES));
    return tad;
}
}
| epl-1.0 |
TheNetStriker/openhab | bundles/binding/org.openhab.binding.velux/src/main/java/org/openhab/binding/velux/bridge/VeluxBridgeGetProducts.java | 1742 | /**
* Copyright (c) 2010-2019 by the respective copyright holders.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.velux.bridge;
import org.openhab.binding.velux.internal.config.VeluxBridgeConfiguration;
import org.openhab.binding.velux.things.VeluxExistingProducts;
import org.openhab.binding.velux.things.VeluxProduct;
/**
* The {@link VeluxBridgeGetProducts} represents a complete set of transactions
* for retrieving of any available products into a structure {@link VeluxExistingProducts}
* defined on the <B>Velux</B> bridge.
* <P>
* It therefore provides a method
* <UL>
* <LI>{@link VeluxBridgeGetProducts#getProducts} for retrieval of information.
* </UL>
* Any parameters are controlled by {@link VeluxBridgeConfiguration}.
*
* @see VeluxProduct
* @see VeluxExistingProducts
*
* @author Guenther Schreiner - Initial contribution
* @since 1.13.0
*/
@Deprecated
public class VeluxBridgeGetProducts {

    /**
     * Placeholder for the product-retrieval transaction set.
     * <P>
     * This class is deprecated: it performs no bridge I/O at all and always
     * reports failure, so that remaining callers migrate away from it. The
     * passed bridge handler is ignored.
     *
     * @param bridge Initialized Velux bridge handler (unused).
     * @return <b>success</b> of type boolean describing the overall result of
     *         this interaction; always {@code false}.
     */
    public boolean getProducts(VeluxBridgeProvider bridge) {
        return false;
    }
}
| epl-1.0 |
drbgfc/mdht | hl7/plugins/org.openhealthtools.mdht.emf.hl7.mif2/src/org/openhealthtools/mdht/emf/w3c/xhtml/Q.java | 6922 | /*******************************************************************************
* Copyright (c) 2006, 2009 David A Carlson
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* David A Carlson (XMLmodeling.com) - initial API and implementation
*******************************************************************************/
package org.openhealthtools.mdht.emf.w3c.xhtml;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Q</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* <ul>
* <li>{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getCite1 <em>Cite1</em>}</li>
* <li>{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getClass_ <em>Class</em>}</li>
* <li>{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getLang <em>Lang</em>}</li>
* <li>{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getStyle <em>Style</em>}</li>
* </ul>
* </p>
*
* @see org.openhealthtools.mdht.emf.w3c.xhtml.XhtmlPackage#getQ()
* @model extendedMetaData="name='Q' kind='mixed'"
* @generated
*/
public interface Q extends Inline {

    // NOTE(review): This interface is EMF-generated. The "@model" and
    // "extendedMetaData" Javadoc tags below are read by the EMF code
    // generator / XML serialization layer and must be preserved verbatim.
    // Hand edits to "@generated" members are lost on regeneration unless the
    // tag is changed to "@generated NOT".

    /**
     * Returns the value of the '<em><b>Cite1</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * <!-- begin-model-doc -->
     * Provides a reference to the external source from which the quote is extracted.
     * <!-- end-model-doc -->
     * @return the value of the '<em>Cite1</em>' attribute.
     * @see #setCite1(String)
     * @see org.openhealthtools.mdht.emf.w3c.xhtml.XhtmlPackage#getQ_Cite1()
     * @model dataType="org.openhealthtools.mdht.emf.w3c.xhtml.URI"
     * extendedMetaData="kind='attribute' name='cite'"
     * @generated
     */
    String getCite1();

    /**
     * Sets the value of the '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getCite1 <em>Cite1</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Cite1</em>' attribute.
     * @see #getCite1()
     * @generated
     */
    void setCite1(String value);

    /**
     * Returns the value of the '<em><b>Class</b></em>' attribute.
     * The literals are from the enumeration {@link org.openhealthtools.mdht.emf.w3c.xhtml.MifClassType}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * <!-- begin-model-doc -->
     * space separated list of classes
     * <!-- end-model-doc -->
     * @return the value of the '<em>Class</em>' attribute.
     * @see org.openhealthtools.mdht.emf.w3c.xhtml.MifClassType
     * @see #isSetClass()
     * @see #unsetClass()
     * @see #setClass(MifClassType)
     * @see org.openhealthtools.mdht.emf.w3c.xhtml.XhtmlPackage#getQ_Class()
     * @model unsettable="true"
     * extendedMetaData="kind='attribute' name='class'"
     * @generated
     */
    MifClassType getClass_();

    /**
     * Sets the value of the '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getClass_ <em>Class</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Class</em>' attribute.
     * @see org.openhealthtools.mdht.emf.w3c.xhtml.MifClassType
     * @see #isSetClass()
     * @see #unsetClass()
     * @see #getClass_()
     * @generated
     */
    void setClass(MifClassType value);

    /**
     * Unsets the value of the '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getClass_ <em>Class</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #isSetClass()
     * @see #getClass_()
     * @see #setClass(MifClassType)
     * @generated
     */
    void unsetClass();

    /**
     * Returns whether the value of the '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getClass_ <em>Class</em>}' attribute is set.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return whether the value of the '<em>Class</em>' attribute is set.
     * @see #unsetClass()
     * @see #getClass_()
     * @see #setClass(MifClassType)
     * @generated
     */
    boolean isSetClass();

    /**
     * Returns the value of the '<em><b>Lang</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * <!-- begin-model-doc -->
     * language code (as per XML 1.0 spec)
     * <!-- end-model-doc -->
     * @return the value of the '<em>Lang</em>' attribute.
     * @see #setLang(String)
     * @see org.openhealthtools.mdht.emf.w3c.xhtml.XhtmlPackage#getQ_Lang()
     * @model dataType="org.eclipse.emf.ecore.xml.namespace.LangType"
     * extendedMetaData="kind='attribute' name='lang' namespace='http://www.w3.org/XML/1998/namespace'"
     * @generated
     */
    String getLang();

    /**
     * Sets the value of the '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getLang <em>Lang</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Lang</em>' attribute.
     * @see #getLang()
     * @generated
     */
    void setLang(String value);

    /**
     * Returns the value of the '<em><b>Style</b></em>' attribute.
     * The literals are from the enumeration {@link org.openhealthtools.mdht.emf.w3c.xhtml.StyleSheet}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * <!-- begin-model-doc -->
     * associated style info
     * <!-- end-model-doc -->
     * @return the value of the '<em>Style</em>' attribute.
     * @see org.openhealthtools.mdht.emf.w3c.xhtml.StyleSheet
     * @see #isSetStyle()
     * @see #unsetStyle()
     * @see #setStyle(StyleSheet)
     * @see org.openhealthtools.mdht.emf.w3c.xhtml.XhtmlPackage#getQ_Style()
     * @model unsettable="true"
     * extendedMetaData="kind='attribute' name='style'"
     * @generated
     */
    StyleSheet getStyle();

    /**
     * Sets the value of the '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getStyle <em>Style</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Style</em>' attribute.
     * @see org.openhealthtools.mdht.emf.w3c.xhtml.StyleSheet
     * @see #isSetStyle()
     * @see #unsetStyle()
     * @see #getStyle()
     * @generated
     */
    void setStyle(StyleSheet value);

    /**
     * Unsets the value of the '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getStyle <em>Style</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #isSetStyle()
     * @see #getStyle()
     * @see #setStyle(StyleSheet)
     * @generated
     */
    void unsetStyle();

    /**
     * Returns whether the value of the '{@link org.openhealthtools.mdht.emf.w3c.xhtml.Q#getStyle <em>Style</em>}' attribute is set.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @return whether the value of the '<em>Style</em>' attribute is set.
     * @see #unsetStyle()
     * @see #getStyle()
     * @see #setStyle(StyleSheet)
     * @generated
     */
    boolean isSetStyle();

} // Q
| epl-1.0 |
mcculls/sisu.plexus | org.eclipse.sisu.plexus/src/org/eclipse/sisu/plexus/RequirementImpl.java | 4280 | /*******************************************************************************
* Copyright (c) 2010-present Sonatype, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Stuart McCulloch (Sonatype, Inc.) - initial API and implementation
*******************************************************************************/
package org.eclipse.sisu.plexus;
import java.lang.annotation.Annotation;
import java.util.Arrays;
import java.util.List;
import org.codehaus.plexus.component.annotations.Requirement;
import org.eclipse.sisu.inject.DeferredClass;
import org.eclipse.sisu.space.LoadedClass;
/**
* Runtime implementation of Plexus @{@link Requirement} annotation.
*/
public final class RequirementImpl
    implements Requirement
{
    // NOTE: equals()/hashCode()/toString() deliberately follow the
    // java.lang.annotation.Annotation contract so that instances of this
    // class compare equal to real @Requirement annotation proxies; the exact
    // formulas below are therefore value-sensitive and must not be altered.

    // ----------------------------------------------------------------------
    // Implementation fields
    // ----------------------------------------------------------------------

    // Role class is deferred so it is only loaded when role() is called.
    private final DeferredClass<?> role;

    private final boolean optional;

    // Encoding convention: exactly one hint is stored in 'hint' with 'hints'
    // empty; multiple hints leave 'hint' empty and fill 'hints'.
    private final String hint;

    private final String[] hints;

    // ----------------------------------------------------------------------
    // Constructors
    // ----------------------------------------------------------------------

    /**
     * @param role the required role; must not be null
     * @param optional whether the requirement is optional
     * @param hints role hints; the list and its elements must not be null
     */
    public RequirementImpl( final DeferredClass<?> role, final boolean optional, final List<String> hints )
    {
        if ( null == role || null == hints || hints.contains( null ) )
        {
            throw new IllegalArgumentException( "@Requirement cannot contain null values" );
        }

        this.role = role;
        this.optional = optional;

        final int length = hints.size();
        if ( length == 0 )
        {
            // No hints at all: both members take their neutral values.
            hint = "";
            this.hints = Hints.NO_HINTS;
        }
        else if ( length == 1 )
        {
            // Single hint goes into 'hint'; 'hints' stays empty.
            hint = hints.get( 0 );
            this.hints = Hints.NO_HINTS;
        }
        else
        {
            // Multiple hints go into 'hints'; 'hint' stays empty.
            hint = "";
            this.hints = hints.toArray( new String[length] );
        }
    }

    /**
     * For testing purposes only.
     */
    @Deprecated
    public RequirementImpl( final Class<?> role, final boolean optional, final String... hints )
    {
        this( new LoadedClass<Object>( role ), optional, Arrays.asList( hints ) );
    }

    // ----------------------------------------------------------------------
    // Annotation properties
    // ----------------------------------------------------------------------

    public Class<?> role()
    {
        // Triggers (possibly repeated) loading of the deferred class.
        return role.load();
    }

    public boolean optional()
    {
        return optional;
    }

    public String hint()
    {
        return hint;
    }

    public String[] hints()
    {
        // Defensive copy so callers cannot mutate the internal array.
        return hints.clone();
    }

    // ----------------------------------------------------------------------
    // Standard annotation behaviour
    // ----------------------------------------------------------------------

    @Override
    public boolean equals( final Object rhs )
    {
        if ( this == rhs )
        {
            return true;
        }

        if ( rhs instanceof Requirement )
        {
            // Member-wise comparison as mandated by Annotation.equals().
            final Requirement req = (Requirement) rhs;

            return role().equals( req.role() ) && optional == req.optional() && hint.equals( req.hint() )
                && Arrays.equals( hints, req.hints() );
        }

        return false;
    }

    @Override
    public int hashCode()
    {
        // Sum over members of (127 * memberName.hashCode() ^ memberValue
        // .hashCode()), exactly as specified by Annotation.hashCode().
        return ( 127 * "role".hashCode() ^ role().hashCode() )
            + ( 127 * "optional".hashCode() ^ Boolean.valueOf( optional ).hashCode() )
            + ( 127 * "hint".hashCode() ^ hint.hashCode() ) + ( 127 * "hints".hashCode() ^ Arrays.hashCode( hints ) );
    }

    @Override
    public String toString()
    {
        return String.format( "@%s(hints=%s, optional=%b, role=%s, hint=%s)", Requirement.class.getName(),
                              Arrays.toString( hints ), Boolean.valueOf( optional ), role(), hint );
    }

    public Class<? extends Annotation> annotationType()
    {
        return Requirement.class;
    }
}
| epl-1.0 |
alastrina123/debrief | org.mwc.asset.comms/docs/restlet_src/org.restlet.ext.xml/org/restlet/ext/xml/internal/ContextResolver.java | 3540 | /**
* Copyright 2005-2010 Noelios Technologies.
*
* The contents of this file are subject to the terms of one of the following
* open source licenses: LGPL 3.0 or LGPL 2.1 or CDDL 1.0 or EPL 1.0 (the
* "Licenses"). You can select the license that you prefer but you may not use
* this file except in compliance with one of these Licenses.
*
* You can obtain a copy of the LGPL 3.0 license at
* http://www.opensource.org/licenses/lgpl-3.0.html
*
* You can obtain a copy of the LGPL 2.1 license at
* http://www.opensource.org/licenses/lgpl-2.1.php
*
* You can obtain a copy of the CDDL 1.0 license at
* http://www.opensource.org/licenses/cddl1.php
*
* You can obtain a copy of the EPL 1.0 license at
* http://www.opensource.org/licenses/eclipse-1.0.php
*
* See the Licenses for the specific language governing permissions and
* limitations under the Licenses.
*
* Alternatively, you can obtain a royalty free commercial license with less
* limitations, transferable or non-transferable, directly at
* http://www.noelios.com/products/restlet-engine
*
* Restlet is a registered trademark of Noelios Technologies.
*/
package org.restlet.ext.xml.internal;
import java.io.IOException;
import java.util.logging.Level;
import javax.xml.transform.Source;
import javax.xml.transform.TransformerException;
import javax.xml.transform.URIResolver;
import javax.xml.transform.stream.StreamSource;
import org.restlet.Context;
import org.restlet.Request;
import org.restlet.Response;
import org.restlet.data.Method;
import org.restlet.data.Reference;
/**
* URI resolver based on a Restlet Context instance.
*
* @author Jerome Louvel
*/
public class ContextResolver implements URIResolver {
/** The Restlet context. */
private final Context context;
/**
* Constructor.
*
* @param context
* The Restlet context.
*/
public ContextResolver(Context context) {
this.context = context;
}
/**
* Resolves a target reference into a Source document.
*
* @see javax.xml.transform.URIResolver#resolve(java.lang.String,
* java.lang.String)
*/
public Source resolve(String href, String base) throws TransformerException {
Source result = null;
if (this.context != null) {
Reference targetRef = null;
if ((base != null) && !base.equals("")) {
// Potentially a relative reference
Reference baseRef = new Reference(base);
targetRef = new Reference(baseRef, href);
} else {
// No base, assume "href" is an absolute URI
targetRef = new Reference(href);
}
String targetUri = targetRef.getTargetRef().toString();
Response response = this.context.getClientDispatcher().handle(
new Request(Method.GET, targetUri));
if (response.getStatus().isSuccess()
&& response.isEntityAvailable()) {
try {
result = new StreamSource(response.getEntity().getStream());
result.setSystemId(targetUri);
} catch (IOException e) {
this.context.getLogger().log(Level.WARNING,
"I/O error while getting the response stream", e);
}
}
}
return result;
}
} | epl-1.0 |
davleopo/graal-core | graal/com.oracle.graal.compiler.test/src/com/oracle/graal/compiler/test/InstalledCodeInvalidationTest.java | 2684 | /*
* Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.compiler.test;
import jdk.vm.ci.code.InstalledCode;
import jdk.vm.ci.code.InvalidInstalledCodeException;
import org.junit.Test;
import com.oracle.graal.api.directives.GraalDirectives;
public class InstalledCodeInvalidationTest extends GraalCompilerTest {

    /**
     * Recursively re-enters the given installed code to build up several
     * compiled activations, then deoptimizes and invalidates from the deepest
     * frame. The order of operations in the depth==1 branch matters: the
     * deopt must happen before the alive/valid assertions and the explicit
     * invalidate() call.
     *
     * @param code the installed code compiled from this very method
     * @param depth remaining recursion depth; activations are created until
     *            it reaches 1
     */
    public void recurse(InstalledCode code, int depth) throws InvalidInstalledCodeException {
        if (depth > 1) {
            /*
             * Recurse a few times to ensure there are multiple activations.
             */
            code.executeVarargs(this, code, depth - 1);
        } else {
            /*
             * Deoptimize this activation and make the compiled code no longer usable.
             */
            GraalDirectives.deoptimizeAndInvalidate();
            // After deoptimizeAndInvalidate the code object still exists
            // (alive) but must no longer be entered (invalid) ...
            assert code.isAlive() && !code.isValid();
            // ... and an explicit invalidate() then removes it entirely.
            code.invalidate();
            assert !code.isAlive();
        }
        if (GraalDirectives.inCompiledCode()) {
            /*
             * If this still in compiled code then the deoptimizeAndInvalidate call above didn't
             * remove all existing activations.
             */
            throw new InternalError();
        }
    }

    /**
     * Test that after uncommon trapping in an installed code it's still possible to invalidate all
     * existing activations of that installed code.
     *
     * @throws InvalidInstalledCodeException
     */
    @Test
    public void testInstalledCodeInvalidation() throws InvalidInstalledCodeException {
        // Compile recurse(), then call it through the installed code with
        // depth 3 so that multiple compiled activations exist when the
        // deepest frame triggers deoptimization.
        InstalledCode code = getCode(getMetaAccess().lookupJavaMethod(getMethod("recurse")));
        code.executeVarargs(this, code, 3);
    }
}
| gpl-2.0 |
AntumDeluge/arianne-stendhal | src/games/stendhal/client/gui/chattext/CharacterMap.java | 3590 | /***************************************************************************
* (C) Copyright 2013 Faiumoni e.V. *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.gui.chattext;
import java.awt.GridLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.AbstractButton;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JMenuItem;
import javax.swing.JPopupMenu;
import javax.swing.text.BadLocationException;
import javax.swing.text.JTextComponent;
import org.apache.log4j.Logger;
/**
* A drop down menu for selecting special characters that players may want to
* use in chat.
*/
public class CharacterMap extends JButton {

	/**
	 * Create a new CharacterMap.
	 *
	 * @param textField text field where selected character should be inserted
	 */
	public CharacterMap(final JTextComponent textField) {
		super("☺");
		setFocusable(false);
		setToolTipText("Insert a special character");

		final JPopupMenu popup = new JPopupMenu();
		// Clicking the button opens the popup, right-justified to the button.
		addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent ev) {
				popup.show(CharacterMap.this, getWidth() - popup.getPreferredSize().width, getHeight());
			}
		});

		// Shared handler for all menu items: insert the item's label text at
		// the current caret position of the target text field.
		ActionListener inserter = new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent ev) {
				Object source = ev.getSource();
				if (source instanceof AbstractButton) {
					String symbol = ((AbstractButton) source).getText();
					int caret = textField.getCaretPosition();
					try {
						textField.getDocument().insertString(caret, symbol, null);
					} catch (BadLocationException ex) {
						Logger.getLogger(CharacterMap.class).error("Bug", ex);
					}
				}
			}
		};
		fillMenu(popup, inserter);
	}

	/**
	 * Fill the popup menu with characters.
	 *
	 * @param menu popup menu
	 * @param listener action listener that should be attached to the menu items
	 */
	private void fillMenu(JComponent menu, ActionListener listener) {
		String[][] table = {
			{ "☺", "☹", "😃", "😲", "😇", "😈", "😊", "😌", "😍", "😎", "😏", "😐", "😴" },
			{ "🐭", "🐮", "🐱", "🐵", "🐯", "🐰", "🐴", "🐶", "🐷", "🐹", "🐺", "🐻", "🐼" },
			{ "♥", "♡", "💔", "💡", "☠" },
			{ "£", "$", "€", "₤", "₱", "¥" },
			{ "♩", "♪", "♫", "♬", "♭", "♮", "♯", "𝄞", "𝄢" } };

		// Grid width follows the first (widest) row; rows wrap automatically.
		menu.setLayout(new GridLayout(0, table[0].length));
		Insets insets = new Insets(1, 1, 1, 1);
		// NOTE(review): this applies to the button itself, not the menu items
		// — kept as in the original.
		setMargin(insets);

		for (String[] line : table) {
			for (String symbol : line) {
				JMenuItem item = new JMenuItem(symbol);
				item.setMargin(insets);
				item.addActionListener(listener);
				item.setBorder(null);
				item.setHorizontalTextPosition(CENTER);
				menu.add(item);
			}
		}
	}
}
| gpl-2.0 |
AntumDeluge/arianne-stendhal | tests/games/stendhal/server/core/reflectiondebugger/MockChildClass.java | 1470 | /* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.server.core.reflectiondebugger;
import java.io.Serializable;
/**
* This class is used to test the reflection code.
*
* @author hendrik
*/
public class MockChildClass extends MockParentClass implements Serializable {

	// note this serialVersionUID is automatically created by emma
	// so we create it here anyway to simplify testing with and without
	// emma
	private static final long serialVersionUID = 550331563324952898L;

	// NOTE(review): the fields below are inspected via reflection by the
	// reflection-debugger tests, so their names, modifiers and initial
	// values are part of the expected test fixture and must stay stable.
	public boolean childPublicBoolean = true;

	// this class is used by reflection
	@SuppressWarnings("unused")
	private float childPrivateFloat = 2.0f;
}
| gpl-2.0 |
timp21337/melati-old | melati/src/main/java/org/melati/servlet/FormDataAdaptorFactory.java | 3293 | /*
* $Source$
* $Revision$
*
* Copyright (C) 2000 Myles Chippendale
*
* Part of Melati (http://melati.org), a framework for the rapid
* development of clean, maintainable web applications.
*
* Melati is free software; Permission is granted to copy, distribute
* and/or modify this software under the terms either:
*
* a) the GNU General Public License as published by the Free Software
* Foundation; either version 2 of the License, or (at your option)
* any later version,
*
* or
*
* b) any version of the Melati Software License, as published
* at http://melati.org
*
* You should have received a copy of the GNU General Public License and
* the Melati Software License along with this program;
* if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA to obtain the
* GNU General Public License and visit http://melati.org to obtain the
* Melati Software License.
*
* Feel free to contact the Developers of Melati (http://melati.org),
* if you would like to work out a different arrangement than the options
* outlined here. It is our intention to allow Melati to be used by as
* wide an audience as possible.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* Contact details for copyright holder:
*
* Mylesc Chippendale <mylesc AT paneris.org>
* http://paneris.org/
* 29 Stanley Road, Oxford, OX4 1QY, UK
*/
package org.melati.servlet;
import java.util.Hashtable;
import org.melati.Melati;
/**
* A way to implement policies about how to save uploaded files.
*/
public abstract class FormDataAdaptorFactory {

  /**
   * We need to establish the user and set up any request specific melati stuff
   * so that we can verify the user has permission for this task, and use
   * melati Table / Objects to manipulate what the FormDataAdaptor does.
   *
   * Please note that when uploading a file for a record that has not yet been
   * inserted (ie whilst adding), melati.getObject will return null.
   *
   * @param melati the {@link Melati}
   * @param field a {@link MultipartFormField} to process
   * @return a new {@link FormDataAdaptor}
   */
  public FormDataAdaptor get(final Melati melati,
                             final MultipartFormField field) {
    // The previous implementation routed the adaptor through a single-entry
    // Hashtable whose two branches were identical apart from the optional
    // loadTableAndObject() call; that indirection added nothing, so it is
    // collapsed here. (One behavioral nuance removed: Hashtable.put() used
    // to throw a NullPointerException when getIt() returned null; a null
    // adaptor is now simply returned to the caller.)
    if (melati.getDatabase() != null) {
      melati.loadTableAndObject();
    }
    return getIt(melati, field);
  }

  /**
   * Implements different policies for saving uploaded files depending
   * on the details of the file and the state of the application.
   *
   * @param melati the state of (this call to) the application
   * @param field details of the uploaded file
   * @return an adaptor which will save the contents of the file
   */
  public abstract FormDataAdaptor getIt(Melati melati,
                                        MultipartFormField field);
}
| gpl-2.0 |
mvehar/zanata-server | zanata-war/src/main/java/org/zanata/service/SearchService.java | 2902 | /*
* Copyright 2010, Red Hat, Inc. and individual contributors as indicated by the
* @author tags. See the copyright.txt file in the distribution for a full
* listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This software is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this software; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA, or see the FSF
* site: http://www.fsf.org.
*/
package org.zanata.service;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import org.zanata.rest.dto.ReindexStatus;
/**
* @author Carlos Munoz <a
* href="mailto:camunoz@redhat.com">camunoz@redhat.com</a>
*/
public interface SearchService {

    /**
     * Requests the start of a system reindex. NOTE: This is not a stable,
     * supported API. It might change from release to release.
     *
     * @param purgeAll
     *            when {@code true}, purges all indexes first
     * @param indexAll
     *            when {@code true}, reindexes all elements
     * @param optimizeAll
     *            when {@code true}, optimizes all indexes
     * @return OK(200) with the indexing process' status, or INTERNAL SERVER
     *         ERROR(500) on an unexpected server-side failure
     */
    @POST
    @Path("/reindex/start")
    ReindexStatus startReindex(@QueryParam("purge") boolean purgeAll,
            @QueryParam("index") boolean indexAll,
            @QueryParam("optimize") boolean optimizeAll);

    /**
     * Returns the status of a system search reindex operation. NOTE: This is
     * not a stable, supported API. It might change from release to release.
     *
     * @return OK(200) with the status of the indexing process currently in
     *         progress, NOT FOUND(404) when no indexing task is running, or
     *         INTERNAL SERVER ERROR(500) on an unexpected server-side failure
     */
    @GET
    @Path("/reindex")
    ReindexStatus getReindexStatus();
}
| gpl-2.0 |
afrous/Cynthia | src/main/java/com/sogou/qadev/service/cynthia/dao/HomeFilterAccessSessionMySQL.java | 2671 | package com.sogou.qadev.service.cynthia.dao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import com.sogou.qadev.service.cynthia.service.DbPoolConnection;
/**
* @description:home filter db processor
* @author:liming
* @mail:liming@sogou-inc.com
* @date:2014-5-6 下午5:37:53
* @version:v1.0
*/
public class HomeFilterAccessSessionMySQL {

	public HomeFilterAccessSessionMySQL() {
	}

	/**
	 * Fetches the id of the (still valid) filter the given user selected as
	 * home filter.
	 *
	 * @param userName login name of the user
	 * @return the filter id, or the empty string when the user has no valid
	 *         home filter or a database error occurred
	 */
	public String getHomeFilter(String userName)
	{
		PreparedStatement pstm = null;
		Connection conn = null;
		ResultSet rs = null;
		String filterIdStr = "";
		try
		{
			conn = DbPoolConnection.getInstance().getReadConnection();
			// Join against "filter" so ids of deleted/invalidated filters are
			// never returned.
			String sql = "select filter_id from home_filter as A JOIN filter as B on A.filter_id = B.id where A.user_name= ? and B.is_valid = 1";
			pstm = conn.prepareStatement(sql);
			pstm.setString(1, userName);
			rs = pstm.executeQuery();
			if(rs.next())
			{
				filterIdStr = rs.getString("filter_id");
			}
		}catch(Exception e)
		{
			e.printStackTrace();
		}finally
		{
			DbPoolConnection.getInstance().closeAll(rs, pstm, conn);
		}
		// Bug fix: the return statement used to live inside "finally", which
		// silently swallows any Throwable raised while closing resources.
		return filterIdStr;
	}

	/**
	 * Stores the home filter selection for a user.
	 *
	 * @param userName login name of the user
	 * @param filterId id of the selected filter
	 * @return true when the row was inserted, false on failure
	 */
	public boolean addHomeFilter(String userName,String filterId)
	{
		PreparedStatement pstm = null;
		Connection conn = null;
		try
		{
			conn = DbPoolConnection.getInstance().getConnection();
			String sql = "insert into home_filter (user_name,filter_id) values (?,?)";
			pstm = conn.prepareStatement(sql);
			pstm.setString(1, userName);
			pstm.setString(2, filterId);
			// Bug fix: execute() reports whether the first result is a
			// ResultSet, which is always false for INSERT — so the old code
			// returned false even on success. executeUpdate() yields the
			// affected row count instead.
			return pstm.executeUpdate() > 0;
		}catch(Exception e)
		{
			e.printStackTrace();
		}finally
		{
			DbPoolConnection.getInstance().closeAll(pstm, conn);
		}
		return false;
	}

	/**
	 * Updates the home filter selection for a user.
	 *
	 * @param userName login name of the user
	 * @param filterId id of the newly selected filter
	 * @return true when at least one row changed; false on failure or when
	 *         the stored value was already filterId (0 rows affected)
	 */
	public boolean updateHomeFilter(String userName,String filterId)
	{
		PreparedStatement pstm = null;
		Connection conn = null;
		try
		{
			conn = DbPoolConnection.getInstance().getConnection();
			String sql = "update home_filter set filter_id=? where user_name=?";
			pstm = conn.prepareStatement(sql);
			// Parameters bound in SQL order (1 = filter_id, 2 = user_name);
			// the old code set index 2 before index 1, which worked but was
			// needlessly confusing.
			pstm.setString(1, filterId);
			pstm.setString(2, userName);
			// Bug fix: same execute() vs executeUpdate() problem as in
			// addHomeFilter() — UPDATE never produces a ResultSet, so the old
			// code always returned false.
			return pstm.executeUpdate() > 0;
		}catch(Exception e)
		{
			e.printStackTrace();
		}finally
		{
			DbPoolConnection.getInstance().closeAll(pstm, conn);
		}
		return false;
	}
}
| gpl-2.0 |
Xtrememicrotech/XtremeD-Dev | host/src/main/java/org/area515/resinprinter/display/PrinterDisplayFrame.java | 5168 | package org.area515.resinprinter.display;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.GraphicsDevice;
import java.awt.HeadlessException;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import javax.swing.JFrame;
import javax.swing.JPanel;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.area515.resinprinter.printer.Printer.DisplayState;
import org.area515.util.Log4jUtil;
/**
 * Full-screen (or simulated) Swing frame used as the output surface of a resin
 * printer's projector/UV display. What is painted is driven by
 * {@link DisplayState}: a blank screen, a calibration pattern, a grid, or the
 * current print-slice image.
 */
public class PrinterDisplayFrame extends JFrame implements GraphicsOutputInterface {
    private static final long serialVersionUID = 5024551291098098753L;
    private static final Logger logger = LogManager.getLogger();

    // Key used with Log4jUtil's global timers to measure assign->realize latency.
    // Made unique per instance by appending hashCode() in the constructors,
    // hence deliberately neither static nor final despite the constant-style name.
    private String IMAGE_REALIZE_TIMER = "Image Realize:";

    private DisplayState displayState = DisplayState.Blank; // what paintComponent draws next
    private int gridSquareSize;        // edge length in pixels for DisplayState.Grid
    private int calibrationX;          // calibration pattern X dimension in pixels
    private int calibrationY;          // calibration pattern Y dimension in pixels
    private BufferedImage displayImage; // current slice image for DisplayState.CurrentSlice
    private int sliceNumber;           // slice counter overlaid when simulating
    private boolean isSimulatedDisplay;
    private String displayId;          // NOTE(review): only set by the String constructor;
                                       // getIDstring() returns null for real devices — confirm intended.

    /**
     * Panel that renders the current display state. All drawing happens in
     * paintComponent so Swing's double buffering applies.
     */
    private class DoubleBufferedJPanel extends JPanel {
        private static final long serialVersionUID = 5629943117146058839L;

        @Override
        protected void paintComponent(Graphics g) {
            //we need to add this method back in because some UV light engines require it.
            super.paintComponent(g);
            Graphics2D g2 = (Graphics2D)g;
            Rectangle screenSize = getGraphicsConfiguration().getBounds();
            switch (displayState) {
            case Blank :
                // Paint the whole surface black so no stray UV light escapes.
                g2.setBackground(Color.black);
                g2.clearRect(0, 0, screenSize.width, screenSize.height);
                logger.debug("Blank realized:{}", () -> Log4jUtil.completeGlobalTimer(IMAGE_REALIZE_TIMER));
                return;
            case Grid :
                GraphicsOutputInterface.showGrid(g2, screenSize, gridSquareSize);
                logger.debug("Grid realized:{}", () -> Log4jUtil.completeGlobalTimer(IMAGE_REALIZE_TIMER));
                return;
            case Calibration :
                GraphicsOutputInterface.showCalibration(g2, screenSize, calibrationX, calibrationY);
                logger.debug("Calibration realized:{}", () -> Log4jUtil.completeGlobalTimer(IMAGE_REALIZE_TIMER));
                return;
            case CurrentSlice :
                logger.trace("Writing paintComponent1aboutToDisplay:{}", () -> Log4jUtil.logImage(displayImage, "paintComponent1aboutToDisplay.png"));
                // Center the slice image on the display surface.
                g2.drawImage(displayImage, null, screenSize.width / 2 - displayImage.getWidth() / 2, screenSize.height / 2 - displayImage.getHeight() / 2);
                if (isSimulatedDisplay) {
                    // Overlay the slice number so a human can follow the simulation.
                    g2.setColor(Color.RED);
                    g2.setFont(getFont());
                    g2.drawString("Slice:" + sliceNumber, getInsets().left, getInsets().top + g2.getFontMetrics().getHeight());
                }
                logger.debug("Image realized:{}", () -> Log4jUtil.completeGlobalTimer(IMAGE_REALIZE_TIMER));
                return;
            }
        }
    }

    /**
     * Creates a simulated display window (no physical device).
     *
     * @param displayId identifier reported by {@link #getIDstring()}
     */
    public PrinterDisplayFrame(String displayId) throws HeadlessException {
        super();
        this.displayId = displayId;
        this.isSimulatedDisplay = true;
        getRootPane().setBackground(Color.black);
        getContentPane().setBackground(Color.black);
        add(new DoubleBufferedJPanel());
        IMAGE_REALIZE_TIMER += hashCode(); // make the timer key unique per frame
    }

    /**
     * Creates a frame bound to a real graphics device (e.g. the projector).
     *
     * @param device the physical display to render on
     */
    public PrinterDisplayFrame(GraphicsDevice device) {
        super(device.getDefaultConfiguration());
        this.isSimulatedDisplay = false;
        getRootPane().setBackground(Color.black);
        getContentPane().setBackground(Color.black);
        add(new DoubleBufferedJPanel());
        IMAGE_REALIZE_TIMER += hashCode(); // make the timer key unique per frame
    }

    /** Returns the state that will be painted on the next repaint. */
    public DisplayState getDisplayState() {
        return displayState;
    }

    /** Sets the state to paint; callers must also invoke repaint(). */
    public void setDisplayState(DisplayState displayState) {
        this.displayState = displayState;
    }

    /** Resets the simulated slice counter back to zero. */
    public void resetSliceCount() {
        sliceNumber = 0;
    }

    /** Blanks the display (all black) asynchronously via repaint(). */
    public void showBlankImage() {
        logger.debug("Blank assigned:{}", () -> Log4jUtil.startGlobalTimer(IMAGE_REALIZE_TIMER));
        setDisplayState(DisplayState.Blank);
        repaint();
    }

    /**
     * Shows the calibration pattern.
     *
     * @param xPixels pattern width in pixels
     * @param yPixels pattern height in pixels
     */
    public void showCalibrationImage(int xPixels, int yPixels) {
        logger.debug("Calibration assigned:{}", () -> Log4jUtil.startGlobalTimer(IMAGE_REALIZE_TIMER));
        setDisplayState(DisplayState.Calibration);
        calibrationX = xPixels;
        calibrationY = yPixels;
        repaint();
    }

    /**
     * Shows the grid pattern.
     *
     * @param pixels grid square edge length in pixels
     */
    public void showGridImage(int pixels) {
        logger.debug("Grid assigned:{}", () -> Log4jUtil.startGlobalTimer(IMAGE_REALIZE_TIMER));
        setDisplayState(DisplayState.Grid);
        gridSquareSize = pixels;
        repaint();
    }

    /**
     * Shows a print-slice image.
     *
     * @param image the slice bitmap to display
     * @param performFullUpdate when true the slice counter is advanced
     */
    public void showImage(BufferedImage image, boolean performFullUpdate) {
        logger.debug("Image assigned:{}", () -> Log4jUtil.startGlobalTimer(IMAGE_REALIZE_TIMER));
        if (performFullUpdate) {
            sliceNumber++;
        }
        setDisplayState(DisplayState.CurrentSlice);
        displayImage = image;
        repaint();
    }

    /** Bounds of the graphics configuration this frame renders into. */
    @Override
    public Rectangle getBoundary() {
        return getGraphicsConfiguration().getBounds();
    }

    /** A Swing frame can always accept a new image; never reports busy. */
    @Override
    public boolean isDisplayBusy() {
        return false;
    }

    @Override
    public String getIDstring() {
        return displayId;
    }

    /** Not supported here; IDs are assigned externally. */
    @Override
    public String buildIDString() {
        throw new IllegalStateException("You should never call buildIDString from this class");
    }

    /** Not supported here; this class is already an initialized display. */
    @Override
    public GraphicsOutputInterface initializeDisplay(String displayId) {
        throw new IllegalStateException("You should never call initializeDisplay from this class");
    }
}
| gpl-3.0 |
nvoron23/opensearchserver | src/main/java/com/jaeksoft/searchlib/script/commands/IndexDocumentCommands.java | 4085 | /**
* License Agreement for OpenSearchServer
*
* Copyright (C) 2013-2015 Emmanuel Keller / Jaeksoft
*
* http://www.open-search-server.com
*
* This file is part of OpenSearchServer.
*
* OpenSearchServer is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* OpenSearchServer is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenSearchServer.
* If not, see <http://www.gnu.org/licenses/>.
**/
package com.jaeksoft.searchlib.script.commands;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import com.jaeksoft.searchlib.Client;
import com.jaeksoft.searchlib.SearchLibException;
import com.jaeksoft.searchlib.analysis.LanguageEnum;
import com.jaeksoft.searchlib.index.IndexDocument;
import com.jaeksoft.searchlib.script.CommandAbstract;
import com.jaeksoft.searchlib.script.CommandEnum;
import com.jaeksoft.searchlib.script.ScriptCommandContext;
import com.jaeksoft.searchlib.script.ScriptException;
/**
 * Script commands that create, populate and flush {@link IndexDocument}
 * instances from a running OpenSearchServer script.
 */
public class IndexDocumentCommands {

    /** INDEX_DOCUMENT_NEW: starts a fresh document in the script context. */
    public static class New extends CommandAbstract {

        public New() {
            super(CommandEnum.INDEX_DOCUMENT_NEW);
        }

        @Override
        public void run(ScriptCommandContext context, String id,
                String... parameters) throws ScriptException {
            // Parameter 1 optionally names the document language.
            LanguageEnum language = LanguageEnum.findByNameOrCode(getParameterString(1));
            context.addIndexDocument(new IndexDocument(language));
        }
    }

    /** INDEX_DOCUMENT_ADD_VALUE: adds a field/value pair to the current document. */
    public static class AddValue extends CommandAbstract {

        public AddValue() {
            super(CommandEnum.INDEX_DOCUMENT_ADD_VALUE);
        }

        @Override
        public void run(ScriptCommandContext context, String id,
                String... parameters) throws ScriptException {
            checkParameters(2, parameters);
            IndexDocument document = context.getIndexDocument();
            if (document == null)
                throwError("No index document has been created. Call INDEX_DOCUMENT_NEW.");
            String fieldName = getParameterString(0);
            // Values may contain script variables; expand them before indexing.
            String fieldValue = context.replaceVariables(getParameterString(1));
            Float boost = getParameterFloat(2);
            document.add(fieldName, fieldValue,
                    boost != null ? boost.floatValue() : 1.0F);
        }
    }

    /** INDEX_DOCUMENT_ADD_NOW: adds the current timestamp, formatted, to a field. */
    public static class AddNow extends CommandAbstract {

        public AddNow() {
            super(CommandEnum.INDEX_DOCUMENT_ADD_NOW);
        }

        @Override
        public void run(ScriptCommandContext context, String id,
                String... parameters) throws ScriptException {
            checkParameters(2, parameters);
            IndexDocument document = context.getIndexDocument();
            if (document == null)
                throwError("No index document has been created. Call INDEX_DOCUMENT_NEW.");
            String fieldName = getParameterString(0);
            // Parameter 1 is a SimpleDateFormat pattern, possibly with variables.
            String pattern = context.replaceVariables(getParameterString(1));
            String timestamp = new SimpleDateFormat(pattern).format(new Date());
            Float boost = getParameterFloat(2);
            document.add(fieldName, timestamp,
                    boost != null ? boost.floatValue() : 1.0F);
        }
    }

    /** INDEX_DOCUMENT_UPDATE: pushes all pending documents into the index. */
    public static class Update extends CommandAbstract {

        public Update() {
            super(CommandEnum.INDEX_DOCUMENT_UPDATE);
        }

        @Override
        public void run(ScriptCommandContext context, String id,
                String... parameters) throws ScriptException {
            List<IndexDocument> pending = context.getIndexDocuments();
            if (CollectionUtils.isEmpty(pending))
                return; // nothing queued: a no-op, not an error
            Client client = (Client) context.getConfig();
            try {
                context.clearIndexDocuments(client.updateDocuments(pending));
            } catch (IOException e) {
                throw new ScriptException(e);
            } catch (SearchLibException e) {
                throw new ScriptException(e);
            }
        }
    }
}
| gpl-3.0 |
isel-leic-mpd/mpd-2017-i41d | aula33-weather-async/src/main/java/util/Comparators.java | 2718 | /*
* Copyright (c) 2017, Miguel Gamboa
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package util;
import java.util.Comparator;
import java.util.function.Function;
/**
* @author Miguel Gamboa
* created on 26-04-2017
*/
/**
 * Factory for {@link ComparatorBy} instances: comparators built from a key
 * extractor that can be inverted or chained with tie-breaker keys.
 */
public class Comparators {

    /**
     * Builds a comparator ordering elements by the natural order of the key
     * produced by {@code keyExtractor}.
     */
    public static <T, R extends Comparable<R>> ComparatorBy<T> comparing(Function<T, R> keyExtractor) {
        return (left, right) -> keyExtractor.apply(left).compareTo(keyExtractor.apply(right));
    }

    /** A {@link Comparator} enriched with fluent inversion and chaining. */
    public interface ComparatorBy<T> extends Comparator<T> {

        /** Returns a comparator imposing the reverse of this ordering. */
        default ComparatorBy<T> invert() {
            return (left, right) -> compare(right, left);
        }

        /**
         * Returns a comparator that falls back to {@code keyExtractor}'s key
         * whenever this comparator considers two elements equal.
         */
        default <R extends Comparable<R>> ComparatorBy<T> thenBy(Function<T, R> keyExtractor) {
            // Build the tie-breaker once; the returned lambda captures it.
            Comparator<T> tieBreaker = Comparators.comparing(keyExtractor);
            return (left, right) -> {
                int primary = compare(left, right);
                return primary != 0 ? primary : tieBreaker.compare(left, right);
            };
        }
    }
}
/*
public class Comparators {
public static <T, R extends Comparable<R>> ComparatorBy<T> comparing(Function<T, R> prop) {
Comparator<T> cmp = (o1, o2) -> prop.apply(o1).compareTo(prop.apply(o2));
return new ComparatorBy<T>(cmp);
}
public static class ComparatorBy<T> implements Comparator<T> {
final Comparator<T> cmp;
public ComparatorBy(Comparator<T> cmp) {
this.cmp = cmp;
}
@Override
public int compare(T o1, T o2) {
return cmp.compare(o1, o2);
}
public ComparatorBy<T> invert() {
return new ComparatorBy<>((o1, o2) -> cmp.compare(o2, o1));
}
public <R extends Comparable<R>> ComparatorBy<T> thenBy(Function<T, R> prop) {
Comparator<T> then = Comparators.comparing(prop); // Comparador criado 1 x
return new ComparatorBy<>((o1, o2) -> {
int res = compare(o1, o2);
if(res != 0) return res;
return then.compare(o1, o2); // Captura da variável then
});
}
}
}
*/ | gpl-3.0 |
Scrik/Cauldron-1 | eclipse/cauldron/src/main/java/org/bukkit/entity/minecart/PoweredMinecart.java | 269 | package org.bukkit.entity.minecart;
import org.bukkit.entity.Minecart;
/**
 * Represents a powered minecart. A powered minecart moves on its own when a
 * player deposits {@link org.bukkit.Material#COAL fuel}.
 *
 * <p>This is a marker interface: it declares no methods of its own and all
 * behaviour is inherited from {@link Minecart}.
 */
public interface PoweredMinecart extends Minecart {
} | gpl-3.0 |
jdahaldev/itsimple | src/gui/SplashScreen.java | 3009 | /***
* itSIMPLE: Integrated Tool Software Interface for Modeling PLanning Environments
*
* Copyright (C) 2007,2008 Universidade de Sao Paulo
*
* This file is part of itSIMPLE.
*
* itSIMPLE is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version. Other licenses might be available
* upon written agreement.
*
* itSIMPLE is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with itSIMPLE. If not, see <http://www.gnu.org/licenses/>.
*
* Authors: Tiago S. Vaquero,
* Victor Romero.
**/
package src.gui;
// SplashScreen.java
//A simple application to show a title screen in the center of the screen
//for the amount of time given in the constructor. This class includes
//a sample main() method to test the splash screen, but it's meant for use
//with other applications.
//
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Image;
import java.awt.Toolkit;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.ImageIcon;
import javax.swing.JWindow;
/**
 * Borderless splash window shown centered on the screen for a fixed duration.
 * The splash can be dismissed early with a mouse click. {@link #showSplash()}
 * blocks the calling thread for the configured duration; implementing
 * {@link Runnable} allows the splash to be shown on a background thread.
 */
public class SplashScreen extends JWindow implements Runnable {

    private static final long serialVersionUID = -7720533134526695367L;

    // How long the splash stays visible, in milliseconds.
    private int duration;
    // Splash image, loaded lazily by showSplash().
    private Image splash;
    int width;
    int height;

    /**
     * @param d display time in milliseconds
     */
    public SplashScreen(int d) {
        duration = d;
        setAlwaysOnTop(true);
        // Let the user dismiss the splash early with a click.
        addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(MouseEvent e) {
                setVisible(false);
                dispose();
            }
        });
    }

    /**
     * Loads the splash image, centers the window on the default screen,
     * displays it for {@code duration} milliseconds, then hides it.
     * Blocks the calling thread while the splash is visible.
     */
    public void showSplash() {
        ImageIcon splashImage = new ImageIcon("resources/images/Splash.png");
        splash = splashImage.getImage();
        width = splashImage.getIconWidth();
        height = splashImage.getIconHeight();
        // Set the window's bounds, centering the window.
        Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();
        int x = (screen.width - width) / 2;
        int y = (screen.height - height) / 2;
        setBounds(x, y, width, height);
        // Display it.
        setVisible(true);
        // Wait for the configured duration.
        pauseExec(duration);
        // Close it.
        setVisible(false);
    }

    /** Shows the splash, then terminates the JVM. */
    public void showSplashAndExit() {
        showSplash();
        System.exit(0);
    }

    /**
     * Sleeps for the given number of milliseconds.
     *
     * @param dur sleep time in milliseconds
     */
    private void pauseExec(long dur) {
        try {
            Thread.sleep(dur);
        } catch (InterruptedException ie) {
            // BUG FIX: the interrupt was silently swallowed, hiding the
            // interruption from callers. Restore the thread's interrupt
            // status so interested code can still observe it.
            Thread.currentThread().interrupt();
        }
    }

    @Override
    public void paint(Graphics g) {
        g.drawImage(splash, 0, 0, width, height, this);
    }

    @Override
    public void run() {
        showSplash();
    }
}
| gpl-3.0 |
danielyc/test-1.9.4 | build/tmp/recompileMc/sources/net/minecraft/item/ItemExpBottle.java | 1697 | package net.minecraft.item;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.item.EntityExpBottle;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.SoundEvents;
import net.minecraft.stats.StatList;
import net.minecraft.util.ActionResult;
import net.minecraft.util.EnumActionResult;
import net.minecraft.util.EnumHand;
import net.minecraft.util.SoundCategory;
import net.minecraft.world.World;
import net.minecraftforge.fml.relauncher.Side;
import net.minecraftforge.fml.relauncher.SideOnly;
/**
 * The "Bottle o' Enchanting" item. Right-clicking consumes the bottle
 * (outside creative mode) and throws an experience-bottle projectile.
 */
public class ItemExpBottle extends Item
{
    public ItemExpBottle()
    {
        this.setCreativeTab(CreativeTabs.MISC);
    }

    /** Always rendered with the enchantment glint. */
    @SideOnly(Side.CLIENT)
    public boolean hasEffect(ItemStack stack)
    {
        return true;
    }

    /**
     * Handles a right-click: consumes one bottle (unless in creative mode),
     * plays the throw sound on both sides, and spawns the projectile entity
     * on the server side only.
     */
    public ActionResult<ItemStack> onItemRightClick(ItemStack itemStackIn, World worldIn, EntityPlayer playerIn, EnumHand hand)
    {
        // Creative-mode players do not consume the item.
        if (!playerIn.capabilities.isCreativeMode)
        {
            --itemStackIn.stackSize;
        }

        // Play the throw sound with a slightly randomized pitch.
        worldIn.playSound((EntityPlayer)null, playerIn.posX, playerIn.posY, playerIn.posZ, SoundEvents.ENTITY_EXPERIENCE_BOTTLE_THROW, SoundCategory.NEUTRAL, 0.5F, 0.4F / (itemRand.nextFloat() * 0.4F + 0.8F));

        // Spawn the projectile server-side only; the client just hears the sound.
        if (!worldIn.isRemote)
        {
            EntityExpBottle entityexpbottle = new EntityExpBottle(worldIn, playerIn);
            entityexpbottle.setHeadingFromThrower(playerIn, playerIn.rotationPitch, playerIn.rotationYaw, -20.0F, 0.7F, 1.0F);
            worldIn.spawnEntityInWorld(entityexpbottle);
        }

        // Record the use statistic for this item.
        playerIn.addStat(StatList.getObjectUseStats(this));
        return new ActionResult(EnumActionResult.SUCCESS, itemStackIn);
    }
} | gpl-3.0 |
AdrianLxM/AndroidAPS | app/src/main/java/com/cozmo/danar/util/BleCommandUtil.java | 7137 | package com.cozmo.danar.util;
import android.content.Context;
import info.nightscout.androidaps.MainApp;
/**
 * Constants and JNI helpers for the DanaR insulin pump's BLE protocol.
 * Packet encryption/decryption is delegated to the native
 * {@code BleCommandUtil} library loaded in the static initializer.
 */
public class BleCommandUtil {

    // ---- Packet type bytes ------------------------------------------------
    public static final int DANAR_PACKET__TYPE_ENCRYPTION_REQUEST = 0x01;
    public static final int DANAR_PACKET__TYPE_ENCRYPTION_RESPONSE = 0x02;
    public static final int DANAR_PACKET__TYPE_COMMAND = 0xA1;
    public static final int DANAR_PACKET__TYPE_RESPONSE = 0xB2;
    public static final int DANAR_PACKET__TYPE_NOTIFY = 0xC3;

    // ---- Encryption / pairing opcodes -------------------------------------
    public static final int DANAR_PACKET__OPCODE_ENCRYPTION__PUMP_CHECK = 0x00;
    public static final int DANAR_PACKET__OPCODE_ENCRYPTION__CHECK_PASSKEY = 0xD0;
    public static final int DANAR_PACKET__OPCODE_ENCRYPTION__PASSKEY_REQUEST = 0xD1;
    public static final int DANAR_PACKET__OPCODE_ENCRYPTION__PASSKEY_RETURN = 0xD2;
    public static final int DANAR_PACKET__OPCODE_ENCRYPTION__TIME_INFORMATION = 0x01;

    // ---- Asynchronous notifications from the pump --------------------------
    public static final int DANAR_PACKET__OPCODE_NOTIFY__DELIVERY_COMPLETE = 0x01;
    public static final int DANAR_PACKET__OPCODE_NOTIFY__DELIVERY_RATE_DISPLAY = 0x02;
    public static final int DANAR_PACKET__OPCODE_NOTIFY__ALARM = 0x03;
    public static final int DANAR_PACKET__OPCODE_NOTIFY__MISSED_BOLUS_ALARM = 0x04;

    // ---- Review/history opcodes --------------------------------------------
    public static final int DANAR_PACKET__OPCODE_REVIEW__INITIAL_SCREEN_INFORMATION = 0x02;
    public static final int DANAR_PACKET__OPCODE_REVIEW__DELIVERY_STATUS = 0x03;
    public static final int DANAR_PACKET__OPCODE_REVIEW__GET_PASSWORD = 0x04;
    public static final int DANAR_PACKET__OPCODE_REVIEW__BOLUS_AVG = 0x10;
    public static final int DANAR_PACKET__OPCODE_REVIEW__BOLUS = 0x11;
    public static final int DANAR_PACKET__OPCODE_REVIEW__DAILY = 0x12;
    public static final int DANAR_PACKET__OPCODE_REVIEW__PRIME = 0x13;
    public static final int DANAR_PACKET__OPCODE_REVIEW__REFILL = 0x14;
    public static final int DANAR_PACKET__OPCODE_REVIEW__BLOOD_GLUCOSE = 0x15;
    public static final int DANAR_PACKET__OPCODE_REVIEW__CARBOHYDRATE = 0x16;
    public static final int DANAR_PACKET__OPCODE_REVIEW__TEMPORARY = 0x17;
    public static final int DANAR_PACKET__OPCODE_REVIEW__SUSPEND = 0x18;
    public static final int DANAR_PACKET__OPCODE_REVIEW__ALARM = 0x19;
    public static final int DANAR_PACKET__OPCODE_REVIEW__BASAL = 0x1A;
    public static final int DANAR_PACKET__OPCODE_REVIEW__ALL_HISTORY = 0x1F;
    public static final int DANAR_PACKET__OPCODE_REVIEW__GET_SHIPPING_INFORMATION = 0x20;
    public static final int DANAR_PACKET__OPCODE_REVIEW__GET_PUMP_CHECK = 0x21;
    public static final int DANAR_PACKET__OPCODE_REVIEW__GET_USER_TIME_CHANGE_FLAG = 0x22;
    public static final int DANAR_PACKET__OPCODE_REVIEW__SET_USER_TIME_CHANGE_FLAG_CLEAR = 0x23;
    public static final int DANAR_PACKET__OPCODE_REVIEW__GET_MORE_INFORMATION = 0x24;
    public static final int DANAR_PACKET__OPCODE_REVIEW__SET_HISTORY_UPLOAD_MODE = 0x25;
    public static final int DANAR_PACKET__OPCODE_REVIEW__GET_TODAY_DELIVERY_TOTAL = 0x26;

    // ---- Bolus opcodes ------------------------------------------------------
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_STEP_BOLUS_INFORMATION = 0x40;
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_EXTENDED_BOLUS_STATE = 0x41;
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_EXTENDED_BOLUS = 0x42;
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_DUAL_BOLUS = 0x43;
    public static final int DANAR_PACKET__OPCODE_BOLUS__SET_STEP_BOLUS_STOP = 0x44;
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_CARBOHYDRATE_CALCULATION_INFORMATION = 0x45;
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_EXTENDED_MENU_OPTION_STATE = 0x46;
    public static final int DANAR_PACKET__OPCODE_BOLUS__SET_EXTENDED_BOLUS = 0x47;
    public static final int DANAR_PACKET__OPCODE_BOLUS__SET_DUAL_BOLUS = 0x48;
    public static final int DANAR_PACKET__OPCODE_BOLUS__SET_EXTENDED_BOLUS_CANCEL = 0x49;
    public static final int DANAR_PACKET__OPCODE_BOLUS__SET_STEP_BOLUS_START = 0x4A;
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_CALCULATION_INFORMATION = 0x4B;
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_BOLUS_RATE = 0x4C;
    public static final int DANAR_PACKET__OPCODE_BOLUS__SET_BOLUS_RATE = 0x4D;
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_CIR_CF_ARRAY = 0x4E;
    public static final int DANAR_PACKET__OPCODE_BOLUS__SET_CIR_CF_ARRAY = 0x4F;
    public static final int DANAR_PACKET__OPCODE_BOLUS__GET_BOLUS_OPTION = 0x50;
    public static final int DANAR_PACKET__OPCODE_BOLUS__SET_BOLUS_OPTION = 0x51;

    // ---- Basal opcodes ------------------------------------------------------
    public static final int DANAR_PACKET__OPCODE_BASAL__SET_TEMPORARY_BASAL = 0x60;
    public static final int DANAR_PACKET__OPCODE_BASAL__TEMPORARY_BASAL_STATE = 0x61;
    public static final int DANAR_PACKET__OPCODE_BASAL__CANCEL_TEMPORARY_BASAL = 0x62;
    public static final int DANAR_PACKET__OPCODE_BASAL__GET_PROFILE_NUMBER = 0x63;
    public static final int DANAR_PACKET__OPCODE_BASAL__SET_PROFILE_NUMBER = 0x64;
    public static final int DANAR_PACKET__OPCODE_BASAL__GET_PROFILE_BASAL_RATE = 0x65;
    public static final int DANAR_PACKET__OPCODE_BASAL__SET_PROFILE_BASAL_RATE = 0x66;
    public static final int DANAR_PACKET__OPCODE_BASAL__GET_BASAL_RATE = 0x67;
    public static final int DANAR_PACKET__OPCODE_BASAL__SET_BASAL_RATE = 0x68;
    public static final int DANAR_PACKET__OPCODE_BASAL__SET_SUSPEND_ON = 0x69;
    public static final int DANAR_PACKET__OPCODE_BASAL__SET_SUSPEND_OFF = 0x6A;

    // ---- Option opcodes -----------------------------------------------------
    public static final int DANAR_PACKET__OPCODE_OPTION__GET_PUMP_TIME = 0x70;
    public static final int DANAR_PACKET__OPCODE_OPTION__SET_PUMP_TIME = 0x71;
    public static final int DANAR_PACKET__OPCODE_OPTION__GET_USER_OPTION = 0x72;
    public static final int DANAR_PACKET__OPCODE_OPTION__SET_USER_OPTION = 0x73;

    // ---- APS (artificial pancreas system) opcodes ---------------------------
    public static final int DANAR_PACKET__OPCODE_BASAL__APS_SET_TEMPORARY_BASAL = 0xC1;
    public static final int DANAR_PACKET__OPCODE__APS_HISTORY_EVENTS = 0xC2;
    public static final int DANAR_PACKET__OPCODE__APS_SET_EVENT_HISTORY = 0xC3;

    // ---- Miscellaneous opcodes ----------------------------------------------
    public static final int DANAR_PACKET__OPCODE_ETC__SET_HISTORY_SAVE = 0xE0;
    public static final int DANAR_PACKET__OPCODE_ETC__KEEP_CONNECTION = 0xFF;

    static {
        // Native implementation of the packet encryption; must be on the
        // library path or loading throws UnsatisfiedLinkError.
        System.loadLibrary("BleCommandUtil");
    }

    private static native byte[] getEncryptedPacketJni(Object context, int opcode, byte[] bytes, String deviceName);
    private static native byte[] getDecryptedPacketJni(Object context, byte[] bytes);
    // ---------------------------------------------------------
    private static BleCommandUtil mInstance = null;

    /**
     * Lazily created singleton accessor.
     * NOTE(review): this lazy initialization is not synchronized; if BLE
     * callbacks can hit it from multiple threads concurrently, two instances
     * may be created — confirm the threading model.
     */
    public static BleCommandUtil getInstance() {
        if (mInstance == null) {
            mInstance = new BleCommandUtil();
        }
        return mInstance;
    }
    // ---------------------------------------------------------

    /** Encrypts an outgoing packet via the native library. */
    public byte[] getEncryptedPacket(int opcode, byte[] bytes, String deviceName) {
        return getEncryptedPacketJni(MainApp.instance().getApplicationContext(), opcode, bytes, deviceName);
    }

    /** Decrypts an incoming packet via the native library. */
    public byte[] getDecryptedPacket(byte[] bytes) {
        return getDecryptedPacketJni(MainApp.instance().getApplicationContext(), bytes);
    }
}
| agpl-3.0 |
tdefilip/opennms | features/system-report/src/main/java/org/opennms/systemreport/SystemReport.java | 10041 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2010-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.systemreport;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.io.IOUtils;
import org.opennms.core.soa.ServiceRegistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
 * Command-line entry point that gathers OpenNMS system information via
 * {@link SystemReportPlugin}s and renders it with a chosen
 * {@link SystemReportFormatter}.
 */
public class SystemReport {
    private static final Logger LOG = LoggerFactory.getLogger(SystemReport.class);

    /** Matches -Dkey=value arguments so they can be copied into system properties. */
    final static Pattern m_pattern = Pattern.compile("^-D(.*?)=(.*)$");

    /**
     * Parses the command line, applies -D defines, then runs the requested
     * action: list plugins, list formats, or write the report.
     *
     * @param args command line arguments
     */
    public static void main(final String[] args) throws Exception {
        final String tempdir = System.getProperty("java.io.tmpdir");

        // Pull out -D defines first, so they take effect before anything else reads them.
        for (final String arg : args) {
            if (arg.startsWith("-D") && arg.contains("=")) {
                final Matcher m = m_pattern.matcher(arg);
                if (m.matches()) {
                    System.setProperty(m.group(1), m.group(2));
                }
            }
        }

        // Provide harmless defaults so the Spring context can start outside
        // a full OpenNMS installation.
        if (System.getProperty("opennms.home") == null) {
            System.setProperty("opennms.home", tempdir);
        }
        if (System.getProperty("rrd.base.dir") == null) {
            System.setProperty("rrd.base.dir", tempdir);
        }
        if (System.getProperty("rrd.binary") == null) {
            System.setProperty("rrd.binary", "/usr/bin/rrdtool");
        }

        final CommandLineParser parser = new PosixParser();
        final Options options = new Options();
        options.addOption("h", "help", false, "this help");
        options.addOption("D", "define", true, "define a java property");
        options.addOption("p", "list-plugins", false, "list the available system report plugins");
        options.addOption("u", "use-plugins", true, "select the plugins to output");
        options.addOption("l", "list-formats", false, "list the available output formats");
        options.addOption("f", "format", true, "the format to output");
        options.addOption("o", "output", true, "the file to write output to");
        final CommandLine line = parser.parse(options, args, false);

        final Set<String> plugins = new LinkedHashSet<String>();
        final SystemReport report = new SystemReport();

        // help
        if (line.hasOption("h")) {
            final HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("system-report.sh [options]", options);
            System.exit(0);
        }

        // format and output file
        if (line.hasOption("f")) {
            report.setFormat(line.getOptionValue("f"));
        }
        if (line.hasOption("o")) {
            report.setOutput(line.getOptionValue("o"));
        }
        if (line.hasOption("u")) {
            final String value = line.getOptionValue("u");
            if (value != null) {
                for (final String s : value.split(",+")) {
                    plugins.add(s);
                }
            }
        }

        // final command
        if (line.hasOption("p")) {
            report.listPlugins();
        } else if (line.hasOption("l")) {
            report.listFormats();
        } else {
            report.writePluginData(plugins);
        }
    }

    private ServiceRegistry m_serviceRegistry;
    private ClassPathXmlApplicationContext m_context;
    private String m_output = "-";   // "-" means write to stdout
    private String m_format = "text";

    /** Sets the output target; "-" selects stdout. */
    private void setOutput(final String file) {
        m_output = file;
    }

    /** Sets the output format name (must match a formatter's getName()). */
    private void setFormat(final String format) {
        m_format = format;
    }

    /**
     * Runs the selected (or all) plugins through the selected formatter,
     * writing to the configured output.
     *
     * @param plugins plugin names to run; when empty, all plugins are run
     */
    private void writePluginData(final Collection<String> plugins) {
        initializeSpring();

        // Resolve the formatter by name.
        SystemReportFormatter formatter = null;
        for (final SystemReportFormatter f : getFormatters()) {
            if (m_format.equals(f.getName())) {
                formatter = f;
                break;
            }
        }
        if (formatter == null) {
            LOG.error("Unknown format '{}'!", m_format);
            System.exit(1);
        }
        formatter.setOutput(m_output);

        OutputStream stream = null;
        if (formatter.needsOutputStream()) {
            if (m_output.equals("-")) {
                stream = System.out;
            } else {
                try {
                    final File f = new File(m_output);
                    if (!f.delete()) {
                        LOG.warn("Could not delete file: {}", f.getPath());
                    }
                    stream = new FileOutputStream(f, false);
                } catch (final FileNotFoundException e) {
                    LOG.error("Unable to write to '{}'", m_output, e);
                    System.exit(1);
                }
            }
            if (m_output.equals("-") && !formatter.canStdout()) {
                LOG.error("{} formatter does not support writing to STDOUT!", formatter.getName());
                System.exit(1);
            }
            formatter.setOutputStream(stream);
        }

        // Map plugin names to instances; when no plugins were requested,
        // select all of them (in discovery order).
        final int pluginSize = plugins.size();
        final Map<String, SystemReportPlugin> pluginMap = new HashMap<String, SystemReportPlugin>();
        for (final SystemReportPlugin plugin : getPlugins()) {
            final String name = plugin.getName();
            if (pluginSize == 0) plugins.add(name);
            pluginMap.put(name, plugin);
        }

        try {
            formatter.begin();
            if (stream != null) stream.flush();
            for (final String pluginName : plugins) {
                final SystemReportPlugin plugin = pluginMap.get(pluginName);
                if (plugin == null) {
                    LOG.warn("No plugin named '{}' found, skipping.", pluginName);
                } else {
                    // One misbehaving plugin must not abort the whole report.
                    try {
                        formatter.write(plugin);
                    } catch (final Exception e) {
                        LOG.error("An error occurred calling plugin '{}'", plugin.getName(), e);
                    }
                    if (stream != null) stream.flush();
                }
            }
            formatter.end();
            if (stream != null) stream.flush();
        } catch (final Exception e) {
            LOG.error("An error occurred writing plugin data to output.", e);
            System.exit(1);
        }

        // BUG FIX: the original unconditionally called closeQuietly(stream),
        // which closes System.out when the output is "-" and silences any
        // further output from the JVM. Only close streams we opened ourselves.
        if (stream != null && stream != System.out) {
            IOUtils.closeQuietly(stream);
        }
    }

    /** Prints "name: description" for every discovered plugin to stderr. */
    private void listPlugins() {
        for (final SystemReportPlugin plugin : getPlugins()) {
            System.err.println(plugin.getName() + ": " + plugin.getDescription());
        }
    }

    /** Prints "name: description" for every discovered formatter to stderr. */
    private void listFormats() {
        for (final SystemReportFormatter formatter : getFormatters()) {
            System.err.println(formatter.getName() + ": " + formatter.getDescription());
        }
    }

    /** Returns all registered plugins, sorted. Initializes Spring on first use. */
    public List<SystemReportPlugin> getPlugins() {
        initializeSpring();
        final List<SystemReportPlugin> plugins = new ArrayList<SystemReportPlugin>(m_serviceRegistry.findProviders(SystemReportPlugin.class));
        Collections.sort(plugins);
        return plugins;
    }

    /** Returns all registered formatters, sorted. Initializes Spring on first use. */
    public List<SystemReportFormatter> getFormatters() {
        initializeSpring();
        final List<SystemReportFormatter> formatters = new ArrayList<SystemReportFormatter>(m_serviceRegistry.findProviders(SystemReportFormatter.class));
        Collections.sort(formatters);
        return formatters;
    }

    /** Lazily bootstraps the Spring context and looks up the service registry. */
    private void initializeSpring() {
        if (m_serviceRegistry == null) {
            List<String> configs = new ArrayList<String>();
            configs.add("classpath:/META-INF/opennms/applicationContext-soa.xml");
            configs.add("classpath:/META-INF/opennms/applicationContext-commonConfigs.xml");
            configs.add("classpath:/META-INF/opennms/applicationContext-dao.xml");
            configs.add("classpath*:/META-INF/opennms/component-dao.xml");
            configs.add("classpath:/META-INF/opennms/applicationContext-systemReport.xml");
            m_context = new ClassPathXmlApplicationContext(configs.toArray(new String[0]));
            m_serviceRegistry = (ServiceRegistry) m_context.getBean("serviceRegistry");
        }
    }

    /** Injection point for tests / alternate wiring. */
    public void setServiceRegistry(final ServiceRegistry registry) {
        m_serviceRegistry = registry;
    }
}
| agpl-3.0 |
olivermay/geomajas | face/geomajas-face-gwt/client/src/main/java/org/geomajas/gwt/client/map/event/LayerSelectedEvent.java | 1307 | /*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2013 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the GNU Affero
* General Public License. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.gwt.client.map.event;
import org.geomajas.annotation.Api;
import org.geomajas.gwt.client.map.layer.Layer;
import com.google.gwt.event.shared.GwtEvent;
/**
 * Event that reports the selection of a layer.
 *
 * @author Pieter De Graef
 * @since 1.6.0
 */
@Api(allMethods = true)
public class LayerSelectedEvent extends GwtEvent<LayerSelectionHandler> {

    private final Layer<?> selectedLayer;

    /**
     * Constructor.
     *
     * @param layer selected layer
     */
    public LayerSelectedEvent(Layer<?> layer) {
        this.selectedLayer = layer;
    }

    /**
     * Get selected layer.
     *
     * @return selected layer
     */
    public Layer<?> getLayer() {
        return selectedLayer;
    }

    @Override
    public Type<LayerSelectionHandler> getAssociatedType() {
        return LayerSelectionHandler.TYPE;
    }

    @Override
    protected void dispatch(LayerSelectionHandler handler) {
        handler.onSelectLayer(this);
    }
}
| agpl-3.0 |
KunjanSharma/gwt-chronoscope | chronoscope-api/src/main/java/org/timepedia/chronoscope/client/event/ChartClickEvent.java | 1231 | package org.timepedia.chronoscope.client.event;
import org.timepedia.chronoscope.client.XYPlot;
import org.timepedia.exporter.client.Export;
import org.timepedia.exporter.client.ExportPackage;
import org.timepedia.exporter.client.Exportable;
/**
* Fired by plot implementations when click occurs on chart which is not handled
* by a marker or by a focus-on-point click event.
*/
@ExportPackage("chronoscope")
public class ChartClickEvent extends PlotEvent<ChartClickHandler> implements Exportable {
public static Type<ChartClickHandler> TYPE
= new Type<ChartClickHandler>();
private int x;
@Export
/**
* X coordinate of click event relative to left border of plot area.
*/
public double getX() {
return x - getPlot().getBounds().x;
}
/**
* Y coordinate of click event relative to top border of plot area.
*/
@Export
public double getY() {
return y - getPlot().getBounds().y;
}
private int y;
public ChartClickEvent(XYPlot plot, int x, int y) {
super(plot);
this.x = x;
this.y = y;
}
public Type getAssociatedType() {
return TYPE;
}
protected void dispatch(ChartClickHandler chartClickHandler) {
chartClickHandler.onChartClick(this);
}
} | lgpl-2.1 |
it-tavis/opencms-core | src/org/opencms/db/postgresql/CmsHistoryDriver.java | 3735 | /*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.db.postgresql;
import org.opencms.db.CmsDbContext;
import org.opencms.db.CmsDbSqlException;
import org.opencms.db.I_CmsHistoryDriver;
import org.opencms.db.generic.CmsSqlManager;
import org.opencms.file.CmsDataAccessException;
import org.opencms.file.history.CmsHistoryProject;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* PostgreSql implementation of the history driver methods.<p>
*
* @since 6.9.1
*/
public class CmsHistoryDriver extends org.opencms.db.generic.CmsHistoryDriver {

    /**
     * @see org.opencms.db.I_CmsHistoryDriver#initSqlManager(String)
     */
    @Override
    public org.opencms.db.generic.CmsSqlManager initSqlManager(String classname) {
        // return the PostgreSQL specific SQL manager so its queries override the generic ones
        return CmsSqlManager.getInstance(classname);
    }

    /**
     * @see org.opencms.db.I_CmsHistoryDriver#readProjects(org.opencms.db.CmsDbContext)
     */
    @Override
    public List<CmsHistoryProject> readProjects(CmsDbContext dbc) throws CmsDataAccessException {

        List<CmsHistoryProject> projects = new ArrayList<CmsHistoryProject>();
        ResultSet res = null;
        PreparedStatement stmt = null;
        Connection conn = null;
        // history projects read from the DB, keyed by publish tag.
        // NOTE(review): HashMap iteration order is unspecified, so the order of the
        // returned list is arbitrary — presumably callers sort or don't care; confirm.
        Map<Integer, CmsHistoryProject> tmpProjects = new HashMap<Integer, CmsHistoryProject>();
        try {
            // create the statement
            conn = m_sqlManager.getConnection(dbc);
            stmt = m_sqlManager.getPreparedStatement(conn, "C_POSTGRE_PROJECTS_READLAST_HISTORY");
            // bind the row limit: only the last 300 history entries are read
            stmt.setInt(1, 300);
            res = stmt.executeQuery();
            while (res.next()) {
                tmpProjects.put(Integer.valueOf(res.getInt("PUBLISH_TAG")), internalCreateProject(res, null));
            }
        } catch (SQLException e) {
            throw new CmsDbSqlException(org.opencms.db.generic.Messages.get().container(
                org.opencms.db.generic.Messages.ERR_GENERIC_SQL_1,
                CmsDbSqlException.getErrorQuery(stmt)), e);
        } finally {
            // always release the JDBC resources, even on error
            m_sqlManager.closeAll(dbc, conn, stmt, res);
        }
        // second pass (after the connection is closed): attach the project resources
        I_CmsHistoryDriver historyDriver = m_driverManager.getHistoryDriver(dbc);
        for (Map.Entry<Integer, CmsHistoryProject> entry : tmpProjects.entrySet()) {
            List<String> resources = historyDriver.readProjectResources(dbc, entry.getKey().intValue());
            entry.getValue().setProjectResources(resources);
            projects.add(entry.getValue());
        }
        return projects;
    }
}
kurtwalker/modeler | core/src/main/java/org/pentaho/agilebi/modeler/geo/LatLngRole.java | 2170 | /*!
* This program is free software; you can redistribute it and/or modify it under the
* terms of the GNU Lesser General Public License, version 2.1 as published by the Free Software
* Foundation.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* program; if not, you can obtain a copy at http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html
* or from the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* Copyright (c) 2002-2017 Hitachi Vantara.. All rights reserved.
*/
package org.pentaho.agilebi.modeler.geo;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
 * A {@link GeoRole} specialization for latitude/longitude fields. In addition
 * to the plain alias matching performed by {@link GeoRole}, a field also
 * matches when its name merely <em>ends</em> with {@code <separator><alias>};
 * in that case the leading part of the field name is remembered as a prefix
 * (e.g. a field "customer_latitude" matching alias "latitude" yields the
 * prefix "customer").
 */
public class LatLngRole extends GeoRole implements Serializable {
  private static final long serialVersionUID = 3443044732976689019L;

  /** Prefix stripped off the field name on a suffix match; empty when the match was exact. */
  private String prefix = "";

  public LatLngRole() {
    super();
  }

  public LatLngRole( String name, List<String> commonAliases ) {
    super( name, commonAliases );
  }

  public LatLngRole( String name, String commonAliases ) {
    super( name, commonAliases );
  }

  /**
   * Matches either via the parent's alias logic or, failing that, when the
   * field name ends with {@code <separator><alias>}. On a suffix match the
   * part of the field name before the suffix is stored as the prefix.
   *
   * @param fieldName physical field name being classified
   * @param alias candidate alias to test against
   * @return true if the field name matches this role
   */
  @Override
  protected boolean eval( String fieldName, String alias ) {
    if ( super.eval( fieldName, alias ) ) {
      return true;
    }
    String suffix = getMatchSeparator() + alias;
    if ( fieldName.endsWith( suffix ) ) {
      // Strip the suffix off the end by length. The previous indexOf-based code
      // truncated too early when separator+alias occurred twice in the name
      // (e.g. "lat_x_lat" with suffix "_lat" gave prefix "lat" instead of "lat_x").
      prefix = fieldName.substring( 0, fieldName.length() - suffix.length() );
      return true;
    }
    return false;
  }

  public String getPrefix() {
    return prefix;
  }

  /**
   * Creates a deep copy of this role (the alias list is cloned, the prefix is copied).
   *
   * @return an independent copy of this role
   */
  @Override
  @SuppressWarnings( "unchecked" )
  public LatLngRole clone() {
    List<String> clonedAliases = (ArrayList<String>) ( (ArrayList<String>) getCommonAliases() ).clone();
    LatLngRole clone = new LatLngRole( getName(), clonedAliases );
    clone.prefix = getPrefix();
    return clone;
  }
}
| lgpl-2.1 |
emmanuel-keller/jcifs-krb5 | src/jcifs/spnego/asn1/BERConstructedSequence.java | 1967 | /* Copyright (c) 2000 The Legion Of The Bouncy Castle
* (http://www.bouncycastle.org)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package jcifs.spnego.asn1;
import java.io.IOException;
import java.util.Enumeration;
public class BERConstructedSequence
    extends DERConstructedSequence
{
    /*
     * Writes this constructed SEQUENCE using the BER indefinite-length form
     * when the target stream permits it (plain ASN.1 or BER output);
     * otherwise falls back to the definite-length encoding of the superclass.
     */
    void encode(
        DEROutputStream out)
        throws IOException
    {
        if (out instanceof ASN1OutputStream || out instanceof BEROutputStream)
        {
            // identifier octet: constructed SEQUENCE tag
            out.write(SEQUENCE | CONSTRUCTED);
            // length octet 0x80 = indefinite length; contents follow until end-of-contents
            out.write(0x80);

            Enumeration e = getObjects();
            while (e.hasMoreElements())
            {
                out.writeObject(e.nextElement());
            }

            // end-of-contents octets (0x00 0x00) terminate the indefinite-length value
            out.write(0x00);
            out.write(0x00);
        }
        else
        {
            super.encode(out);
        }
    }
}
| lgpl-2.1 |
serrapos/opencms-core | src/org/opencms/search/CmsSearchIndexSource.java | 11557 | /*
* This library is part of OpenCms -
* the Open Source Content Management System
*
* Copyright (c) Alkacon Software GmbH (http://www.alkacon.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* For further information about Alkacon Software GmbH, please see the
* company website: http://www.alkacon.com
*
* For further information about OpenCms, please see the
* project website: http://www.opencms.org
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.opencms.search;
import org.opencms.main.CmsIllegalArgumentException;
import org.opencms.main.CmsLog;
import org.opencms.main.OpenCms;
import org.opencms.util.CmsStringUtil;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
/**
* A search index source is a description of a list of Cms resources
* to be indexed.<p>
*
* @since 6.0.0
*/
public class CmsSearchIndexSource implements Comparable<CmsSearchIndexSource> {

    /** The log object for this class. */
    private static final Log LOG = CmsLog.getLog(CmsSearchIndexSource.class);

    /** A list of Cms resource types to be indexed. */
    private List<String> m_documentTypes;

    /** The indexer. */
    private I_CmsIndexer m_indexer;

    /** The class name of the indexer. */
    private String m_indexerClassName;

    /** The logical key/name of this index. */
    private String m_name;

    /** A map of optional key/value parameters. */
    private Map<String, String> m_params;

    /** A list of Cms resources to be indexed. */
    private List<String> m_resourcesNames;

    /**
     * Creates a new CmsSearchIndexSource.<p>
     */
    public CmsSearchIndexSource() {

        m_params = new HashMap<String, String>();
        m_resourcesNames = new ArrayList<String>();
        m_documentTypes = new ArrayList<String>();
    }

    /**
     * Adds a parameter.<p>
     *
     * @param key the key/name of the parameter
     * @param value the value of the parameter
     */
    public void addConfigurationParameter(String key, String value) {

        m_params.put(key, value);
    }

    /**
     * Adds the name of a document type.<p>
     *
     * @param key the name of a document type to add
     */
    public void addDocumentType(String key) {

        m_documentTypes.add(key);
    }

    /**
     * Adds the path of a Cms resource.<p>
     *
     * @param resourceName the path of a Cms resource
     */
    public void addResourceName(String resourceName) {

        m_resourcesNames.add(resourceName);
    }

    /**
     * Returns <code>0</code> if the given object is an index source with the same name. <p>
     *
     * Note that the name of an index source has to be unique within OpenCms.<p>
     *
     * @param obj another index source
     *
     * @return <code>0</code> if the given object is an index source with the same name
     *
     * @see java.lang.Comparable#compareTo(java.lang.Object)
     */
    public int compareTo(CmsSearchIndexSource obj) {

        if (obj == this) {
            return 0;
        }
        // ordering is delegated to the (unique) name
        return m_name.compareTo(obj.m_name);
    }

    /**
     * Two index sources are considered equal if their names as returned by {@link #getName()} is equal.<p>
     *
     * Note that the name of an index source has to be unique within OpenCms.<p>
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj) {

        if (obj == this) {
            return true;
        }
        if (obj instanceof CmsSearchIndexSource) {
            return m_name.equals(((CmsSearchIndexSource)obj).m_name);
        }
        return false;
    }

    /**
     * Returns the list of names (Strings) of the document types to be indexed.<p>
     *
     * @return the list of names (Strings) of the document types to be indexed
     */
    public List<String> getDocumentTypes() {

        return m_documentTypes;
    }

    /**
     * Returns the indexer.<p>
     *
     * @return the indexer
     */
    public I_CmsIndexer getIndexer() {

        return m_indexer;
    }

    /**
     * Returns the class name of the indexer.<p>
     *
     * @return the class name of the indexer
     */
    public String getIndexerClassName() {

        return m_indexerClassName;
    }

    /**
     * Returns the logical key/name of this search index source.<p>
     *
     * @return the logical key/name of this search index source
     */
    public String getName() {

        return m_name;
    }

    /**
     * Returns the value for a specified parameter key.<p>
     *
     * @param key the parameter key/name
     * @return the value for the specified parameter key
     */
    public String getParam(String key) {

        return m_params.get(key);
    }

    /**
     * Returns the map of optional key/value parameters.<p>
     *
     * @return the map of optional key/value parameters
     */
    public Map<String, String> getParams() {

        return m_params;
    }

    /**
     * Returns the list of VFS resources to be indexed.<p>
     *
     * @return the list of VFS resources to be indexed
     */
    public List<String> getResourcesNames() {

        return m_resourcesNames;
    }

    /**
     * Overridden to be consistent with the overridden method
     * <code>{@link #equals(Object)}</code>.
     *
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {

        return m_name.hashCode();
    }

    /**
     * Returns <code>true</code> in case the given resource root path is contained in the list of
     * configured resource names of this index source.<p>
     *
     * @param rootPath the resource root path to check
     *
     * @return <code>true</code> in case the given resource root path is contained in the list of
     *      configured resource names of this index source
     *
     * @see #getResourcesNames()
     */
    public boolean isContaining(String rootPath) {

        if ((rootPath != null) && (m_resourcesNames != null)) {
            Iterator<String> i = m_resourcesNames.iterator();
            while (i.hasNext()) {
                String path = i.next();
                // NOTE(review): plain startsWith also matches sibling folders, e.g. a
                // configured "/sites/a" matches rootPath "/sites/a2/..." — presumably
                // acceptable here; confirm against how resource names are configured.
                if (rootPath.startsWith(path)) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Returns <code>true</code> in case the given resource root path is contained in the list of
     * configured resource names, and the given document type name is contained in the
     * list if configured document type names of this index source.<p>
     *
     * @param rootPath the resource root path to check
     * @param documentType the document type factory name to check
     *
     * @return <code>true</code> in case the given resource root path is contained in the list of
     *      configured resource names, and the given document type name is contained in the
     *      list if configured document type names of this index source
     *
     * @see #isContaining(String)
     * @see #getDocumentTypes()
     */
    public boolean isIndexing(String rootPath, String documentType) {

        return m_documentTypes.contains(documentType) && isContaining(rootPath);
    }

    /**
     * Removes the name of a document type from the list of configured types of this index source.<p>
     *
     * @param key the name of the document type to remove
     *
     * @return true if the given document type name was contained before thus could be removed successfully, false otherwise
     */
    public boolean removeDocumentType(String key) {

        return m_documentTypes.remove(key);
    }

    /**
     * Sets the list of document type names (Strings) to be indexed.<p>
     *
     * @param documentTypes the list of document type names (Strings) to be indexed
     */
    public void setDocumentTypes(List<String> documentTypes) {

        m_documentTypes = documentTypes;
    }

    /**
     * Sets the class name of the indexer.<p>
     *
     * An Exception is thrown to allow GUI-display of wrong input.<p>
     *
     * @param indexerClassName the class name of the indexer
     *
     * @throws CmsIllegalArgumentException if the given String is not a fully qualified classname (within this Java VM)
     */
    public void setIndexerClassName(String indexerClassName) throws CmsIllegalArgumentException {

        try {
            // instantiate immediately so an invalid or incompatible class name is
            // detected (and reported to the caller) right away
            m_indexer = (I_CmsIndexer)Class.forName(indexerClassName).newInstance();
            m_indexerClassName = indexerClassName;
        } catch (Exception exc) {
            if (LOG.isWarnEnabled()) {
                LOG.warn(
                    Messages.get().getBundle().key(Messages.LOG_INDEXER_CREATION_FAILED_1, m_indexerClassName),
                    exc);
            }
            throw new CmsIllegalArgumentException(Messages.get().container(
                Messages.ERR_INDEXSOURCE_INDEXER_CLASS_NAME_2,
                indexerClassName,
                I_CmsIndexer.class.getName()));
        }
    }

    /**
     * Sets the logical key/name of this search index source.<p>
     *
     * @param name the logical key/name of this search index source
     *
     * @throws CmsIllegalArgumentException if argument name is null, an empty or whitespace-only Strings
     *         or already used for another indexsource's name.
     */
    public void setName(String name) throws CmsIllegalArgumentException {

        if (CmsStringUtil.isEmptyOrWhitespaceOnly(name)) {
            throw new CmsIllegalArgumentException(Messages.get().container(
                Messages.ERR_INDEXSOURCE_CREATE_MISSING_NAME_0));
        }
        // already used? Don't test this at xml-configuration time (no manager)
        if (OpenCms.getRunLevel() > OpenCms.RUNLEVEL_2_INITIALIZING) {
            CmsSearchManager mngr = OpenCms.getSearchManager();
            // don't test this if the indexsource is not new (widget invokes setName even if it was not changed)
            if (mngr.getIndexSource(name) != this) {
                if (mngr.getSearchIndexSources().keySet().contains(name)) {
                    throw new CmsIllegalArgumentException(Messages.get().container(
                        Messages.ERR_INDEXSOURCE_CREATE_INVALID_NAME_1,
                        name));
                }
            }
        }
        m_name = name;
    }

    /**
     * Sets the map of optional key/value parameters.<p>
     *
     * @param params the map of optional key/value parameters
     */
    public void setParams(Map<String, String> params) {

        m_params = params;
    }

    /**
     * Sets the list of Cms resources to be indexed.<p>
     *
     * @param resources the list of Cms resources (Strings) to be indexed
     */
    public void setResourcesNames(List<String> resources) {

        m_resourcesNames = resources;
    }
}
git-moss/Push2Display | lib/batik-1.8/sources/org/apache/batik/anim/dom/SVGOMMultiImageElement.java | 2733 | /*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.anim.dom;
import org.apache.batik.dom.AbstractDocument;
import org.apache.batik.util.SVG12Constants;
import org.w3c.dom.Node;
/**
* This class implements a multiImage extension to SVG.
*
* The 'multiImage' element is similar to the 'image' element (supports
* all the same attributes and properties) except.
* <ol>
* <li>It has two addtional attributes: 'pixel-width' and
* 'pixel-height' which are the maximum width and height of the
* image referenced by the xlink:href attribute.</li>
* <li>It can contain a child element 'subImage' which has
* two attributes, pixel-width, pixel-height.
* It holds SVG content to be rendered.</li>
* <li>It can contain a child element 'subImageRef' which has only
* three attributes, pixel-width, pixel-height and xlink:href.
* The image displayed is the smallest image such that
* pixel-width and pixel-height are greater than or equal to the
* required image size for display.</li>
* </ol>
*
* @author <a href="mailto:thomas.deweese@kodak.com">Thomas DeWeese</a>
* @version $Id$ */
public class SVGOMMultiImageElement
    extends SVGStylableElement {

    /**
     * Creates a new SVG MultiImageElement object.
     */
    protected SVGOMMultiImageElement() {
        // no-arg constructor used by newNode() when the DOM clones/imports nodes
    }

    /**
     * Creates a new SVG MultiImageElement object.
     * @param prefix The namespace prefix.
     * @param owner The owner document.
     */
    public SVGOMMultiImageElement(String prefix, AbstractDocument owner) {
        super(prefix, owner);
    }

    /**
     * <b>DOM</b>: Implements {@link org.w3c.dom.Node#getLocalName()}.
     *
     * @return the SVG 1.2 'multiImage' tag name
     */
    public String getLocalName() {
        return SVG12Constants.SVG_MULTI_IMAGE_TAG;
    }

    /**
     * Returns a new uninitialized instance of this object's class.
     *
     * @return a fresh, empty SVGOMMultiImageElement
     */
    protected Node newNode() {
        return new SVGOMMultiImageElement();
    }
}
| lgpl-3.0 |
briandipalma/brjs | brjs-core-tests/src/test/java/org/bladerunnerjs/spec/app/ServeAppTest.java | 6815 | package org.bladerunnerjs.spec.app;
import org.bladerunnerjs.api.App;
import org.bladerunnerjs.api.AppConf;
import org.bladerunnerjs.api.Aspect;
import org.bladerunnerjs.api.model.exception.request.ResourceNotFoundException;
import org.bladerunnerjs.api.spec.engine.SpecTest;
import org.bladerunnerjs.api.BladeWorkbench;
import org.bladerunnerjs.spec.brjs.appserver.MockTagHandler;
import org.bladerunnerjs.testing.utility.MockContentPlugin;
import org.junit.Before;
import org.junit.Test;
/**
 * Spec tests covering how an app is served: index pages, locale forwarding
 * pages, tag processing, workbench pages and bundle/content-plugin requests.
 */
public class ServeAppTest extends SpecTest {
    private App app;
    private AppConf appConf;
    private Aspect defaultAspect;
    private Aspect alternateAspect;
    private BladeWorkbench workbench;
    // response body captured from each simulated request
    private StringBuffer response = new StringBuffer();

    @Before
    public void initTestObjects() throws Exception
    {
        // configure BRJS with the standard auto-discovered plugins plus mock
        // tag handlers and a mock content plugin used by the tests below
        given(brjs).automaticallyFindsAssetPlugins()
            .and(brjs).automaticallyFindsMinifierPlugins()
            .and(brjs).automaticallyFindsContentPlugins()
            .and(brjs).automaticallyFindsRequirePlugins()
            .and(brjs).hasTagHandlerPlugins(new MockTagHandler("tagToken", "dev replacement", "prod replacement", false), new MockTagHandler("localeToken", "", "", true))
            .and(brjs).hasContentPlugins(new MockContentPlugin())
            .and(brjs).hasBeenCreated();
        app = brjs.app("app1");
        appConf = app.appConf();
        defaultAspect = app.aspect("default");
        alternateAspect = app.aspect("alternate");
        workbench = app.bladeset("bs").blade("b1").workbench();
    }

    @Test
    public void indexPageCanBeAccessedForSingleLocaleApps() throws Exception {
        given(defaultAspect).indexPageHasContent("index page")
            .and(brjs).localeSwitcherHasContents("");
        when(app).requestReceived("", response);
        then(response).textEquals("index page");
    }

    @Test
    public void localeForwardingPageIsReturnedIfNoLocaleIsSpecifiedForMultiLocaleApps() throws Exception {
        given(appConf).supportsLocales("en", "de")
            .and(defaultAspect).indexPageHasContent("index page")
            .and(brjs).localeSwitcherHasContents("locale forwarding page");
        when(app).requestReceived("", response);
        then(response).containsText("locale forwarding page");
    }

    @Test
    public void localeForwardingPageHasADocType() throws Exception {
        given(appConf).supportsLocales("en", "de")
            .and(defaultAspect).indexPageHasContent("index page")
            .and(brjs).localeSwitcherHasContents("locale forwarding page");
        when(app).requestReceived("", response);
        then(response).containsText("<!DOCTYPE html>");
    }

    @Test
    public void localeForwardingPageHasANoScriptOptionToRedirectToDefaultLocale() throws Exception {
        given(appConf).supportsLocales("en", "de")
            .and(defaultAspect).indexPageHasContent("index page")
            .and(brjs).localeSwitcherHasContents("locale forwarding page");
        when(app).requestReceived("", response);
        then(response).containsText("<noscript>\n"+"\t<meta http-equiv='refresh' content='0; url='en'>\n"+"</noscript>");
    }

    @Test
    public void exceptionIsThrownIfAnInvalidLocaleIsRequestedForMultiLocaleApps() throws Exception {
        given(appConf).supportsLocales("en", "de")
            .and(defaultAspect).indexPageHasContent("index page")
            .and(brjs).localeSwitcherHasContents("locale forwarding page");
        when(app).requestReceived("zz", response);
        then(exceptions).verifyException(ResourceNotFoundException.class, "zz");
    }

    @Test
    public void indexPageCanBeAccessedForMultiLocaleApps() throws Exception {
        given(appConf).supportsLocales("en", "de")
            .and(defaultAspect).indexPageHasContent("index page")
            .and(brjs).localeSwitcherHasContents("");
        when(app).requestReceived("en", response);
        then(response).textEquals("index page");
    }

    @Test
    public void tagsWithinIndexPagesAreProcessed() throws Exception {
        given(defaultAspect).indexPageHasContent("<@tagToken @/>")
            .and(brjs).localeSwitcherHasContents("");
        when(app).requestReceived("", response);
        then(response).textEquals("dev replacement");
    }

    @Test
    public void localesCanBeUsedInTagHandlersInSingleLocaleApps() throws Exception {
        given(appConf).supportsLocales("en_GB")
            .and(defaultAspect).indexPageHasContent("<@localeToken @/>")
            .and(brjs).localeSwitcherHasContents("");
        when(app).requestReceived("", response);
        then(response).textEquals("- en_GB");
    }

    @Test
    public void localesCanBeUsedInTagHandlersInMultiLocaleApps() throws Exception {
        given(appConf).supportsLocales("en", "en_GB")
            .and(defaultAspect).indexPageHasContent("<@localeToken @/>")
            .and(brjs).localeSwitcherHasContents("");
        when(app).requestReceived("en_GB", response);
        then(response).textEquals("- en_GB");
    }

    @Test
    public void workbenchPageCanBeAccessedInSingleLocaleApps() throws Exception {
        given(workbench).indexPageHasContent("workbench index page")
            .and(brjs).localeSwitcherHasContents("");
        when(app).requestReceived("bs/b1/workbench/", response);
        then(response).textEquals("workbench index page");
    }

    @Test
    public void workbenchPageCanBeAccessedInMultiLocaleApps() throws Exception {
        given(appConf).supportsLocales("en", "en_GB")
            .and(workbench).indexPageHasContent("workbench index page")
            .and(brjs).localeSwitcherHasContents("");
        when(app).requestReceived("bs/b1/workbench/en", response);
        then(response).textEquals("workbench index page");
    }

    @Test
    public void defaultAspectBundlesCanBeRequested() throws Exception {
        given(defaultAspect).indexPageRequires("appns/SomeClass")
            .and(defaultAspect).hasClass("appns/SomeClass")
            .and(defaultAspect).containsFileWithContents("src/appns/template.html", "<div id='template-id'>template file</div>");
        when(app).requestReceived("v/dev/html/bundle.html", response);
        then(response).containsText("template file");
    }

    @Test
    public void alternateAspectBundlesCanBeRequested() throws Exception {
        given(alternateAspect).indexPageRequires("appns/SomeClass")
            .and(alternateAspect).hasClass("appns/SomeClass")
            .and(alternateAspect).containsFileWithContents("src/appns/template.html", "<div id='template-id'>template file</div>");
        when(app).requestReceived("alternate/v/dev/html/bundle.html", response);
        then(response).containsText("template file");
    }

    @Test
    public void workbenchBundlesCanBeRequested() throws Exception {
        given(workbench).indexPageRequires("appns/SomeClass")
            .and(workbench).hasClass("appns/SomeClass")
            .and(workbench).containsFileWithContents("src/appns/template.html", "<div id='template-id'>workbench template file</div>");
        when(app).requestReceived("bs/b1/workbench/v/dev/html/bundle.html", response);
        then(response).containsText("workbench template file");
    }

    @Test
    public void contentPluginsCanDefineNonVersionedUrls() throws Exception
    {
        given(app).hasBeenCreated();
        when(app).requestReceived("mock-content-plugin/unversioned/url", response);
        then(response).containsText(MockContentPlugin.class.getCanonicalName());
    }
}
| lgpl-3.0 |
lejingw/hermes | hermes-producer/src/main/java/com/ctrip/hermes/producer/pipeline/DefaultProducerPipelineSinkManager.java | 1607 | package com.ctrip.hermes.producer.pipeline;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Future;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.unidal.lookup.ContainerHolder;
import org.unidal.lookup.annotation.Inject;
import org.unidal.lookup.annotation.Named;
import com.ctrip.hermes.core.meta.MetaService;
import com.ctrip.hermes.core.pipeline.PipelineSink;
import com.ctrip.hermes.core.result.SendResult;
@Named(type = ProducerPipelineSinkManager.class)
public class DefaultProducerPipelineSinkManager extends ContainerHolder implements Initializable, ProducerPipelineSinkManager {

    @Inject
    private MetaService m_meta;

    /** Pipeline sinks keyed by endpoint type, populated once in initialize(). */
    private Map<String, PipelineSink<Future<SendResult>>> m_sinks = new HashMap<String, PipelineSink<Future<SendResult>>>();

    /**
     * Resolves the sink responsible for the endpoint type of the given topic.
     *
     * @param topic the topic whose endpoint type selects the sink
     * @return the sink registered for the topic's endpoint type
     * @throws IllegalArgumentException if no sink is registered for that type
     */
    @Override
    public PipelineSink<Future<SendResult>> getSink(String topic) {
        String type = m_meta.findEndpointTypeByTopic(topic);
        // single map lookup instead of containsKey() followed by get()
        PipelineSink<Future<SendResult>> sink = m_sinks.get(type);
        if (sink == null) {
            throw new IllegalArgumentException(String.format("Unknown message sink for topic %s", topic));
        }
        return sink;
    }

    /**
     * Collects all PipelineSink components from the container, keyed by their
     * role hint (the endpoint type).
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Override
    public void initialize() throws InitializationException {
        Map<String, PipelineSink> sinks = lookupMap(PipelineSink.class);
        for (Entry<String, PipelineSink> entry : sinks.entrySet()) {
            m_sinks.put(entry.getKey(), entry.getValue());
        }
    }
}
| apache-2.0 |
asedunov/intellij-community | java/java-tests/testSrc/com/intellij/java/codeInsight/template/postfix/templates/ForeachTemplateTest.java | 1546 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.codeInsight.template.postfix.templates;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import org.jetbrains.annotations.NotNull;
/**
 * Tests the "for" postfix template (expanding {@code expr.for} into a
 * foreach loop) against the fixture data selected by each test's name.
 */
public class ForeachTemplateTest extends PostfixTemplateTestCase {
    @NotNull
    @Override
    protected String getSuffix() {
        // the postfix suffix under test
        return "for";
    }

    public void testInts() {
        doTest();
    }

    public void testBeforeAssignment() {
        doTest();
    }

    public void testInAnonymousRunnable() {
        doTest();
    }

    public void testIterSameAsFor() {
        doTest();
    }

    public void testFinalLocals() {
        CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(getProject());
        // remember the global setting so it can be restored after the test
        boolean oldGenerateFinalLocals = settings.GENERATE_FINAL_LOCALS;
        try {
            // force "final" on generated locals and verify the template honors it
            settings.GENERATE_FINAL_LOCALS = true;
            doTest();
        }
        finally {
            // restore the shared setting so later tests are unaffected
            settings.GENERATE_FINAL_LOCALS = oldGenerateFinalLocals;
        }
    }
}
| apache-2.0 |
yuyupapa/OpenSource | scouter.client/src/scouter/client/stack/data/StackAnalyzedValue.java | 1957 | /*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.client.stack.data;
import scouter.client.stack.utils.NumberUtils;
/**
 * Value object for one analyzed stack entry: the analyzed text plus its
 * occurrence count and two percentage figures.
 */
public class StackAnalyzedValue {
    private String m_value = null; // analyzed text (e.g. a stack frame)
    private int m_count = 0;       // number of occurrences
    // Percentages are stored as scaled ints and rendered via
    // NumberUtils.intToPercent — assumes that helper formats the scaled value;
    // TODO confirm the exact scale against NumberUtils.
    private int m_intPct = 0;      // internal percentage (scaled int)
    private int m_extPct = 0;      // external percentage (scaled int)

    public StackAnalyzedValue() {
    }

    public StackAnalyzedValue(String value, int count, int intPct, int extPct) {
        m_value = value;
        m_count = count;
        m_intPct = intPct;
        m_extPct = extPct;
    }

    public String getValue() {
        return m_value;
    }

    public int getCount() {
        return m_count;
    }

    public int getIntPct() {
        return m_intPct;
    }

    public int getExtPct() {
        return m_extPct;
    }

    public void setValue(String value) {
        m_value = value;
    }

    public void setCount(int value) {
        m_count = value;
    }

    public void setIntPct(int value) {
        m_intPct = value;
    }

    public void setExtPct(int value) {
        m_extPct = value;
    }

    /** Increments the occurrence count by one. */
    public void addCount() {
        m_count++;
    }

    /**
     * Formats this value as a 4-column table row:
     * { count, "intPct%", "extPct%", value }.
     */
    public String[] toTableInfo() {
        String[] info = new String[4];
        // Plain conversion/concatenation replaces the original single-append
        // StringBuilder chains; the produced strings are identical.
        info[0] = String.valueOf(m_count);
        info[1] = NumberUtils.intToPercent(m_intPct) + "%";
        info[2] = NumberUtils.intToPercent(m_extPct) + "%";
        info[3] = m_value;
        return info;
    }
}
| apache-2.0 |
innovimax/vxquery | vxquery-core/src/main/java/org/apache/vxquery/runtime/functions/cast/CastToPositiveIntegerOperation.java | 1127 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.vxquery.runtime.functions.cast;
import org.apache.vxquery.datamodel.values.ValueTag;
/**
 * Cast operation targeting the xs:positiveInteger type.
 *
 * Reuses the integer-casting logic of {@link CastToIntegerOperation},
 * configured to reject negative input and tag results as xs:positiveInteger.
 * NOTE(review): xs:positiveInteger also excludes zero; whether zero is
 * rejected depends on the superclass — confirm there.
 */
public class CastToPositiveIntegerOperation extends CastToIntegerOperation {
    public CastToPositiveIntegerOperation() {
        // Disallow negative values for this cast target.
        negativeAllowed = false;
        // Tag produced values as xs:positiveInteger.
        returnTag = ValueTag.XS_POSITIVE_INTEGER_TAG;
    }
} | apache-2.0 |
anchela/jackrabbit-oak | oak-core/src/test/java/org/apache/jackrabbit/oak/security/user/MembershipTest.java | 5631 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.user;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.api.security.user.Group;
import org.apache.jackrabbit.api.security.user.User;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
/**
 * Tests membership handling over large and nested user/group graphs:
 * a user belonging to {@link #MANY_USERS} groups, a three-level nested group
 * tree, and repeated {@code addMember} calls. Each test also verifies the
 * expected interactions with the membership monitor mock.
 *
 * @since OAK 1.0
 */
public class MembershipTest extends MembershipBaseTest {

    /** Size used for the "large graph" scenarios. */
    private static final int MANY_USERS = 1000;

    @Before
    public void before() throws Exception {
        super.before();
    }

    /**
     * Drains {@code actual} and asserts that the IDs it yields are exactly
     * {@code expectedIds}: each expected ID is seen once, and nothing else
     * appears. The expected set is consumed by this call.
     */
    private static void assertExactIds(Iterator<? extends Authorizable> actual,
                                       Set<String> expectedIds) throws Exception {
        while (actual.hasNext()) {
            Assert.assertTrue(expectedIds.remove(actual.next().getID()));
        }
        assertEquals(0, expectedIds.size());
    }

    @Test
    public void testManyMemberships() throws Exception {
        User member = createUser();
        Set<String> expectedGroupIds = new HashSet<>();
        for (int i = 0; i < MANY_USERS; i++) {
            Group g = createGroup();
            g.addMember(member);
            expectedGroupIds.add(g.getID());
        }
        root.commit();

        assertExactIds(member.declaredMemberOf(), expectedGroupIds);

        verify(monitor, times(1)).doneMemberOf(anyLong(), eq(true));
        verify(monitor, never()).doneMemberOf(anyLong(), eq(false));
    }

    @Test
    public void testNestedMembers() throws Exception {
        Set<String> allMemberIds = new HashSet<>();
        Set<String> directMemberIds = new HashSet<>();

        // Build a 10 x 10 x 10 tree: top -> groups -> groups -> users.
        Group top = createGroup();
        for (int i = 0; i < 10; i++) {
            Group child = createGroup();
            top.addMember(child);
            allMemberIds.add(child.getID());
            directMemberIds.add(child.getID());
            for (int j = 0; j < 10; j++) {
                Group grandChild = createGroup();
                child.addMember(grandChild);
                allMemberIds.add(grandChild.getID());
                for (int k = 0; k < 10; k++) {
                    User u = createUser();
                    grandChild.addMember(u);
                    allMemberIds.add(u.getID());
                }
            }
        }
        root.commit();

        // Transitive membership covers the whole tree ...
        assertMembers(top, allMemberIds);
        // ... while declared membership is only the first level.
        assertExactIds(top.getDeclaredMembers(), directMemberIds);
    }

    @Test
    public void testNestedMemberships() throws Exception {
        User member = createUser();
        Set<String> expectedGroupIds = new HashSet<>();

        // Nested 10 x 10 group tree; the user is added to every leaf group.
        Group top = createGroup();
        expectedGroupIds.add(top.getID());
        for (int i = 0; i < 10; i++) {
            Group child = createGroup();
            top.addMember(child);
            expectedGroupIds.add(child.getID());
            for (int j = 0; j < 10; j++) {
                Group grandChild = createGroup();
                child.addMember(grandChild);
                expectedGroupIds.add(grandChild.getID());
                grandChild.addMember(member);
            }
        }
        root.commit();

        assertExactIds(member.memberOf(), expectedGroupIds);

        verify(monitor, times(210)).doneUpdateMembers(anyLong(), eq(1L), eq(0L), eq(false));
        verify(monitor, times(1)).doneMemberOf(anyLong(), eq(false));
        verify(monitor, never()).doneMemberOf(anyLong(), eq(true));
    }

    @Test
    public void testAddMembersAgain() throws Exception {
        Set<String> memberIds = new HashSet<>();
        Group grp = createGroup();
        for (int i = 0; i < MANY_USERS; i++) {
            User u = createUser();
            grp.addMember(u);
            memberIds.add(u.getID());
        }
        root.commit();
        verify(monitor, times(MANY_USERS)).doneUpdateMembers(anyLong(), eq(1L), eq(0L), eq(false));

        // Re-adding existing members must be a no-op that reports false.
        for (String id : memberIds) {
            assertFalse(grp.addMember(userMgr.getAuthorizable(id)));
        }
        verify(monitor, times(MANY_USERS)).doneUpdateMembers(anyLong(), eq(1L), eq(1L), eq(false));
        verifyNoMoreInteractions(monitor);
    }
} | apache-2.0 |
onders86/camel | components/camel-metrics/src/main/java/org/apache/camel/component/metrics/MetricsComponent.java | 5110 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.metrics;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Slf4jReporter;
import org.apache.camel.Endpoint;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.impl.UriEndpointComponent;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.Registry;
import org.apache.camel.util.StringHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Camel component managing metrics endpoints. Endpoint URIs take the form
 * {@code metrics:[type:]name}; when no type prefix is given,
 * {@link #DEFAULT_METRICS_TYPE} is used. All endpoints created by this
 * component share one {@link MetricRegistry}, resolved lazily from the Camel
 * registry or created with a default SLF4J reporter.
 */
public class MetricsComponent extends UriEndpointComponent {

    public static final String METRIC_REGISTRY_NAME = "metricRegistry";
    public static final MetricsType DEFAULT_METRICS_TYPE = MetricsType.METER;
    public static final long DEFAULT_REPORTING_INTERVAL_SECONDS = 60L;

    private static final Logger LOG = LoggerFactory.getLogger(MetricsComponent.class);

    @Metadata(label = "advanced")
    private MetricRegistry metricRegistry;

    public MetricsComponent() {
        super(MetricsEndpoint.class);
    }

    @Override
    protected Endpoint createEndpoint(String uri, String remaining, Map<String, Object> parameters) throws Exception {
        // Resolve the shared registry on first use.
        if (metricRegistry == null) {
            Registry camelRegistry = getCamelContext().getRegistry();
            metricRegistry = getOrCreateMetricRegistry(camelRegistry, METRIC_REGISTRY_NAME);
        }
        String name = getMetricsName(remaining);
        MetricsType type = getMetricsType(remaining);
        LOG.debug("Metrics type: {}; name: {}", type, name);
        Endpoint endpoint = new MetricsEndpoint(uri, this, metricRegistry, type, name);
        setProperties(endpoint, parameters);
        return endpoint;
    }

    /** Extracts the metric name: the part after "type:", or the whole path. */
    String getMetricsName(String remaining) {
        String afterColon = StringHelper.after(remaining, ":");
        return afterColon != null ? afterColon : remaining;
    }

    /**
     * Resolves the metric type from the "type:" prefix, defaulting to
     * {@link #DEFAULT_METRICS_TYPE} when absent.
     *
     * @throws RuntimeCamelException when a prefix is present but unknown
     */
    MetricsType getMetricsType(String remaining) {
        String prefix = StringHelper.before(remaining, ":");
        MetricsType type = prefix == null ? DEFAULT_METRICS_TYPE : MetricsType.getByName(prefix);
        if (type == null) {
            throw new RuntimeCamelException("Unknown metrics type \"" + prefix + "\"");
        }
        return type;
    }

    /** Looks the registry up in the Camel registry, creating a default one when absent. */
    MetricRegistry getOrCreateMetricRegistry(Registry camelRegistry, String registryName) {
        LOG.debug("Looking up MetricRegistry from Camel Registry for name \"{}\"", registryName);
        MetricRegistry registry = getMetricRegistryFromCamelRegistry(camelRegistry, registryName);
        if (registry != null) {
            return registry;
        }
        LOG.debug("MetricRegistry not found from Camel Registry for name \"{}\"", registryName);
        LOG.info("Creating new default MetricRegistry");
        return createMetricRegistry();
    }

    /**
     * Finds a registry by name, falling back to a by-type lookup when exactly
     * one candidate exists; null otherwise.
     */
    MetricRegistry getMetricRegistryFromCamelRegistry(Registry camelRegistry, String registryName) {
        MetricRegistry byName = camelRegistry.lookupByNameAndType(registryName, MetricRegistry.class);
        if (byName != null) {
            return byName;
        }
        Set<MetricRegistry> candidates = camelRegistry.findByType(MetricRegistry.class);
        return candidates.size() == 1 ? candidates.iterator().next() : null;
    }

    /** Creates a fresh registry with a periodic SLF4J debug reporter attached. */
    MetricRegistry createMetricRegistry() {
        MetricRegistry registry = new MetricRegistry();
        final Slf4jReporter reporter = Slf4jReporter.forRegistry(registry)
                .outputTo(LOG)
                .convertRatesTo(TimeUnit.SECONDS)
                .convertDurationsTo(TimeUnit.MILLISECONDS)
                .withLoggingLevel(Slf4jReporter.LoggingLevel.DEBUG)
                .build();
        reporter.start(DEFAULT_REPORTING_INTERVAL_SECONDS, TimeUnit.SECONDS);
        return registry;
    }

    public MetricRegistry getMetricRegistry() {
        return metricRegistry;
    }

    /**
     * To use a custom configured MetricRegistry.
     */
    public void setMetricRegistry(MetricRegistry metricRegistry) {
        this.metricRegistry = metricRegistry;
    }
}
| apache-2.0 |
yuyupapa/OpenSource | scouter.client/src/scouter/client/stack/config/preprocessor/ProcessorReplace.java | 1720 | /*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.client.stack.config.preprocessor;
import org.w3c.dom.Node;
/**
 * Pre-processor that rewrites each stack line by regex replacement.
 * Configured from an XML node with mandatory "source" (regex) and "target"
 * (replacement) attributes, plus an optional "all" flag selecting between
 * replace-all (default) and replace-first.
 */
public class ProcessorReplace extends Processor {
    private String m_source = null;  // regex pattern to search for
    private String m_target = null;  // replacement text
    private boolean m_isAll = true;  // true: replace every match; false: only the first

    public ProcessorReplace() {
        setType(Processor.TYPE.REPLACE);
    }

    /** Applies the configured replacement to one line. */
    public String process(String line) {
        return m_isAll ? line.replaceAll(m_source, m_target)
                       : line.replaceFirst(m_source, m_target);
    }

    /** Reads source/target (mandatory) and the optional "all" flag from the node. */
    public void readConfig(ParserPreProcessorReader reader, Node node) {
        try {
            m_source = reader.getAttribute(node, "source");
            m_target = reader.getAttribute(node, "target");
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }

        String allAttribute = null;
        try {
            allAttribute = reader.getAttribute(node, "all");
        } catch (Exception ex) {
            // "all" is optional; keep the default (replace all occurrences).
        }
        if (allAttribute != null) {
            try {
                m_isAll = Boolean.parseBoolean(allAttribute);
            } catch (Exception ex) {
                throw new RuntimeException("all attribute(true/false) of replace type processor is abnormal!(" + allAttribute + ")");
            }
        }
    }
}
| apache-2.0 |
marktriggs/nyu-sakai-10.4 | portal/portal-service-impl/impl/src/java/org/sakaiproject/portal/service/PortalServiceImpl.java | 17816 | /**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.portal.service;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pluto.core.PortletContextManager;
import org.apache.pluto.descriptors.portlet.PortletAppDD;
import org.apache.pluto.descriptors.portlet.PortletDD;
import org.apache.pluto.internal.InternalPortletContext;
import org.apache.pluto.spi.optional.PortletRegistryService;
import org.exolab.castor.util.LocalConfiguration;
import org.exolab.castor.util.Configuration.Property;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.component.api.ServerConfigurationService;
import org.sakaiproject.content.api.ContentHostingService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.portal.api.BaseEditor;
import org.sakaiproject.portal.api.Editor;
import org.sakaiproject.portal.api.EditorRegistry;
import org.sakaiproject.portal.api.Portal;
import org.sakaiproject.portal.api.PortalHandler;
import org.sakaiproject.portal.api.PortalRenderEngine;
import org.sakaiproject.portal.api.PortalService;
import org.sakaiproject.portal.api.PortletApplicationDescriptor;
import org.sakaiproject.portal.api.PortletDescriptor;
import org.sakaiproject.portal.api.SiteNeighbourhoodService;
import org.sakaiproject.portal.api.StoredState;
import org.sakaiproject.portal.api.StyleAbleProvider;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.tool.api.Placement;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.tool.cover.SessionManager;
/**
* @author ieb
* @since Sakai 2.4
* @version $Rev$
*/
public class PortalServiceImpl implements PortalService
{
private static final Log log = LogFactory.getLog(PortalServiceImpl.class);
/**
* Parameter to force state reset
*/
public static final String PARM_STATE_RESET = "sakai.state.reset";
private static final String PORTAL_SKIN_NEOPREFIX_PROPERTY = "portal.neoprefix";
private static final String PORTAL_SKIN_NEOPREFIX_DEFAULT = "neo-";
private static String portalSkinPrefix;
private Map<String, PortalRenderEngine> renderEngines = new ConcurrentHashMap<String, PortalRenderEngine>();
private Map<String, Map<String, PortalHandler>> handlerMaps = new ConcurrentHashMap<String, Map<String, PortalHandler>>();
private Map<String, Portal> portals = new ConcurrentHashMap<String, Portal>();
private ServerConfigurationService serverConfigurationService;
private StyleAbleProvider stylableServiceProvider;
private SiteNeighbourhoodService siteNeighbourhoodService;
private String m_portalLinks;
private ContentHostingService contentHostingService;
private EditorRegistry editorRegistry;
private Editor noopEditor = new BaseEditor("noop", "noop", "", "");
public void init()
{
try
{
stylableServiceProvider = (StyleAbleProvider) ComponentManager
.get(StyleAbleProvider.class.getName());
serverConfigurationService = (ServerConfigurationService) ComponentManager
.get(ServerConfigurationService.class.getName());
try
{
// configure the parser for castor.. before anything else get a
// chance
Properties castorProperties = LocalConfiguration.getDefault();
String parser = serverConfigurationService.getString(
"sakai.xml.sax.parser",
"com.sun.org.apache.xerces.internal.parsers.SAXParser");
log.info("Configured Castor to use SAX Parser " + parser);
castorProperties.put(Property.Parser, parser);
}
catch (Exception ex)
{
log.error("Failed to configure Castor", ex);
}
portalSkinPrefix = serverConfigurationService.getString(PORTAL_SKIN_NEOPREFIX_PROPERTY, PORTAL_SKIN_NEOPREFIX_DEFAULT);
if (portalSkinPrefix == null) {
portalSkinPrefix = "";
}
}
catch (Exception ex)
{
}
if (stylableServiceProvider == null)
{
log.info("No Styleable Provider found, the portal will not be stylable");
}
}
public StoredState getStoredState()
{
Session s = SessionManager.getCurrentSession();
StoredState ss = (StoredState) s.getAttribute("direct-stored-state");
log.debug("Got Stored State as [" + ss + "]");
return ss;
}
public void setStoredState(StoredState ss)
{
Session s = SessionManager.getCurrentSession();
if (s.getAttribute("direct-stored-state") == null || ss == null)
{
StoredState ssx = (StoredState) s.getAttribute("direct-stored-state");
log.debug("Removing Stored state " + ssx);
if (ssx != null)
{
Exception ex = new Exception("traceback");
log.debug("Removing active Stored State Traceback gives location ", ex);
}
s.setAttribute("direct-stored-state", ss);
log.debug(" Set StoredState as [" + ss + "]");
}
}
private static final String TOOLSTATE_PARAM_PREFIX = "toolstate-";
private static String computeToolStateParameterName(String placementId)
{
return TOOLSTATE_PARAM_PREFIX + placementId;
}
public String decodeToolState(Map<String, String[]> params, String placementId)
{
String attrname = computeToolStateParameterName(placementId);
String[] attrval = params.get(attrname);
return attrval == null ? null : attrval[0];
}
public Map<String, String[]> encodeToolState(String placementId, String URLstub)
{
String attrname = computeToolStateParameterName(placementId);
Map<String, String[]> togo = new HashMap<String, String[]>();
// could assemble state from other visible tools here
togo.put(attrname, new String[] { URLstub });
return togo;
}
// To allow us to retain reset state across redirects
public String getResetState()
{
Session s = SessionManager.getCurrentSession();
String ss = (String) s.getAttribute("reset-stored-state");
return ss;
}
public void setResetState(String ss)
{
Session s = SessionManager.getCurrentSession();
if (s.getAttribute("reset-stored-state") == null || ss == null)
{
s.setAttribute("reset-stored-state", ss);
}
}
public boolean isEnableDirect()
{
boolean directEnable = "true".equals(serverConfigurationService.getString(
"charon.directurl", "true"));
log.debug("Direct Enable is " + directEnable);
return directEnable;
}
public boolean isResetRequested(HttpServletRequest req)
{
return "true".equals(req.getParameter(PARM_STATE_RESET))
|| "true".equals(getResetState());
}
public String getResetStateParam()
{
// TODO Auto-generated method stub
return PARM_STATE_RESET;
}
public StoredState newStoredState(String marker, String replacement)
{
log.debug("Storing State for Marker=[" + marker + "] replacement=[" + replacement
+ "]");
return new StoredStateImpl(marker, replacement);
}
public Iterator<PortletApplicationDescriptor> getRegisteredApplications()
{
PortletRegistryService registry = PortletContextManager.getManager();
final Iterator apps = registry.getRegisteredPortletApplications();
return new Iterator<PortletApplicationDescriptor>()
{
public boolean hasNext()
{
return apps.hasNext();
}
public PortletApplicationDescriptor next()
{
final InternalPortletContext pc = (InternalPortletContext) apps.next();
final PortletAppDD appDD = pc.getPortletApplicationDefinition();
return new PortletApplicationDescriptor()
{
public String getApplicationContext()
{
return pc.getPortletContextName();
}
public String getApplicationId()
{
return pc.getApplicationId();
}
public String getApplicationName()
{
return pc.getApplicationId();
}
public Iterator<PortletDescriptor> getPortlets()
{
if (appDD != null)
{
List portlets = appDD.getPortlets();
final Iterator portletsI = portlets.iterator();
return new Iterator<PortletDescriptor>()
{
public boolean hasNext()
{
return portletsI.hasNext();
}
public PortletDescriptor next()
{
final PortletDD pdd = (PortletDD) portletsI.next();
return new PortletDescriptor()
{
public String getPortletId()
{
return pdd.getPortletName();
}
public String getPortletName()
{
return pdd.getPortletName();
}
};
}
public void remove()
{
}
};
}
else
{
log.warn(" Portlet Application has no portlets "
+ pc.getPortletContextName());
return new Iterator<PortletDescriptor>()
{
public boolean hasNext()
{
return false;
}
public PortletDescriptor next()
{
return null;
}
public void remove()
{
}
};
}
}
};
}
public void remove()
{
}
};
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.portal.api.PortalService#getRenderEngine(javax.servlet.http.HttpServletRequest)
*/
public PortalRenderEngine getRenderEngine(String context, HttpServletRequest request)
{
// at this point we ignore request but we might use ut to return more
// than one render engine
if (context == null || context.length() == 0)
{
context = Portal.DEFAULT_PORTAL_CONTEXT;
}
return (PortalRenderEngine) renderEngines.get(context);
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.portal.api.PortalService#addRenderEngine(org.sakaiproject.portal.api.PortalRenderEngine)
*/
public void addRenderEngine(String context, PortalRenderEngine vengine)
{
renderEngines.put(context, vengine);
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.portal.api.PortalService#removeRenderEngine(org.sakaiproject.portal.api.PortalRenderEngine)
*/
public void removeRenderEngine(String context, PortalRenderEngine vengine)
{
renderEngines.remove(context);
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.portal.api.PortalService#addHandler(java.lang.String,
* org.sakaiproject.portal.api.PortalHandler)
*/
public void addHandler(Portal portal, PortalHandler handler)
{
String portalContext = portal.getPortalContext();
Map<String, PortalHandler> handlerMap = getHandlerMap(portal);
String urlFragment = handler.getUrlFragment();
PortalHandler ph = handlerMap.get(urlFragment);
if (ph != null)
{
handler.deregister(portal);
log.warn("Handler Present on " + urlFragment + " will replace " + ph
+ " with " + handler);
}
handler.register(portal, this, portal.getServletContext());
handlerMap.put(urlFragment, handler);
log.info("URL " + portalContext + ":/" + urlFragment + " will be handled by "
+ handler);
}
public void addHandler(String portalContext, PortalHandler handler)
{
Portal portal = portals.get(portalContext);
if (portal == null)
{
Map<String, PortalHandler> handlerMap = getHandlerMap(portalContext, true);
handlerMap.put(handler.getUrlFragment(), handler);
log.debug("Registered handler ("+ handler+ ") for portal ("+portalContext+ ") that doesn't yet exist.");
}
else
{
addHandler(portal, handler);
}
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.portal.api.PortalService#getHandlerMap(java.lang.String)
*/
public Map<String, PortalHandler> getHandlerMap(Portal portal)
{
return getHandlerMap(portal.getPortalContext(), true);
}
private Map<String, PortalHandler> getHandlerMap(String portalContext, boolean create)
{
Map<String, PortalHandler> handlerMap = handlerMaps.get(portalContext);
if (create && handlerMap == null)
{
handlerMap = new ConcurrentHashMap<String, PortalHandler>();
handlerMaps.put(portalContext, handlerMap);
}
return handlerMap;
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.portal.api.PortalService#removeHandler(java.lang.String,
* java.lang.String) This method it NOT thread safe, but the likelyhood
* of a co
*/
public void removeHandler(Portal portal, String urlFragment)
{
Map<String, PortalHandler> handlerMap = getHandlerMap(portal.getPortalContext(), false);
if (handlerMap != null)
{
PortalHandler ph = handlerMap.get(urlFragment);
if (ph != null)
{
ph.deregister(portal);
handlerMap.remove(urlFragment);
log.warn("Handler Present on " + urlFragment + " " + ph
+ " will be removed ");
}
}
}
public void removeHandler(String portalContext, String urlFragment)
{
Portal portal = portals.get(portalContext);
if (portal == null)
{
log.warn("Attempted to remove handler("+ urlFragment+ ") from non existent portal ("+portalContext+")");
}
else
{
removeHandler(portal, urlFragment);
}
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.portal.api.PortalService#addPortal(org.sakaiproject.portal.api.Portal)
*/
public void addPortal(Portal portal)
{
String portalContext = portal.getPortalContext();
portals.put(portalContext, portal);
// reconnect any handlers
Map<String, PortalHandler> phm = getHandlerMap(portal);
for (Iterator<PortalHandler> pIterator = phm.values().iterator(); pIterator
.hasNext();)
{
PortalHandler ph = pIterator.next();
ph.register(portal, this, portal.getServletContext());
}
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.portal.api.PortalService#removePortal(org.sakaiproject.portal.api.Portal)
*/
public void removePortal(Portal portal)
{
String portalContext = portal.getPortalContext();
portals.remove(portalContext);
}
/*
* (non-Javadoc)
*
* @see org.sakaiproject.portal.api.PortalService#getStylableService()
*/
public StyleAbleProvider getStylableService()
{
return stylableServiceProvider;
}
/* (non-Javadoc)
* @see org.sakaiproject.portal.api.PortalService#getSiteNeighbourhoodService()
*/
public SiteNeighbourhoodService getSiteNeighbourhoodService()
{
return siteNeighbourhoodService;
}
/**
* @param siteNeighbourhoodService the siteNeighbourhoodService to set
*/
public void setSiteNeighbourhoodService(SiteNeighbourhoodService siteNeighbourhoodService)
{
this.siteNeighbourhoodService = siteNeighbourhoodService;
}
/* optional portal links for portal header (SAK-22912)
*/
public String getPortalLinks()
{
return m_portalLinks;
}
public ContentHostingService getContentHostingService() {
return contentHostingService;
}
/**
* @param portalLinks the portal icons to set
*/
public void setPortalLinks(String portalLinks)
{
m_portalLinks = portalLinks;
}
public void setContentHostingService(ContentHostingService contentHostingService) {
this.contentHostingService = contentHostingService;
}
public String getBrowserCollectionId(Placement placement) {
String collectionId = null;
if (placement != null) {
collectionId = getContentHostingService().getSiteCollection(placement.getContext());
}
if (collectionId == null) {
collectionId = getContentHostingService().getSiteCollection("~" + SessionManager.getCurrentSessionUserId());
}
return collectionId;
}
public Editor getActiveEditor() {
return getActiveEditor(null);
}
public Editor getActiveEditor(Placement placement) {
String systemEditor = serverConfigurationService.getString("wysiwyg.editor", "ckeditor");
String activeEditor = systemEditor;
if (placement != null) {
//Allow tool- or user-specific editors?
try {
Site site = SiteService.getSite(placement.getContext());
Object o = site.getProperties().get("wysiwyg.editor");
if (o != null) {
activeEditor = o.toString();
}
}
catch (IdUnusedException ex) {
if (log.isDebugEnabled()) {
log.debug(ex.getMessage());
}
}
}
Editor editor = getEditorRegistry().getEditor(activeEditor);
if (editor == null) {
// Load a base no-op editor so sakai.editor.launch calls succeed.
// We may decide to offer some textarea infrastructure as well. In
// this case, there are editor and launch files being consulted
// already from /library/, which is easier to patch and deploy.
editor = getEditorRegistry().getEditor("textarea");
}
if (editor == null) {
// If, for some reason, our stub editor is null, give an instance
// that doesn't even try to load files. This will result in script
// errors because sakai.editor.launch will not be defined, but
// this way, we can't suffer NPEs. In some cases, this degradation
// will be graceful enough that the page can function.
editor = noopEditor;
}
return editor;
}
public EditorRegistry getEditorRegistry() {
return editorRegistry;
}
public void setEditorRegistry(EditorRegistry editorRegistry) {
this.editorRegistry = editorRegistry;
}
public String getSkinPrefix() {
return portalSkinPrefix;
}
}
| apache-2.0 |
basio/graph | giraph-hive/src/main/java/org/apache/giraph/hive/input/vertex/SimpleHiveToVertex.java | 3548 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.hive.input.vertex;
import org.apache.giraph.edge.Edge;
import org.apache.giraph.edge.OutEdges;
import org.apache.giraph.graph.Vertex;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import com.facebook.hiveio.record.HiveReadableRecord;
import java.util.Iterator;
/**
 * Simple implementation of {@link HiveToVertex} when each vertex is in one
 * row of the input. Subclasses map a {@link HiveReadableRecord} to the
 * vertex's ID, value and edges; this class drives the iteration and recycles
 * a single Vertex object across {@link #next()} calls.
 *
 * @param <I> Vertex ID
 * @param <V> Vertex Value
 * @param <E> Edge Value
 */
public abstract class SimpleHiveToVertex<I extends WritableComparable,
    V extends Writable, E extends Writable>
    extends AbstractHiveToVertex<I, V, E> {
  /** Hive records which we are reading from */
  private Iterator<HiveReadableRecord> records;
  /** Reusable vertex object */
  private Vertex<I, V, E> reusableVertex;
  /** Reusable vertex id */
  private I reusableVertexId;
  /** Reusable vertex value */
  private V reusableVertexValue;
  /** Reusable edges */
  private OutEdges<I, E> reusableOutEdges;

  /**
   * Read the Vertex's ID from the HiveRecord given.
   *
   * @param record HiveRecord to read from.
   * @return Vertex ID
   */
  public abstract I getVertexId(HiveReadableRecord record);

  /**
   * Read the Vertex's Value from the HiveRecord given.
   *
   * @param record HiveRecord to read from.
   * @return Vertex Value
   */
  public abstract V getVertexValue(HiveReadableRecord record);

  /**
   * Read Vertex's edges from the HiveRecord given.
   *
   * @param record HiveRecord to read from.
   * @return iterable of edges
   */
  public abstract Iterable<Edge<I, E>> getEdges(HiveReadableRecord record);

  @Override
  public void initializeRecords(Iterator<HiveReadableRecord> records) {
    this.records = records;
    reusableVertex = getConf().createVertex();
    reusableVertexId = getConf().createVertexId();
    reusableVertexValue = getConf().createVertexValue();
    reusableOutEdges = getConf().createOutEdges();
  }

  @Override
  public boolean hasNext() {
    return records.hasNext();
  }

  @Override
  public Vertex<I, V, E> next() {
    HiveReadableRecord record = records.next();
    I id = getVertexId(record);
    V value = getVertexValue(record);
    Iterable<Edge<I, E>> edges = getEdges(record);
    // NOTE: the same Vertex instance is re-initialized and returned on every
    // call; callers must consume it before advancing the iterator.
    reusableVertex.initialize(id, value, edges);
    return reusableVertex;
  }

  protected I getReusableVertexId() {
    return reusableVertexId;
  }

  protected V getReusableVertexValue() {
    return reusableVertexValue;
  }

  /**
   * Get reusable OutEdges object
   *
   * @param <OE> Type of OutEdges
   * @return Reusable OutEdges object
   */
  @SuppressWarnings("unchecked")
  protected <OE extends OutEdges<I, E>> OE getReusableOutEdges() {
    // Unchecked by design: the caller asserts the concrete OutEdges type
    // created by the configuration; a mismatch surfaces as a CCE at use site.
    return (OE) reusableOutEdges;
  }
}
| apache-2.0 |
adessaigne/camel | core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/MvelEndpointBuilderFactory.java | 12497 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import javax.annotation.Generated;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
/**
* Transform messages using an MVEL template.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface MvelEndpointBuilderFactory {
    /**
     * Builder for endpoint for the MVEL component.
     */
    public interface MvelEndpointBuilder extends EndpointProducerBuilder {
        /**
         * Switches this builder to its advanced view, exposing the
         * expert-level options.
         */
        default AdvancedMvelEndpointBuilder advanced() {
            return (AdvancedMvelEndpointBuilder) this;
        }
        /**
         * Sets whether the context map should allow access to all details. By
         * default only the message body and headers can be accessed. This
         * option can be enabled for full access to the current Exchange and
         * CamelContext. Doing so impose a potential security risk as this opens
         * access to the full power of CamelContext API.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default MvelEndpointBuilder allowContextMapAll(
                boolean allowContextMapAll) {
            doSetProperty("allowContextMapAll", allowContextMapAll);
            return this;
        }
        /**
         * Sets whether the context map should allow access to all details. By
         * default only the message body and headers can be accessed. This
         * option can be enabled for full access to the current Exchange and
         * CamelContext. Doing so impose a potential security risk as this opens
         * access to the full power of CamelContext API.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default MvelEndpointBuilder allowContextMapAll(String allowContextMapAll) {
            doSetProperty("allowContextMapAll", allowContextMapAll);
            return this;
        }
        /**
         * Whether to allow to use resource template from header or not (default
         * false). Enabling this allows to specify dynamic templates via message
         * header. However this can be seen as a potential security
         * vulnerability if the header is coming from a malicious user, so use
         * this with care.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default MvelEndpointBuilder allowTemplateFromHeader(
                boolean allowTemplateFromHeader) {
            doSetProperty("allowTemplateFromHeader", allowTemplateFromHeader);
            return this;
        }
        /**
         * Whether to allow to use resource template from header or not (default
         * false). Enabling this allows to specify dynamic templates via message
         * header. However this can be seen as a potential security
         * vulnerability if the header is coming from a malicious user, so use
         * this with care.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default MvelEndpointBuilder allowTemplateFromHeader(
                String allowTemplateFromHeader) {
            doSetProperty("allowTemplateFromHeader", allowTemplateFromHeader);
            return this;
        }
        /**
         * Sets whether to use resource content cache or not.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default MvelEndpointBuilder contentCache(boolean contentCache) {
            doSetProperty("contentCache", contentCache);
            return this;
        }
        /**
         * Sets whether to use resource content cache or not.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default MvelEndpointBuilder contentCache(String contentCache) {
            doSetProperty("contentCache", contentCache);
            return this;
        }
        /**
         * Character encoding of the resource content.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: producer
         */
        default MvelEndpointBuilder encoding(String encoding) {
            doSetProperty("encoding", encoding);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default MvelEndpointBuilder lazyStartProducer(boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         */
        default MvelEndpointBuilder lazyStartProducer(String lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
    }
    /**
     * Advanced builder for endpoint for the MVEL component.
     */
    public interface AdvancedMvelEndpointBuilder
            extends
                EndpointProducerBuilder {
        /**
         * Switches back to the basic view of this builder.
         */
        default MvelEndpointBuilder basic() {
            return (MvelEndpointBuilder) this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedMvelEndpointBuilder basicPropertyBinding(
                boolean basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Whether the endpoint should use basic property binding (Camel 2.x) or
         * the newer property binding with additional capabilities.
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedMvelEndpointBuilder basicPropertyBinding(
                String basicPropertyBinding) {
            doSetProperty("basicPropertyBinding", basicPropertyBinding);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedMvelEndpointBuilder synchronous(boolean synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
        /**
         * Sets whether synchronous processing should be strictly used, or Camel
         * is allowed to use asynchronous processing (if supported).
         *
         * The option will be converted to a <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         */
        default AdvancedMvelEndpointBuilder synchronous(String synchronous) {
            doSetProperty("synchronous", synchronous);
            return this;
        }
    }
    /**
     * DSL entry points for creating MVEL endpoint builders; mixed into the
     * generated endpoint builder factories.
     */
    public interface MvelBuilders {
        /**
         * MVEL (camel-mvel)
         * Transform messages using an MVEL template.
         *
         * Category: transformation,script
         * Since: 2.12
         * Maven coordinates: org.apache.camel:camel-mvel
         *
         * Syntax: <code>mvel:resourceUri</code>
         *
         * Path parameter: resourceUri (required)
         * Path to the resource. You can prefix with: classpath, file, http,
         * ref, or bean. classpath, file and http loads the resource using these
         * protocols (classpath is default). ref will lookup the resource in the
         * registry. bean will call a method on a bean to be used as the
         * resource. For bean you can specify the method name after dot, eg
         * bean:myBean.myMethod.
         *
         * @param path resourceUri
         */
        default MvelEndpointBuilder mvel(String path) {
            return MvelEndpointBuilderFactory.endpointBuilder("mvel", path);
        }
        /**
         * MVEL (camel-mvel)
         * Transform messages using an MVEL template.
         *
         * Category: transformation,script
         * Since: 2.12
         * Maven coordinates: org.apache.camel:camel-mvel
         *
         * Syntax: <code>mvel:resourceUri</code>
         *
         * Path parameter: resourceUri (required)
         * Path to the resource. You can prefix with: classpath, file, http,
         * ref, or bean. classpath, file and http loads the resource using these
         * protocols (classpath is default). ref will lookup the resource in the
         * registry. bean will call a method on a bean to be used as the
         * resource. For bean you can specify the method name after dot, eg
         * bean:myBean.myMethod.
         *
         * @param componentName to use a custom component name for the endpoint
         * instead of the default name
         * @param path resourceUri
         */
        default MvelEndpointBuilder mvel(String componentName, String path) {
            return MvelEndpointBuilderFactory.endpointBuilder(componentName, path);
        }
    }
    /**
     * Creates a new MVEL endpoint builder bound to the given component name
     * and resource path. The returned instance implements both the basic and
     * the advanced builder interfaces.
     */
    static MvelEndpointBuilder endpointBuilder(String componentName, String path) {
        // Local class captures componentName from the enclosing method.
        class MvelEndpointBuilderImpl extends AbstractEndpointBuilder implements MvelEndpointBuilder, AdvancedMvelEndpointBuilder {
            public MvelEndpointBuilderImpl(String path) {
                super(componentName, path);
            }
        }
        return new MvelEndpointBuilderImpl(path);
    }
}
twitter/heron | storm-compatibility/v2.2.0/src/java/org/apache/storm/task/IErrorReporter.java | 915 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.storm.task;
/**
 * Callback interface for components that can receive errors raised during
 * processing. How a reported error is surfaced is implementation-specific.
 */
public interface IErrorReporter {
  /**
   * Reports the given error to this reporter.
   *
   * @param error the throwable to report
   */
  void reportError(Throwable error);
}
| apache-2.0 |
ThiagoGarciaAlves/intellij-community | platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/ChameleonSyntaxHighlightingPass.java | 8538 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeHighlighting.MainHighlightingPassFactory;
import com.intellij.codeHighlighting.Pass;
import com.intellij.codeHighlighting.TextEditorHighlightingPass;
import com.intellij.codeHighlighting.TextEditorHighlightingPassRegistrar;
import com.intellij.lang.Language;
import com.intellij.openapi.components.AbstractProjectComponent;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.HighlighterColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.fileTypes.SyntaxHighlighter;
import com.intellij.openapi.fileTypes.SyntaxHighlighterFactory;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.ProperTextRange;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.SyntaxTraverser;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.IFileElementType;
import com.intellij.psi.tree.ILazyParseableElementType;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.TreeTraversal;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import static com.intellij.psi.SyntaxTraverser.psiTraverser;
/**
 * Highlighting pass that finds lazy-parseable ("chameleon") elements in the
 * file and re-highlights their leaf tokens with the syntax highlighter of the
 * element's own language, overriding the host file's attributes.
 */
class ChameleonSyntaxHighlightingPass extends GeneralHighlightingPass {
  public static class Factory extends AbstractProjectComponent implements MainHighlightingPassFactory {
    protected Factory(Project project, TextEditorHighlightingPassRegistrar registrar) {
      super(project);
      // Runs after the general UPDATE_ALL pass; no explicit anchor pass id.
      registrar.registerTextEditorHighlightingPass(this, null, new int[]{Pass.UPDATE_ALL}, false, -1);
    }
    @Nullable
    @Override
    public TextEditorHighlightingPass createHighlightingPass(@NotNull PsiFile file, @NotNull Editor editor) {
      TextRange restrict = FileStatusMap.getDirtyTextRange(editor, Pass.UPDATE_ALL);
      // Nothing is dirty -> nothing to highlight; return a no-op pass.
      if (restrict == null) return new ProgressableTextEditorHighlightingPass.EmptyPass(myProject, editor.getDocument());
      ProperTextRange priority = VisibleHighlightingPassFactory.calculateVisibleRange(editor);
      return new ChameleonSyntaxHighlightingPass(myProject, file, editor.getDocument(), ProperTextRange.create(restrict),
                                                 priority, editor, new DefaultHighlightInfoProcessor());
    }
    @Nullable
    @Override
    public TextEditorHighlightingPass createMainHighlightingPass(@NotNull PsiFile file,
                                                                 @NotNull Document document,
                                                                 @NotNull HighlightInfoProcessor highlightInfoProcessor) {
      // Main pass covers the whole document, with no editor attached.
      ProperTextRange range = ProperTextRange.from(0, document.getTextLength());
      return new ChameleonSyntaxHighlightingPass(myProject, file, document, range, range, null, highlightInfoProcessor);
    }
  }
  ChameleonSyntaxHighlightingPass(@NotNull Project project,
                                  @NotNull PsiFile file,
                                  @NotNull Document document,
                                  @NotNull ProperTextRange restrictRange,
                                  @NotNull ProperTextRange priorityRange,
                                  @Nullable Editor editor,
                                  @NotNull HighlightInfoProcessor highlightInfoProcessor) {
    super(project, file, document, restrictRange.getStartOffset(), restrictRange.getEndOffset(), true, priorityRange, editor,
          highlightInfoProcessor);
  }
  @Override
  public void collectInformationWithProgress(@NotNull ProgressIndicator progress) {
    // Find lazy-parseable elements (but not whole-file roots).
    SyntaxTraverser<PsiElement> s = psiTraverser(myFile)
      .filter(o -> {
        IElementType type = PsiUtilCore.getElementType(o);
        return type instanceof ILazyParseableElementType && !(type instanceof IFileElementType);
      });
    List<PsiElement> lazyOutside = ContainerUtil.newArrayListWithCapacity(100);
    List<PsiElement> lazyInside = ContainerUtil.newArrayListWithCapacity(100);
    List<HighlightInfo> outside = ContainerUtil.newArrayListWithCapacity(100);
    List<HighlightInfo> inside = ContainerUtil.newArrayListWithCapacity(100);
    // Partition elements by whether they intersect the visible (priority) range,
    // so the visible part can be processed and published first.
    for (PsiElement e : s) {
      (e.getTextRange().intersects(myPriorityRange) ? lazyInside : lazyOutside).add(e);
    }
    for (PsiElement e : lazyInside) {
      collectHighlights(e, inside, outside, myPriorityRange);
    }
    myHighlightInfoProcessor.highlightsInsideVisiblePartAreProduced(myHighlightingSession, getEditor(), inside, myPriorityRange, myRestrictRange, getId());
    for (PsiElement e : lazyOutside) {
      collectHighlights(e, inside, outside, myPriorityRange);
    }
    myHighlightInfoProcessor.highlightsOutsideVisiblePartAreProduced(myHighlightingSession, getEditor(), outside, myPriorityRange, myRestrictRange, getId());
    myHighlights.addAll(inside);
    myHighlights.addAll(outside);
  }
  // Highlights all leaf tokens of one lazy-parseable element with the syntax
  // highlighter of that element's language; results are routed into 'inside'
  // or 'outside' depending on whether the token lies in priorityRange.
  private void collectHighlights(@NotNull PsiElement element,
                                 @NotNull List<HighlightInfo> inside,
                                 @NotNull List<HighlightInfo> outside,
                                 @NotNull ProperTextRange priorityRange) {
    EditorColorsScheme scheme = ObjectUtils.notNull(getColorsScheme(), EditorColorsManager.getInstance().getGlobalScheme());
    TextAttributes defaultAttrs = scheme.getAttributes(HighlighterColors.TEXT);
    Language language = ILazyParseableElementType.LANGUAGE_KEY.get(element.getNode());
    if (language == null) return;
    SyntaxHighlighter syntaxHighlighter = SyntaxHighlighterFactory.getSyntaxHighlighter(language, myProject, myFile.getVirtualFile());
    for (PsiElement token : psiTraverser(element).traverse(TreeTraversal.LEAVES_DFS)) {
      TextRange tr = token.getTextRange();
      if (tr.isEmpty()) continue;
      IElementType type = PsiUtilCore.getElementType(token);
      TextAttributesKey[] keys = syntaxHighlighter.getTokenHighlights(type);
      // force attribute colors to override host' ones
      TextAttributes attributes = null;
      for (TextAttributesKey key : keys) {
        TextAttributes attrs2 = scheme.getAttributes(key);
        if (attrs2 != null) {
          attributes = attributes == null ? attrs2 : TextAttributes.merge(attributes, attrs2);
        }
      }
      TextAttributes forcedAttributes;
      if (attributes == null || attributes.isEmpty() || attributes.equals(defaultAttrs)) {
        // No distinct attributes for this token: just erase the host highlighting.
        forcedAttributes = TextAttributes.ERASE_MARKER;
      }
      else {
        // First erase the host attributes, then apply a copy of the merged ones.
        HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.INJECTED_LANGUAGE_FRAGMENT).
          range(tr).
          textAttributes(TextAttributes.ERASE_MARKER).
          createUnconditionally();
        (priorityRange.contains(tr) ? inside : outside).add(info);
        forcedAttributes = new TextAttributes(attributes.getForegroundColor(), attributes.getBackgroundColor(),
                                              attributes.getEffectColor(), attributes.getEffectType(), attributes.getFontType());
      }
      HighlightInfo info = HighlightInfo.newHighlightInfo(HighlightInfoType.INJECTED_LANGUAGE_FRAGMENT).
        range(tr).
        textAttributes(forcedAttributes).
        createUnconditionally();
      (priorityRange.contains(tr) ? inside : outside).add(info);
    }
  }
  @Override
  protected void applyInformationWithProgress() {
    // Intentionally empty: results are published incrementally via
    // myHighlightInfoProcessor in collectInformationWithProgress.
  }
  @Nullable
  @Override
  protected String getPresentableName() {
    return null; // do not show progress for this pass
  }
}
| apache-2.0 |
ThiagoGarciaAlves/intellij-community | json/src/com/jetbrains/jsonSchema/impl/JsonSchemaTreeNode.java | 6194 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.jsonSchema.impl;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
* @author Irina.Chernushina on 4/20/2017.
*/
public class JsonSchemaTreeNode {
  // Marks the "anything matches" leaf created by anyChild().
  private boolean myAny;
  // Marks the "nothing matches" leaf created by nothingChild().
  private boolean myNothing;
  // Index of the oneOf group this node came from; -1 when not part of a oneOf group.
  private int myExcludingGroupNumber = -1;
  @NotNull private SchemaResolveState myResolveState = SchemaResolveState.normal;
  // The schema this node represents; null for the synthetic any/nothing/error leaves.
  @Nullable private final JsonSchemaObject mySchema;
  @NotNull private final List<JsonSchemaVariantsTreeBuilder.Step> mySteps = new SmartList<>();
  @Nullable private final JsonSchemaTreeNode myParent;
  @NotNull private final List<JsonSchemaTreeNode> myChildren = new ArrayList<>();
  public JsonSchemaTreeNode(@Nullable JsonSchemaTreeNode parent,
                            @Nullable JsonSchemaObject schema) {
    // Only the root may lack a parent; only synthetic leaves may lack a schema.
    assert schema != null || parent != null;
    myParent = parent;
    mySchema = schema;
    // Inherit the parent's remaining steps, dropping the one the parent consumed.
    if (parent != null && !parent.getSteps().isEmpty()) {
      mySteps.addAll(parent.getSteps().subList(1, parent.getSteps().size()));
    }
  }
  /** Adds a child leaf meaning "any value is accepted". */
  public void anyChild() {
    final JsonSchemaTreeNode node = new JsonSchemaTreeNode(this, null);
    node.myAny = true;
    myChildren.add(node);
  }
  /** Adds a child leaf meaning "no value is accepted". */
  public void nothingChild() {
    final JsonSchemaTreeNode node = new JsonSchemaTreeNode(this, null);
    node.myNothing = true;
    myChildren.add(node);
  }
  /**
   * Expands this node with children derived from the given operation:
   * an error leaf for abnormal resolve states, plain children for the anyOf
   * group, and numbered (mutually excluding) children for each oneOf group.
   */
  public void createChildrenFromOperation(@NotNull JsonSchemaVariantsTreeBuilder.Operation operation) {
    if (!SchemaResolveState.normal.equals(operation.myState)) {
      // Resolve problem: record a single child carrying the error state.
      final JsonSchemaTreeNode node = new JsonSchemaTreeNode(this, null);
      node.myResolveState = operation.myState;
      myChildren.add(node);
      return;
    }
    if (!operation.myAnyOfGroup.isEmpty()) {
      myChildren.addAll(convertToNodes(operation.myAnyOfGroup));
    }
    if (!operation.myOneOfGroup.isEmpty()) {
      for (int i = 0; i < operation.myOneOfGroup.size(); i++) {
        final List<JsonSchemaObject> group = operation.myOneOfGroup.get(i);
        final List<JsonSchemaTreeNode> children = convertToNodes(group);
        // Tag each child with its oneOf group index so siblings of the same
        // group are known to be mutually exclusive.
        final int number = i;
        children.forEach(c -> c.myExcludingGroupNumber = number);
        myChildren.addAll(children);
      }
    }
  }
  private List<JsonSchemaTreeNode> convertToNodes(List<JsonSchemaObject> children) {
    return children.stream().map(s -> new JsonSchemaTreeNode(this, s)).collect(Collectors.toList());
  }
  @NotNull
  public SchemaResolveState getResolveState() {
    return myResolveState;
  }
  public boolean isAny() {
    return myAny;
  }
  public boolean isNothing() {
    return myNothing;
  }
  public void setChild(@NotNull final JsonSchemaObject schema) {
    myChildren.add(new JsonSchemaTreeNode(this, schema));
  }
  @Nullable
  public JsonSchemaObject getSchema() {
    return mySchema;
  }
  @NotNull
  public List<JsonSchemaVariantsTreeBuilder.Step> getSteps() {
    return mySteps;
  }
  @Nullable
  public JsonSchemaTreeNode getParent() {
    return myParent;
  }
  @NotNull
  public List<JsonSchemaTreeNode> getChildren() {
    return myChildren;
  }
  public int getExcludingGroupNumber() {
    return myExcludingGroupNumber;
  }
  public void setSteps(@NotNull List<JsonSchemaVariantsTreeBuilder.Step> steps) {
    mySteps.clear();
    mySteps.addAll(steps);
  }
  // Equality is by this node's own content (flags, state, schema, steps);
  // parent and children are deliberately not compared.
  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    JsonSchemaTreeNode node = (JsonSchemaTreeNode)o;
    if (myAny != node.myAny) return false;
    if (myNothing != node.myNothing) return false;
    if (myResolveState != node.myResolveState) return false;
    if (mySchema != null ? !mySchema.equals(node.mySchema) : node.mySchema != null) return false;
    //noinspection RedundantIfStatement
    if (!mySteps.equals(node.mySteps)) return false;
    return true;
  }
  @Override
  public int hashCode() {
    // Must stay consistent with equals(): same fields, same order of mixing.
    int result = (myAny ? 1 : 0);
    result = 31 * result + (myNothing ? 1 : 0);
    result = 31 * result + myResolveState.hashCode();
    result = 31 * result + (mySchema != null ? mySchema.hashCode() : 0);
    result = 31 * result + mySteps.hashCode();
    return result;
  }
  // Debug dump: renders this node and, recursively, its OR-children.
  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("NODE#" + hashCode() + "\n");
    sb.append(mySteps.stream().map(Object::toString).collect(Collectors.joining("->", "steps: <", ">")));
    sb.append("\n");
    if (myExcludingGroupNumber >= 0) sb.append("in excluding group\n");
    if (myAny) sb.append("any");
    else if (myNothing) sb.append("nothing");
    else if (!SchemaResolveState.normal.equals(myResolveState)) sb.append(myResolveState.name());
    else {
      assert mySchema != null;
      final String name = mySchema.getSchemaFile().getName();
      sb.append("schema from file: ").append(name).append("\n");
      if (mySchema.getRef() != null) sb.append("$ref: ").append(mySchema.getRef()).append("\n");
      else if (!mySchema.getProperties().isEmpty()) {
        sb.append("properties: ");
        sb.append(mySchema.getProperties().keySet().stream().collect(Collectors.joining(", "))).append("\n");
      }
      if (!myChildren.isEmpty()) {
        sb.append("OR children of NODE#").append(hashCode()).append(":\n----------------\n")
          .append(myChildren.stream().map(Object::toString).collect(Collectors.joining("\n")))
          .append("\n=================\n");
      }
    }
    return sb.toString();
  }
}
| apache-2.0 |
tyler-travis/openstorefront | server/openstorefront/openstorefront-web/src/main/java/edu/usu/sdl/openstorefront/web/test/system/DBQueryTest.java | 2799 | /*
* Copyright 2014 Space Dynamics Laboratory - Utah State University Research Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.usu.sdl.openstorefront.web.test.system;
import edu.usu.sdl.openstorefront.core.entity.TestEntity;
import edu.usu.sdl.openstorefront.web.test.BaseTestCase;
import java.util.Arrays;
import java.util.List;
/**
*
* @author dshurtleff
*/
/**
 * Integration test exercising query-by-example semantics: saves active and
 * inactive lookup records, then verifies that an example with an ACTIVE
 * status filter returns fewer rows than an empty (match-all) example.
 */
public class DBQueryTest
		extends BaseTestCase
{
	public DBQueryTest()
	{
		this.description = "Querying_Test";
	}
	@Override
	protected void runInternalTest()
	{
		// Seed two ACTIVE lookup records (status defaults to active).
		Arrays.asList("A", "B").forEach(item -> {
			TestEntity testEntity = new TestEntity();
			testEntity.setCode(item);
			testEntity.setDescription(item + " - Description");
			testEntity.setCreateUser(TEST_USER);
			testEntity.setUpdateUser(TEST_USER);
			service.getLookupService().saveLookupValue(testEntity);
		});
		results.append("Saved A, B").append("<br>");
		// Seed two records explicitly marked INACTIVE.
		Arrays.asList("C", "D").forEach(item -> {
			TestEntity testEntity = new TestEntity();
			testEntity.setCode(item);
			testEntity.setDescription(item + " - Description");
			testEntity.setActiveStatus(TestEntity.INACTIVE_STATUS);
			testEntity.setCreateUser(TEST_USER);
			testEntity.setUpdateUser(TEST_USER);
			service.getLookupService().saveLookupValue(testEntity);
		});
		results.append("Saved C, D").append("<br>");
		results.append("Active").append("<br>");
		// Query by example restricted to ACTIVE records only.
		TestEntity testEntityExample = new TestEntity();
		testEntityExample.setActiveStatus(TestEntity.ACTIVE_STATUS);
		List<TestEntity> testActiveRecords = testEntityExample.findByExample();
		testActiveRecords.stream().forEach(record -> {
			results.append(String.join("-", record.getCode(), record.getDescription())).append("<br>");
		});
		results.append("Check All").append("<br>");
		// An empty example must match everything; if counts are equal the
		// status filter was ignored and the test fails.
		List<TestEntity> testInActiveRecords = service.getPersistenceService().queryByExample(TestEntity.class, new TestEntity());
		if (testInActiveRecords.size() == testActiveRecords.size()) {
			failureReason.append("All return the same count and active.");
		} else {
			results.append("Pass").append("<br>");
			success = true;
		}
		// Remove everything this test inserted so reruns start clean.
		results.append("Clean up records").append("<br>");
		results.append(service.getPersistenceService().deleteByExample(new TestEntity())).append(" records removed.<br>");
	}
}
| apache-2.0 |
skjolber/camunda-bpm-platform | qa/performance-tests-engine/src/main/java/org/camunda/bpm/qa/performance/engine/util/JsonUtil.java | 2134 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.qa.performance.engine.util;
import java.io.File;
import org.camunda.bpm.qa.performance.engine.framework.PerfTestException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;
import org.codehaus.jackson.map.annotate.JsonSerialize;
import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion;
/**
* @author Daniel Meyer
*
*/
public class JsonUtil {
  /**
   * Shared, lazily created mapper. Access is guarded by the synchronized
   * getMapper() method; never read this field directly.
   */
  private static ObjectMapper mapper;

  /**
   * Serializes the given object as pretty-printed JSON into the named file,
   * replacing any existing file.
   *
   * @param filename path of the file to (re)create
   * @param object object to serialize
   * @throws PerfTestException if the file cannot be written
   */
  public static void writeObjectToFile(String filename, Object object) {
    final ObjectMapper mapper = getMapper();
    try {
      File resultFile = new File(filename);
      // Recreate the file from scratch so stale content never survives;
      // writeValue below would overwrite anyway, this makes it explicit.
      if (resultFile.exists()) {
        resultFile.delete();
      }
      resultFile.createNewFile();
      mapper.writerWithDefaultPrettyPrinter().writeValue(resultFile, object);
    } catch (Exception e) {
      throw new PerfTestException("Cannot write object to file " + filename, e);
    }
  }

  /**
   * Deserializes an object of the given type from the named JSON file.
   *
   * @param filename path of the file to read
   * @param type target type to bind the JSON to
   * @return the deserialized instance
   * @throws PerfTestException if the file cannot be read or parsed
   */
  public static <T> T readObjectFromFile(String filename, Class<T> type) {
    final ObjectMapper mapper = getMapper();
    try {
      return mapper.readValue(new File(filename), type);
    } catch (Exception e) {
      throw new PerfTestException("Cannot read object from file " + filename, e);
    }
  }

  /**
   * Returns the shared mapper, creating and configuring it on first use.
   * Synchronized so concurrent callers cannot race the lazy initialization
   * and observe a partially configured instance.
   *
   * @return the shared, fully configured ObjectMapper
   */
  public static synchronized ObjectMapper getMapper() {
    if (mapper == null) {
      ObjectMapper newMapper = new ObjectMapper();
      // Skip empty values when serializing so result files stay compact.
      SerializationConfig config = newMapper
          .getSerializationConfig()
          .withSerializationInclusion(Inclusion.NON_EMPTY);
      newMapper.setSerializationConfig(config);
      // Publish only after configuration is complete.
      mapper = newMapper;
    }
    return mapper;
  }
}
| apache-2.0 |
dbrimley/hazelcast | hazelcast/src/main/java/com/hazelcast/client/impl/protocol/util/package-info.java | 702 | /*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Utility classes supporting the Hazelcast client protocol implementation.
*/
package com.hazelcast.client.impl.protocol.util;
| apache-2.0 |
paulminer/bitcoinj | wallettemplate/src/main/java/wallettemplate/Main.java | 10314 | /*
* Copyright by the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wallettemplate;
import com.google.common.util.concurrent.*;
import javafx.scene.input.*;
import org.bitcoinj.core.NetworkParameters;
import org.bitcoinj.kits.WalletAppKit;
import org.bitcoinj.params.*;
import org.bitcoinj.utils.BriefLogFormatter;
import org.bitcoinj.utils.Threading;
import org.bitcoinj.wallet.DeterministicSeed;
import javafx.application.Application;
import javafx.application.Platform;
import javafx.fxml.FXMLLoader;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.layout.Pane;
import javafx.scene.layout.StackPane;
import javafx.stage.Stage;
import wallettemplate.controls.NotificationBarPane;
import wallettemplate.utils.GuiUtils;
import wallettemplate.utils.TextFieldValidator;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import static wallettemplate.utils.GuiUtils.*;
/**
 * JavaFX entry point for the wallet template application. Boots bitcoinj via a
 * {@link WalletAppKit}, loads the main FXML UI, and provides helpers for
 * stacking overlay panes (with blur/zoom transitions) on top of the main window.
 */
public class Main extends Application {
    public static NetworkParameters params = MainNetParams.get();
    public static final String APP_NAME = "WalletTemplate";
    // Sanitise the app name so it is safe to use as a wallet file name prefix.
    private static final String WALLET_FILE_NAME = APP_NAME.replaceAll("[^a-zA-Z0-9.-]", "_") + "-"
            + params.getPaymentProtocolId();

    public static WalletAppKit bitcoin;
    public static Main instance;

    private StackPane uiStack;
    private Pane mainUI;
    public MainController controller;
    public NotificationBarPane notificationBar;
    public Stage mainWindow;

    @Override
    public void start(Stage mainWindow) throws Exception {
        // Delegate so any startup failure is surfaced in a crash dialog before rethrowing.
        try {
            realStart(mainWindow);
        } catch (Throwable e) {
            GuiUtils.crashAlert(e);
            throw e;
        }
    }

    /**
     * Performs the actual startup: loads the FXML UI, wires the scene graph,
     * configures bitcoinj threading and starts the wallet app kit.
     */
    private void realStart(Stage mainWindow) throws IOException {
        this.mainWindow = mainWindow;
        instance = this;
        // Show the crash dialog for any exceptions that we don't handle and that hit the main loop.
        GuiUtils.handleCrashesOnThisThread();
        if (System.getProperty("os.name").toLowerCase().contains("mac")) {
            // We could match the Mac Aqua style here, except that (a) Modena doesn't look that bad, and (b)
            // the date picker widget is kinda broken in AquaFx and I can't be bothered fixing it.
            // AquaFx.style();
        }
        // Load the GUI. The MainController class will be automagically created and wired up.
        URL location = getClass().getResource("main.fxml");
        FXMLLoader loader = new FXMLLoader(location);
        mainUI = loader.load();
        controller = loader.getController();
        // Configure the window with a StackPane so we can overlay things on top of the main UI, and a
        // NotificationBarPane so we can slide messages and progress bars in from the bottom. Note that
        // ordering of the construction and connection matters here, otherwise we get (harmless) CSS error
        // spew to the logs.
        notificationBar = new NotificationBarPane(mainUI);
        mainWindow.setTitle(APP_NAME);
        uiStack = new StackPane();
        Scene scene = new Scene(uiStack);
        TextFieldValidator.configureScene(scene);   // Add CSS that we need.
        scene.getStylesheets().add(getClass().getResource("wallet.css").toString());
        uiStack.getChildren().add(notificationBar);
        mainWindow.setScene(scene);
        // Make log output concise.
        BriefLogFormatter.init();
        // Tell bitcoinj to run event handlers on the JavaFX UI thread. This keeps things simple and means
        // we cannot forget to switch threads when adding event handlers. Unfortunately, the DownloadListener
        // we give to the app kit is currently an exception and runs on a library thread. It'll get fixed in
        // a future version.
        Threading.USER_THREAD = Platform::runLater;
        // Create the app kit. It won't do any heavyweight initialization until after we start it.
        setupWalletKit(null);
        if (bitcoin.isChainFileLocked()) {
            informationalAlert("Already running", "This application is already running and cannot be started twice.");
            Platform.exit();
            return;
        }
        mainWindow.show();
        WalletSetPasswordController.estimateKeyDerivationTimeMsec();
        // Surface wallet-kit failures as crash dialogs on the FX thread.
        bitcoin.addListener(new Service.Listener() {
            @Override
            public void failed(Service.State from, Throwable failure) {
                GuiUtils.crashAlert(failure);
            }
        }, Platform::runLater);
        bitcoin.startAsync();
        // Debug accelerator: drop the current download peer to force a re-pick.
        scene.getAccelerators().put(KeyCombination.valueOf("Shortcut+F"), () -> bitcoin.peerGroup().getDownloadPeer().close());
    }

    /**
     * Creates and configures the {@link WalletAppKit}.
     *
     * @param seed if non-null, the wallet is restored from this backup seed
     */
    public void setupWalletKit(@Nullable DeterministicSeed seed) {
        // If seed is non-null it means we are restoring from backup.
        bitcoin = new WalletAppKit(params, new File("."), WALLET_FILE_NAME) {
            @Override
            protected void onSetupCompleted() {
                // Don't make the user wait for confirmations for now, as the intention is they're sending it
                // their own money!
                bitcoin.wallet().allowSpendingUnconfirmedTransactions();
                Platform.runLater(controller::onBitcoinSetup);
            }
        };
        // Now configure and start the appkit. This will take a second or two - we could show a temporary splash screen
        // or progress widget to keep the user engaged whilst we initialise, but we don't.
        if (params == RegTestParams.get()) {
            bitcoin.connectToLocalHost();   // You should run a regtest mode bitcoind locally.
        } else if (params == TestNet3Params.get()) {
            // As an example!
            bitcoin.useTor();
            // bitcoin.setDiscovery(new HttpDiscovery(params, URI.create("http://localhost:8080/peers"), ECKey.fromPublicOnly(BaseEncoding.base16().decode("02cba68cfd0679d10b186288b75a59f9132b1b3e222f6332717cb8c4eb2040f940".toUpperCase()))));
        }
        bitcoin.setDownloadListener(controller.progressBarUpdater())
               .setBlockingStartup(false)
               .setUserAgent(APP_NAME, "1.0");
        if (seed != null)
            bitcoin.restoreWalletFromSeed(seed);
    }

    // Transparent pane that swallows clicks on the blurred main UI while an overlay is showing.
    private Node stopClickPane = new Pane();

    /**
     * Pairs an overlay UI node with its controller and manages showing/dismissing
     * it on top of the (blurred) main UI.
     *
     * @param <T> controller type of the overlaid FXML
     */
    public class OverlayUI<T> {
        public Node ui;
        public T controller;

        public OverlayUI(Node ui, T controller) {
            this.ui = ui;
            this.controller = controller;
        }

        public void show() {
            checkGuiThread();
            if (currentOverlay == null) {
                uiStack.getChildren().add(stopClickPane);
                uiStack.getChildren().add(ui);
                blurOut(mainUI);
                //darken(mainUI);
                fadeIn(ui);
                zoomIn(ui);
            } else {
                // Do a quick transition between the current overlay and the next.
                // Bug here: we don't pay attention to changes in outsideClickDismisses.
                explodeOut(currentOverlay.ui);
                fadeOutAndRemove(uiStack, currentOverlay.ui);
                uiStack.getChildren().add(ui);
                ui.setOpacity(0.0);
                fadeIn(ui, 100);
                zoomIn(ui, 100);
            }
            currentOverlay = this;
        }

        /** Makes a click outside the overlay dismiss it. */
        public void outsideClickDismisses() {
            stopClickPane.setOnMouseClicked((ev) -> done());
        }

        /** Dismisses the overlay and restores the main UI. Safe to call more than once. */
        public void done() {
            checkGuiThread();
            if (ui == null) return;   // In the middle of being dismissed and got an extra click.
            explodeOut(ui);
            fadeOutAndRemove(uiStack, ui, stopClickPane);
            blurIn(mainUI);
            //undark(mainUI);
            this.ui = null;
            this.controller = null;
            currentOverlay = null;
        }
    }

    @Nullable
    private OverlayUI currentOverlay;

    /** Shows the given node as an overlay with the given controller. */
    public <T> OverlayUI<T> overlayUI(Node node, T controller) {
        checkGuiThread();
        OverlayUI<T> pair = new OverlayUI<T>(node, controller);
        // Auto-magically set the overlayUI member, if it's there.
        try {
            controller.getClass().getField("overlayUI").set(controller, pair);
        } catch (IllegalAccessException | NoSuchFieldException ignored) {
        }
        pair.show();
        return pair;
    }

    /** Loads the FXML file with the given name, blurs out the main UI and puts this one on top. */
    public <T> OverlayUI<T> overlayUI(String name) {
        try {
            checkGuiThread();
            // Load the UI from disk.
            URL location = GuiUtils.getResource(name);
            FXMLLoader loader = new FXMLLoader(location);
            Pane ui = loader.load();
            T controller = loader.getController();
            OverlayUI<T> pair = new OverlayUI<T>(ui, controller);
            // Auto-magically set the overlayUI member, if it's there.
            try {
                if (controller != null)
                    controller.getClass().getField("overlayUI").set(controller, pair);
            } catch (IllegalAccessException | NoSuchFieldException ignored) {
                ignored.printStackTrace();
            }
            pair.show();
            return pair;
        } catch (IOException e) {
            throw new RuntimeException(e);   // Can't happen.
        }
    }

    @Override
    public void stop() throws Exception {
        // Shut bitcoinj down cleanly so the wallet/chain files are flushed.
        bitcoin.stopAsync();
        bitcoin.awaitTerminated();
        // Forcibly terminate the JVM because Orchid likes to spew non-daemon threads everywhere.
        Runtime.getRuntime().exit(0);
    }

    public static void main(String[] args) {
        launch(args);
    }
}
| apache-2.0 |
Donnerbart/hazelcast | hazelcast/src/main/java/com/hazelcast/internal/util/filter/AndFilter.java | 1144 | /*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.util.filter;
/**
 * Composite filter that accepts an object only when both wrapped filters
 * accept it. Evaluation short-circuits: the second filter is not consulted
 * when the first one rejects the object.
 *
 * @param <T> type of object being filtered
 */
public final class AndFilter<T> implements Filter<T> {

    private final Filter<T> left;
    private final Filter<T> right;

    public AndFilter(Filter<T> left, Filter<T> right) {
        this.left = left;
        this.right = right;
    }

    @Override
    public boolean accept(T candidate) {
        return left.accept(candidate) && right.accept(candidate);
    }
}
| apache-2.0 |
spring-projects/spring-boot | spring-boot-project/spring-boot-docs/src/main/java/org/springframework/boot/docs/appendix/configurationmetadata/annotationprocessor/automaticmetadatageneration/MyServerProperties.java | 1460 | /*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.docs.appendix.configurationmetadata.annotationprocessor.automaticmetadatageneration;
import org.springframework.boot.context.properties.ConfigurationProperties;
@ConfigurationProperties(prefix = "my.server")
public class MyServerProperties {

    /**
     * Name of the server.
     */
    private String name;

    /**
     * IP address to listen to.
     */
    private String ip = "127.0.0.1";

    /**
     * Port to listen to.
     */
    private int port = 9797;

    // @fold:on // getters/setters ...
    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getIp() {
        return this.ip;
    }

    public void setIp(String ip) {
        this.ip = ip;
    }

    public int getPort() {
        return this.port;
    }

    public void setPort(int port) {
        this.port = port;
    }
    // fold:off
}
| apache-2.0 |
justintung/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StorefileRefresherChore.java | 5683 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.Stoppable;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.master.cleaner.TimeToLiveHFileCleaner;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.util.StringUtils;
/**
* A chore for refreshing the store files for secondary regions hosted in the region server.
*
* This chore should run periodically with a shorter interval than HFile TTL
* ("hbase.master.hfilecleaner.ttl", default 5 minutes).
* It ensures that if we cannot refresh files longer than that amount, the region
* will stop serving read requests because the referenced files might have been deleted (by the
* primary region).
*/
@InterfaceAudience.Private
public class StorefileRefresherChore extends ScheduledChore {

    private static final Log LOG = LogFactory.getLog(StorefileRefresherChore.class);

    /**
     * The period (in milliseconds) for refreshing the store files for the secondary regions.
     */
    public static final String REGIONSERVER_STOREFILE_REFRESH_PERIOD
        = "hbase.regionserver.storefile.refresh.period";
    static final int DEFAULT_REGIONSERVER_STOREFILE_REFRESH_PERIOD = 0; //disabled by default

    /**
     * Whether all storefiles should be refreshed, as opposed to just hbase:meta's
     * Meta region doesn't have WAL replication for replicas enabled yet
     */
    public static final String REGIONSERVER_META_STOREFILE_REFRESH_PERIOD
        = "hbase.regionserver.meta.storefile.refresh.period";

    private HRegionServer regionServer;
    private long hfileTtl;
    private int period;
    private boolean onlyMetaRefresh = true;

    //ts of last time regions store files are refreshed
    private Map<String, Long> lastRefreshTimes; // encodedName -> long

    /**
     * @param period          refresh interval in milliseconds
     * @param onlyMetaRefresh when true, only hbase:meta replicas are refreshed
     * @param regionServer    server whose online secondary regions are refreshed
     * @param stoppable       used by the chore framework to stop this chore
     * @throws RuntimeException if the period exceeds half the HFile TTL (a longer
     *         period could serve references to files the primary already archived)
     */
    public StorefileRefresherChore(int period, boolean onlyMetaRefresh, HRegionServer regionServer,
        Stoppable stoppable) {
        super("StorefileRefresherChore", stoppable, period);
        this.period = period;
        this.regionServer = regionServer;
        this.hfileTtl = this.regionServer.getConfiguration().getLong(
            TimeToLiveHFileCleaner.TTL_CONF_KEY, TimeToLiveHFileCleaner.DEFAULT_TTL);
        this.onlyMetaRefresh = onlyMetaRefresh;
        if (period > hfileTtl / 2) {
            throw new RuntimeException(REGIONSERVER_STOREFILE_REFRESH_PERIOD +
                " should be set smaller than half of " + TimeToLiveHFileCleaner.TTL_CONF_KEY);
        }
        lastRefreshTimes = new HashMap<String, Long>();
    }

    /**
     * One chore tick: refreshes store files for each read-only (secondary) region,
     * disabling reads for regions whose refresh has been failing longer than the
     * HFile TTL, and prunes bookkeeping for regions no longer online.
     */
    @Override
    protected void chore() {
        for (HRegion r : regionServer.getOnlineRegionsLocalContext()) {
            if (!r.writestate.isReadOnly()) {
                // skip checking for this region if it can accept writes
                continue;
            }
            // don't refresh unless enabled for all files, or it the meta region
            // meta region don't have WAL replication for replicas enabled yet
            if (onlyMetaRefresh && !r.getRegionInfo().isMetaTable()) continue;
            String encodedName = r.getRegionInfo().getEncodedName();
            long time = EnvironmentEdgeManager.currentTime();
            if (!lastRefreshTimes.containsKey(encodedName)) {
                lastRefreshTimes.put(encodedName, time);
            }
            try {
                for (Store store : r.getStores().values()) {
                    // TODO: some stores might see new data from flush, while others do not which
                    // MIGHT break atomic edits across column families. We can fix this with setting
                    // mvcc read numbers that we know every store has seen
                    store.refreshStoreFiles();
                }
            } catch (IOException ex) {
                LOG.warn("Exception while trying to refresh store files for region:" + r.getRegionInfo()
                    + ", exception:" + StringUtils.stringifyException(ex));
                // Store files have a TTL in the archive directory. If we fail to refresh for that long, we stop serving reads
                if (isRegionStale(encodedName, time)) {
                    r.setReadsEnabled(false); // stop serving reads
                }
                continue;
            }
            lastRefreshTimes.put(encodedName, time);
            r.setReadsEnabled(true); // restart serving reads
        }
        // remove closed regions
        Iterator<String> lastRefreshTimesIter = lastRefreshTimes.keySet().iterator();
        while (lastRefreshTimesIter.hasNext()) {
            String encodedName = lastRefreshTimesIter.next();
            if (regionServer.getFromOnlineRegions(encodedName) == null) {
                lastRefreshTimesIter.remove();
            }
        }
    }

    /**
     * Returns true if the region has gone without a successful refresh for long
     * enough that its referenced files may already have been deleted.
     * NOTE: unboxes the map value; callers must have recorded a refresh time for
     * {@code encodedName} first (chore() does so before calling this).
     */
    protected boolean isRegionStale(String encodedName, long time) {
        long lastRefreshTime = lastRefreshTimes.get(encodedName);
        return time - lastRefreshTime > hfileTtl - period;
    }
}
| apache-2.0 |
hugoYe/Roid-Library | src/com/rincliu/library/common/persistence/image/core/assist/LoadedFrom.java | 240 | package com.rincliu.library.common.persistence.image.core.assist;
/**
* Source image loaded from.
*
* @author Sergey Tarasevich (nostra13[at]gmail[dot]com)
*/
public enum LoadedFrom {
    /** Image was downloaded over the network. */
    NETWORK,
    /** Image was read from the disc cache. */
    DISC_CACHE,
    /** Image was taken from the in-memory cache. */
    MEMORY_CACHE
}
| apache-2.0 |
hnyzwtf/WayHoo | WayHoo/src/com/way/beans/City.java | 4503 | package com.way.beans;
import android.os.Parcel;
import android.os.Parcelable;
/**
 * Value object describing a city and its cached weather data. Parcelable so it
 * can be passed between Android components.
 *
 * <p>Identity ({@link #equals(Object)} / {@link #hashCode()}) is based solely
 * on {@link #postID}.
 */
public class City implements Parcelable {
    private String province;
    private String city;
    private String name;
    private String pinyin;
    private String py;
    private String phoneCode;
    private String areaCode;
    private String postID;
    private long refreshTime;
    // Stored as an int for easy parcelling; 0 = not the auto-located city.
    private int isLocation;
    private long pubTime;
    private String weatherInfoStr;

    public City() {
    }

    public City(String name, String postID, long refreshTime, int isLocation, long pubTime, String weatherInfoStr) {
        super();
        this.name = name;
        this.postID = postID;
        this.refreshTime = refreshTime;
        this.isLocation = isLocation;
        this.pubTime = pubTime;
        this.weatherInfoStr = weatherInfoStr;
    }

    public City(String name, String postID) {
        super();
        this.name = name;
        this.postID = postID;
    }

    public City(String province, String city, String name, String pinyin,
            String py, String phoneCode, String areaCode, String postID) {
        super();
        this.province = province;
        this.city = city;
        this.name = name;
        this.pinyin = pinyin;
        this.py = py;
        this.phoneCode = phoneCode;
        this.areaCode = areaCode;
        this.postID = postID;
    }

    public String getProvince() {
        return province;
    }

    public void setProvince(String province) {
        this.province = province;
    }

    public String getCity() {
        return city;
    }

    public void setCity(String city) {
        this.city = city;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getPinyin() {
        return pinyin;
    }

    public void setPinyin(String pinyin) {
        this.pinyin = pinyin;
    }

    public String getPy() {
        return py;
    }

    public void setPy(String py) {
        this.py = py;
    }

    public String getPhoneCode() {
        return phoneCode;
    }

    public void setPhoneCode(String phoneCode) {
        this.phoneCode = phoneCode;
    }

    public String getAreaCode() {
        return areaCode;
    }

    public void setAreaCode(String areaCode) {
        this.areaCode = areaCode;
    }

    public String getPostID() {
        return postID;
    }

    public void setPostID(String postID) {
        this.postID = postID;
    }

    /** Returns whether this is the automatically located city (any non-zero flag). */
    public boolean getIsLocation() {
        return isLocation != 0;
    }

    public void setIsLocation(int isLocation) {
        this.isLocation = isLocation;
    }

    public long getRefreshTime() {
        return refreshTime;
    }

    public void setRefreshTime(long refreshTime) {
        this.refreshTime = refreshTime;
    }

    public long getPubTime() {
        return pubTime;
    }

    public void setPubTime(long pubTime) {
        this.pubTime = pubTime;
    }

    public String getWeatherInfoStr() {
        return weatherInfoStr;
    }

    public void setWeatherInfoStr(String weatherInfoStr) {
        this.weatherInfoStr = weatherInfoStr;
    }

    @Override
    public int hashCode() {
        // Fixed: the previous expression `31 * result + postID != null ? ... : 0`
        // parsed as `(31 * result + postID) != null` because '+' binds tighter
        // than '!='; the int was string-concatenated with postID, the seed was
        // discarded, and a null postID caused an NPE instead of hashing to 0.
        int result = 17;
        result = 31 * result + (postID != null ? postID.hashCode() : 0);
        return result;
    }

    @Override
    public boolean equals(Object o) {
        if (o == null)
            return false;
        if (o == this)
            return true;
        if (o instanceof City) {
            City item = (City) o;
            if (item.getPostID().equals(this.postID))
                return true;
        }
        return false;
    }

    @Override
    public String toString() {
        return "City [province=" + province + ", city=" + city + ", name="
                + name + ", pinyin=" + pinyin + ", py=" + py + ", phoneCode="
                + phoneCode + ", areaCode=" + areaCode + ", postID=" + postID
                + ", refreshTime=" + refreshTime + ", isLocation=" + isLocation
                + "]";
    }

    @Override
    public int describeContents() {
        return 0;
    }

    // NOTE(review): pubTime and weatherInfoStr are not parcelled (neither written
    // nor read); they are lost across process boundaries. Looks intentional for
    // cached weather data — confirm with callers before adding them.
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(province);
        dest.writeString(city);
        dest.writeString(name);
        dest.writeString(pinyin);
        dest.writeString(py);
        dest.writeString(phoneCode);
        dest.writeString(areaCode);
        dest.writeString(postID);
        dest.writeLong(refreshTime);
        dest.writeInt(isLocation);
    }

    /** Reads fields back in exactly the order written by {@link #writeToParcel}. */
    public static final Parcelable.Creator<City> CREATOR = new Creator<City>() {
        @Override
        public City createFromParcel(Parcel source) {
            City city = new City();
            city.province = source.readString();
            city.city = source.readString();
            city.name = source.readString();
            city.pinyin = source.readString();
            city.py = source.readString();
            city.phoneCode = source.readString();
            city.areaCode = source.readString();
            city.postID = source.readString();
            city.refreshTime = source.readLong();
            city.isLocation = source.readInt();
            return city;
        }

        @Override
        public City[] newArray(int size) {
            return new City[size];
        }
    };
}
| apache-2.0 |
zazi/Wikidata-Toolkit | wdtk-dumpfiles/src/test/java/org/wikidata/wdtk/dumpfiles/wmf/WmfOnlineDailyDumpFileTest.java | 4391 | package org.wikidata.wdtk.dumpfiles.wmf;
/*
* #%L
* Wikidata Toolkit Dump File Handling
* %%
* Copyright (C) 2014 Wikidata Toolkit Developers
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Paths;
import org.junit.Before;
import org.junit.Test;
import org.wikidata.wdtk.dumpfiles.DumpContentType;
import org.wikidata.wdtk.testing.MockDirectoryManager;
import org.wikidata.wdtk.testing.MockWebResourceFetcher;
import org.wikidata.wdtk.util.CompressionType;
/**
 * Unit tests for {@code WmfOnlineDailyDumpFile}, using mock web and directory
 * backends to simulate the Wikimedia incremental-dump site.
 */
public class WmfOnlineDailyDumpFileTest {

    MockWebResourceFetcher wrf;
    MockDirectoryManager dm;

    @Before
    public void setUp() throws IOException {
        dm = new MockDirectoryManager(Paths.get(System.getProperty("user.dir")));
        wrf = new MockWebResourceFetcher();
    }

    /** Happy path: status "done" plus a BZ2 dump file yields a readable, available dump. */
    @Test
    public void validDumpProperties() throws IOException {
        String dateStamp = "20140220";
        wrf.setWebResourceContents(
            "http://dumps.wikimedia.org/other/incr/wikidatawiki/"
                + dateStamp + "/status.txt", "done");
        wrf.setWebResourceContents(
            "http://dumps.wikimedia.org/other/incr/wikidatawiki/"
                + dateStamp + "/wikidatawiki-" + dateStamp
                + "-pages-meta-hist-incr.xml.bz2", "Line1",
            CompressionType.BZ2);
        WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp,
            "wikidatawiki", wrf, dm);
        BufferedReader br = dump.getDumpFileReader();
        assertEquals(br.readLine(), "Line1");
        assertEquals(br.readLine(), null);
        assertTrue(dump.isAvailable());
        assertTrue(dump.isAvailable()); // second time should use cached entry
        assertEquals(dateStamp, dump.getDateStamp());
        assertEquals("wikidatawiki", dump.getProjectName());
        assertEquals("wikidatawiki-daily-" + dateStamp, dump.toString());
        assertEquals(DumpContentType.DAILY, dump.getDumpContentType());
    }

    /** Without any web resources, the dump must report itself unavailable. */
    @Test
    public void missingDumpProperties() {
        String dateStamp = "20140220";
        WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp,
            "wikidatawiki", wrf, dm);
        assertTrue(!dump.isAvailable());
        assertEquals(dateStamp, dump.getDateStamp());
    }

    /** An empty status file (not "done") means the dump is not yet available. */
    @Test
    public void emptyDumpProperties() throws IOException {
        String dateStamp = "20140220";
        wrf.setWebResourceContents(
            "http://dumps.wikimedia.org/other/incr/wikidatawiki/"
                + dateStamp + "/status.txt", "");
        WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp,
            "wikidatawiki", wrf, dm);
        assertTrue(!dump.isAvailable());
        assertEquals(dateStamp, dump.getDateStamp());
    }

    /** If reading status.txt fails at I/O level, the dump is treated as unavailable. */
    @Test
    public void inaccessibleStatus() throws IOException {
        String dateStamp = "20140220";
        wrf.setWebResourceContents(
            "http://dumps.wikimedia.org/other/incr/wikidatawiki/"
                + dateStamp + "/status.txt", "done");
        wrf.setReturnFailingReaders(true);
        WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp,
            "wikidatawiki", wrf, dm);
        assertTrue(!dump.isAvailable());
    }

    /** Fetching the reader without a status.txt present must fail with IOException. */
    @Test(expected = IOException.class)
    public void downloadNoRevisionId() throws IOException {
        String dateStamp = "20140220";
        wrf.setWebResourceContents(
            "http://dumps.wikimedia.org/other/incr/wikidatawiki/"
                + dateStamp + "/wikidatawiki-" + dateStamp
                + "-pages-meta-hist-incr.xml.bz2", "Line1",
            CompressionType.BZ2);
        WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp,
            "wikidatawiki", wrf, dm);
        dump.getDumpFileReader();
    }

    /** Fetching the reader when the dump file itself is missing must fail with IOException. */
    @Test(expected = IOException.class)
    public void downloadNoDumpFile() throws IOException {
        String dateStamp = "20140220";
        wrf.setWebResourceContents(
            "http://dumps.wikimedia.org/other/incr/wikidatawiki/"
                + dateStamp + "/status.txt", "done");
        WmfOnlineDailyDumpFile dump = new WmfOnlineDailyDumpFile(dateStamp,
            "wikidatawiki", wrf, dm);
        dump.getDumpFileReader();
    }
}
| apache-2.0 |
siddaartha/spork | test/org/apache/pig/test/TestJsonLoaderStorage.java | 4897 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import java.io.File;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.Iterator;
import junit.framework.Assert;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.data.Tuple;
import org.apache.pig.test.utils.TestHelper;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Integration tests for Pig's JsonLoader/JsonStorage round-trip, run against a
 * local-mode {@link PigServer}. Output is written to the working directory
 * ("jsonStorage1.json") and cleaned up before and after the run.
 */
public class TestJsonLoaderStorage {

    private static PigServer pigServer;
    // NOTE(review): appears unused within this class — candidate for removal.
    File jsonFile;

    @BeforeClass
    public static void setUp() throws Exception{
        removeOutput();
        pigServer = new PigServer(ExecType.LOCAL);
    }

    /** Deletes the "jsonStorage1.json" output directory and its contents, if present. */
    private static void removeOutput() {
        File outputDir = new File("jsonStorage1.json");
        if (outputDir.exists()) {
            for (File c : outputDir.listFiles())
                c.delete();
            outputDir.delete();
        }
    }

    /**
     * Round-trips data through JsonStorage and JsonLoader (both with inferred
     * and with explicitly declared schemas) and compares against golden files.
     */
    @Test
    public void testJsonLoaderStorage1() throws Exception{
        removeOutput();
        pigServer.registerScript("test/org/apache/pig/test/data/jsonStorage1.pig");
        File resultFile = new File("jsonStorage1.json/part-m-00000");
        String result = Util.readFile(resultFile);
        String expected = Util.readFile(new File("test/org/apache/pig/test/data/jsonStorage1.result"));
        // Sort JSON object members so field ordering differences don't fail the comparison.
        Assert.assertEquals(TestHelper.sortStringList(expected,"{","}",","), TestHelper.sortStringList(result,"{","}",","));
        File schemaFile = new File("jsonStorage1.json/.pig_schema");
        result = Util.readFile(schemaFile);
        expected = Util.readFile(new File("test/org/apache/pig/test/data/jsonStorage1.schema"));
        Assert.assertEquals(expected, result);
        File tmpFile = File.createTempFile("tmp", null);
        tmpFile.delete();
        pigServer.registerQuery("a = load 'jsonStorage1.json' using JsonLoader();");
        pigServer.store("a", tmpFile.getCanonicalPath());
        result = Util.readFile(new File(tmpFile.getCanonicalPath()+"/part-m-00000"));
        expected = Util.readFile(new File("test/org/apache/pig/test/data/jsonStorage1.txt"));
        Assert.assertEquals(TestHelper.sortStringList(expected,"[","]",","), TestHelper.sortStringList(result,"[","]",","));
        tmpFile = File.createTempFile("tmp", null);
        tmpFile.delete();
        pigServer.registerQuery("a = load 'jsonStorage1.json' using" +
            " JsonLoader('a0:int,a1:{(a10:int,a11:chararray)},a2:(a20:double,a21:bytearray),a3:[chararray]');");
        pigServer.store("a", tmpFile.getCanonicalPath());
        result = Util.readFile(new File(tmpFile.getCanonicalPath()+"/part-m-00000"));
        expected = Util.readFile(new File("test/org/apache/pig/test/data/jsonStorage1.txt"));
        Assert.assertEquals(TestHelper.sortStringList(expected,"[","]",","), TestHelper.sortStringList(result,"[","]",","));
    }

    /**
     * A row of empty (tab-separated) fields must round-trip as a tuple of nulls.
     */
    @Test
    public void testJsonLoaderStorage2() throws Exception{
        File inputFile = File.createTempFile("tmp", null);
        PrintWriter pw = new PrintWriter(new FileWriter(inputFile));
        pw.println("\t\t\t");
        pw.close();
        File interFile = File.createTempFile("tmp", null);
        interFile.delete();
        pigServer.registerQuery("a = load '" + inputFile.getCanonicalPath() + "' as (a0:int, a1:chararray, a2, a3:(a30:int));");
        pigServer.store("a", interFile.getCanonicalPath(), "JsonStorage");
        pigServer.registerQuery("b = load '" + interFile.getCanonicalPath() + "' using JsonLoader();");
        Iterator<Tuple> iter = pigServer.openIterator("b");
        Tuple t = iter.next();
        Assert.assertTrue(t.size()==4);
        Assert.assertTrue(t.get(0)==null);
        Assert.assertTrue(t.get(1)==null);
        Assert.assertTrue(t.get(2)==null);
        Assert.assertTrue(t.get(3)==null);
        Assert.assertFalse(iter.hasNext());
    }

    @AfterClass
    public static void tearDown() {
        removeOutput();
    }
}
| apache-2.0 |
igniterealtime/Smack | smack-extensions/src/main/java/org/jivesoftware/smackx/address/provider/package-info.java | 717 | /**
*
* Copyright 2015 Florian Schmaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Providers for XEP-0033: Extended Stanza Addressing.
*/
package org.jivesoftware.smackx.address.provider;
| apache-2.0 |
miptliot/edx-app-android | VideoLocker/src/main/java/org/edx/mobile/view/custom/ETextView.java | 1539 | package org.edx.mobile.view.custom;
import org.edx.mobile.R;
import org.edx.mobile.logger.Logger;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.widget.TextView;
/**
 * {@link TextView} variant that applies a custom typeface declared via the
 * {@code custom_view_font} styled XML attribute, falling back to
 * "OpenSans-Regular.ttf" when no font is specified.
 */
public class ETextView extends TextView {
    private final Logger logger = new Logger(getClass().getName());

    public ETextView(Context context) {
        super(context);
    }

    public ETextView(Context context, AttributeSet attrs) {
        super(context, attrs);
        processAttrs(context, attrs);
    }

    public ETextView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        processAttrs(context, attrs);
    }

    /**
     * Reads the {@code font} styled attribute and installs the matching
     * typeface. Skipped in layout-editor preview mode, where font assets are
     * not available.
     */
    private void processAttrs(Context context, AttributeSet attrs) {
        if(isInEditMode())
            return;
        TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.custom_view, 0, 0);
        try {
            // check for the font attribute and setup font
            String fontFileName = a.getString(R.styleable.custom_view_font);
            if(fontFileName==null){
                //If font is not set for ETextView, set default font
                fontFileName = "OpenSans-Regular.ttf";
            }
            Typeface font = FontFactory.getInstance().getFont(context,fontFileName);
            setTypeface(font);
        } catch (Exception ex) {
            logger.error(ex);
        } finally {
            // Fixed: this call was commented out. TypedArray instances are
            // pooled by the framework and MUST be recycled after use, otherwise
            // every inflated ETextView leaks a pool entry.
            a.recycle();
        }
    }
}
| apache-2.0 |
pstout/spectator | spectator-api/src/main/java/com/netflix/spectator/api/DefaultCounter.java | 1572 | /**
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spectator.api;
import java.util.Collections;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Counter implementation for the default registry. Thread-safe: the count is
 * held in an {@link AtomicLong} and the clock is only read, never mutated.
 */
final class DefaultCounter implements Counter {

  private final Clock clock;
  private final Id meterId;
  private final AtomicLong value;

  /**
   * Create a new instance.
   *
   * @param clock source of wall-clock timestamps for measurements
   * @param id    identifier for this counter
   */
  DefaultCounter(Clock clock, Id id) {
    this.clock = clock;
    this.meterId = id;
    this.value = new AtomicLong(0L);
  }

  @Override public Id id() {
    return meterId;
  }

  @Override public boolean hasExpired() {
    // Counters in the default registry never expire.
    return false;
  }

  @Override public Iterable<Measurement> measure() {
    final Measurement sample = new Measurement(meterId, clock.wallTime(), value.get());
    return Collections.singleton(sample);
  }

  @Override public void increment() {
    value.incrementAndGet();
  }

  @Override public void increment(long amount) {
    value.addAndGet(amount);
  }

  @Override public long count() {
    return value.get();
  }
}
| apache-2.0 |
GrammarViz2/Uzaygezen | uzaygezen-core/src/main/java/com/google/uzaygezen/core/LongBitVector.java | 15293 | /*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.uzaygezen.core;
import java.math.BigInteger;
import java.util.BitSet;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import com.google.common.base.Preconditions;
/**
* BitVector implementation for vectors of length 64 or less.
*
* @author Mehmet Akin
* @author Daniel Aioanei
*/
public final class LongBitVector implements BitVector, Cloneable {

  // All 64 bits set; base mask used to build ranges via shifting.
  private static final long WORD_MASK = -1L;
  private static final int BITS_PER_WORD = 64;

  // Logical size of this vector in bits; 0 <= size <= 64.
  private final int size;
  // used to clear excess bits after operations.
  // Equal to WORD_MASK >>> BITS_PER_WORD - size
  private final long mask;
  // Bit payload; class invariant: only the lowest {size} bits may be set.
  private long data;

  public LongBitVector(int size) {
    this(size, 0);
    // NOTE(review): this check runs AFTER the private constructor already used
    // size to compute the mask. Also, Guava's Preconditions substitutes "%s"
    // placeholders, so "{1}" and "{2}" will appear literally in the message —
    // confirm against the Preconditions API.
    Preconditions.checkArgument(size >= 0 && size <= BITS_PER_WORD,
        "Size must be positive and <= {1} size: {2}", BITS_PER_WORD, size);
  }

  /**
   * Unsafe constructor. Keep it private.
   */
  private LongBitVector(int size, long data) {
    assert 64 - Long.numberOfLeadingZeros(data) <= size;
    this.size = size;
    this.data = data;
    // Shift distance is BITS_PER_WORD - size; special-cased for size == 0
    // because a 64-bit shift of a long is a no-op in Java.
    mask = size == 0 ? 0 : WORD_MASK >>> BITS_PER_WORD - size;
  }

  // Both vectors must have the same logical size for binary operations.
  private void checkSize(BitVector other) {
    if (size != other.size()) {
      throw new IllegalArgumentException(
          "Sizes must be equal. " + this.size + " : " + other.size());
    }
  }

  private void checkIndex(int bitIndex) {
    // Non-short-circuit '|' is intentional: both comparisons are cheap and
    // branch-free evaluation can be faster than '||'.
    if (bitIndex < 0 | bitIndex >= size) {
      throw new IndexOutOfBoundsException("Bit index out of range: " + bitIndex);
    }
  }

  // Validates a half-open range [fromIndex, toIndex) against this vector.
  private void checkBounds(int fromIndex, int toIndex) {
    if (fromIndex < 0 | toIndex > size | fromIndex > toIndex) {
      throw new IndexOutOfBoundsException(
          "Range [" + fromIndex + ", " + toIndex + ") is invalid for this bit vector");
    }
  }

  @Override
  public void and(BitVector o) {
    checkSize(o);
    data &= o.toExactLong();
  }

  @Override
  public void andNot(BitVector o) {
    checkSize(o);
    data &= ~o.toExactLong();
  }

  @Override
  public int cardinality() {
    return Long.bitCount(data);
  }

  @Override
  public void clear() {
    data = 0;
  }

  @Override
  public void clear(int bitIndex) {
    checkIndex(bitIndex);
    data &= ~(1L << bitIndex);
  }

  @Override
  public void clear(int fromIndex, int toIndex) {
    checkBounds(fromIndex, toIndex);
    if (fromIndex != toIndex) {
      unsafeClearNonEmptySection(fromIndex, toIndex);
    }
  }

  // Clears [fromIndex, toIndex); relies on Java masking shift counts mod 64,
  // so WORD_MASK >>> -toIndex == WORD_MASK >>> (64 - toIndex).
  private void unsafeClearNonEmptySection(int fromIndex, int toIndex) {
    data &= ~((WORD_MASK << fromIndex) & (WORD_MASK >>> -toIndex));
  }

  @Override
  public void copyFrom(BitVector from) {
    checkSize(from);
    data = from.toExactLong();
  }

  @Override
  public void flip(int bitIndex) {
    checkIndex(bitIndex);
    data ^= (1L << bitIndex);
  }

  @Override
  public void flip(int fromIndex, int toIndex) {
    checkBounds(fromIndex, toIndex);
    if (fromIndex != toIndex) {
      data ^= ((WORD_MASK << fromIndex) & (WORD_MASK >>> -toIndex));
    }
  }

  @Override
  public boolean get(int bitIndex) {
    checkIndex(bitIndex);
    return unsafeGet(bitIndex);
  }

  private boolean unsafeGet(int bitIndex) {
    return (data & (1L << bitIndex)) != 0;
  }

  @Override
  public void grayCode() {
    // Standard binary-reflected Gray code: g = b ^ (b >>> 1).
    data ^= (data >>> 1);
  }

  @Override
  public void grayCodeInverse() {
    // Inverse Gray code is the running XOR of all higher bits; computed in
    // O(log n) steps by XOR-folding with doubling shift distances.
    long localData = data;
    localData ^= localData >>> 1;
    localData ^= localData >>> 2;
    localData ^= localData >>> 4;
    localData ^= localData >>> 8;
    localData ^= localData >>> 16;
    localData ^= localData >>> 32;
    data = localData;
  }

  @Override
  public boolean increment() {
    // Check for overflow: mask is the all-ones value for this size.
    if (data == mask) {
      return false;
    }
    data++;
    return true;
  }

  @Override
  public boolean intersects(BitVector o) {
    checkSize(o);
    return (data & o.toExactLong()) != 0;
  }

  @Override
  public int length() {
    // Index of the highest set bit plus one; 0 when empty.
    return BITS_PER_WORD - Long.numberOfLeadingZeros(data);
  }

  @Override
  public int size() {
    return size;
  }

  @Override
  public int nextClearBit(int fromIndex) {
    Preconditions.checkArgument(fromIndex >= 0);
    if (fromIndex >= size) {
      return -1;
    }
    // In ~data all bits at index >= size are set, so the first clear bit found
    // can be at most index 'size'; that sentinel maps to "not found".
    long w = ~data & (WORD_MASK << fromIndex);
    int tcb = Long.numberOfTrailingZeros(w);
    return tcb == size ? -1 : tcb;
  }

  @Override
  public int nextSetBit(int fromIndex) {
    Preconditions.checkArgument(fromIndex >= 0);
    if (fromIndex >= size) {
      return -1;
    }
    long w = data & (WORD_MASK << fromIndex);
    // numberOfTrailingZeros returns 64 when w == 0, i.e. no set bit remains.
    int tcb = Long.numberOfTrailingZeros(w);
    return tcb == 64 ? -1 : tcb;
  }

  @Override
  public void or(BitVector o) {
    checkSize(o);
    this.data |= o.toExactLong();
  }

  @Override
  public void rotate(int count) {
    // Positive count rotates towards lower bit indexes (right); negative
    // rotates left. NOTE(review): count %= size divides by zero when
    // size == 0 — presumably callers never rotate an empty vector; confirm.
    final int localSize = size;
    count %= localSize;
    final long localData = data;
    if (count > 0) {
      data = ((localData >>> count) | (localData << localSize - count)) & mask;
    } else {
      data = ((localData >>> localSize + count) | (localData << -count)) & mask;
    }
  }

  @Override
  public void set(int bitIndex) {
    checkIndex(bitIndex);
    data |= (1L << bitIndex);
  }

  public void set(int bitIndex, boolean value) {
    if (value) {
      set(bitIndex);
    } else {
      clear(bitIndex);
    }
  }

  @Override
  public void set(int fromIndex, int toIndex) {
    checkBounds(fromIndex, toIndex);
    if (fromIndex != toIndex) {
      data |= ((WORD_MASK << fromIndex) & (WORD_MASK >>> -toIndex));
    }
  }

  @Override
  public void set(int fromIndex, int toIndex, boolean value) {
    if (value) {
      set(fromIndex, toIndex);
    } else {
      clear(fromIndex, toIndex);
    }
  }

  @Override
  public void xor(BitVector o) {
    checkSize(o);
    this.data ^= o.toExactLong();
  }

  @Override
  public boolean isEmpty() {
    return data == 0;
  }

  @Override
  public LongBitVector clone() {
    try {
      return (LongBitVector) super.clone();
    } catch (CloneNotSupportedException e) {
      // Unreachable: this class implements Cloneable.
      // NOTE(review): the cause is dropped; consider new InternalError(e).
      throw new InternalError("Cloning error. ");
    }
  }

  @Override
  public boolean equals(Object obj) {
    if (obj instanceof BitVector) {
      BitVector other = (BitVector) obj;
      // optimisation
      // NOTE(review): for this class size <= 64 always holds, so the else
      // branch appears unreachable; it mirrors a generic BitVector equals.
      if (size <= 64) {
        return size == other.size() && data == other.toExactLong();
      } else {
        return size == other.size() && toBitSet().equals(other.toBitSet());
      }
    } else {
      return false;
    }
  }

  @Override
  public int hashCode() {
    // We imitate BitSet's hashcode implementation
    long h = 1234 ^ data;
    int bitSetHashCode = (int) ((h >> 32) ^ h);
    return size + 31 * bitSetHashCode;
  }

  @Override
  public String toString() {
    // Fixed-width binary string, most significant bit first.
    return StringUtils.leftPad(Long.toBinaryString(data), size, '0');
  }

  @Override
  public BitSet toBitSet() {
    BitSet b = new BitSet(size);
    for (int i = 0; i < size; i++) {
      if (unsafeGet(i)) {
        b.set(i);
      }
    }
    return b;
  }

  @Override
  public long toLong() {
    return data;
  }

  @Override
  public BigInteger toBigInteger() {
    // Converts the payload interpreted as an UNSIGNED 64-bit value: when the
    // long is negative, rebuild the value from the top 63 bits plus bit 0 so
    // the result is always non-negative.
    final BigInteger result;
    if (data >= 0) {
      result = BigInteger.valueOf(data);
    } else {
      BigInteger missingLowestBit = BigInteger.valueOf(data >>> 1).shiftLeft(1);
      if ((data & 1) == 1) {
        result = missingLowestBit.setBit(0);
      } else {
        result = missingLowestBit;
      }
    }
    return result;
  }

  public void copyFrom(long value) {
    Preconditions.checkArgument(64 - Long.numberOfLeadingZeros(value) <= size, "value doesn't fit");
    data = value;
  }

  @Override
  public int compareTo(BitVector o) {
    checkSize(o);
    final int cmp;
    // optimisation
    if (o.size() <= 64) {
      // Unsigned comparison: adding Long.MIN_VALUE flips the sign bit so that
      // signed compare yields the unsigned order, i.e.
      // 0, positives, Long.MAX_VALUE, Long.MIN_VALUE, negatives, -1
      long x = data + Long.MIN_VALUE;
      long y = o.toExactLong() + Long.MIN_VALUE;
      cmp = Long.compare(x, y);
      assert Integer.signum(cmp) == Integer.signum(
          BitSetComparator.INSTANCE.compare(toBitSet(), o.toBitSet()));
    } else {
      cmp = BitSetComparator.INSTANCE.compare(toBitSet(), o.toBitSet());
    }
    return cmp;
  }

  @Override
  public void copyFrom(BitSet from) {
    int localSize = size;
    long value = 0;
    for (int i = from.nextSetBit(0); i != -1; i = from.nextSetBit(i + 1)) {
      Preconditions.checkArgument(i < localSize, "bit set too large");
      value |= 1L << i;
    }
    data = value;
  }

  @Override
  public void copyFromSection(BitVector src, int fromIndex) {
    // Copies size bits of src starting at fromIndex into this vector.
    Preconditions.checkArgument(fromIndex >= 0, "fromIndex must be non-negative");
    int srcSize = src.size();
    int toIndex = fromIndex + size;
    Preconditions.checkArgument(toIndex <= srcSize, "not enough bits in src");
    long value;
    if (toIndex <= 64) {
      // Fast path: the whole section fits in src's low word.
      long srcData = src.toLong();
      value = (srcData >>> fromIndex) & mask;
    } else {
      // Slow path: walk src's set bits within [fromIndex, toIndex).
      value = 0;
      for (int i = src.nextSetBit(fromIndex); i < toIndex && i != -1; i = src.nextSetBit(i + 1)) {
        value |= 1L << (i - fromIndex);
      }
    }
    data = value;
  }

  @Override
  public long toExactLong() {
    return data;
  }

  @Override
  public void smallerEvenAndGrayCode() {
    // NOTE(review): appears to replace the value v with gray(v') where v' is
    // the largest even number < v (identity for v == 0) — confirm against the
    // BitVector interface contract.
    long localData = data;
    if ((localData & 0x1) == 1) {
      assert size > 0;
      data = localData ^ (localData >>> 1) ^ 0x1;
    } else {
      if (localData != 0) {
        long dataMinusTwo = localData - 2;
        data = dataMinusTwo ^ (dataMinusTwo >>> 1);
      }
    }
  }

  @Override
  public void grayCodeRank(BitVector mu, BitVector w) {
    grayCodeRank(mu, w, true);
  }

  /**
   * Visible for testing.
   *
   * Compacts the bits of {@code w} at the positions where {@code mu} is set
   * into the low bits of this vector, preserving order. This vector's size
   * must equal {@code mu.cardinality()}.
   */
  void grayCodeRank(BitVector mu, BitVector w, boolean optimiseIfPossible) {
    int theirSize = mu.size();
    Preconditions.checkArgument(theirSize == w.size(), "mu/w size mismatch");
    int muLen = mu.length();
    long pow2pos = 1L;
    long value = 0;
    // Non-short-circuit '&' is intentional: both operands are cheap.
    if (optimiseIfPossible & muLen <= 64) {
      // mu doesn't have any set bits over index 63
      long muLong = mu.toExactLong();
      // w might have some set bits over index 63, but they don't matter anyway
      long wLong = w.toLong();
      long pow2i = 1L;
      for (int i = 0; i < muLen; ++i) {
        if ((muLong & pow2i) != 0) {
          if ((wLong & pow2i) != 0) {
            value |= pow2pos;
          }
          pow2pos <<= 1;
        }
        pow2i <<= 1;
      }
    } else {
      // Generic path: iterate mu's set bits; the awkward loop condition
      // avoids calling nextSetBit with an out-of-range index.
      for (int j = theirSize == 0 ? -1 : mu.nextSetBit(0); j != -1;
          j = j == theirSize - 1 ? -1 : mu.nextSetBit(j + 1)) {
        if (w.get(j)) {
          value |= pow2pos;
        }
        pow2pos <<= 1;
      }
    }
    assert pow2pos == 1L << mu.cardinality();
    Preconditions.checkArgument(1L << size == pow2pos, "wrong size");
    data = value;
  }

  @Override
  public int lowestDifferentBit() {
    // Index of the lowest bit that differs from bit 0; returns 0 when all
    // bits are equal (all zeros or all ones).
    long localData = data;
    final int value;
    if ((localData & 0x1L) == 0) {
      if (localData == 0) {
        value = 0;
      } else {
        value = Long.numberOfTrailingZeros(localData);
      }
    } else {
      if (localData == mask) {
        value = 0;
      } else {
        value = Long.numberOfTrailingZeros(~localData);
      }
    }
    assert value == 0 || (0 < value & value < size);
    return value;
  }

  @Override
  public void grayCodeRankInverse(BitVector mu, BitVector known, BitVector r) {
    // Reconstructs the Gray-code rank pre-image: bits at mu's set positions
    // come from r (in order); bits at the remaining positions are derived,
    // high to low, from 'known' XOR the next higher bit of the result.
    int muSize = mu.size();
    Preconditions.checkArgument(size == muSize, "i/mu size mismatch");
    // Will fail if the sizes are different.
    Preconditions.checkArgument(!known.intersects(mu), "known and mu must not intersect");
    long muLong = mu.toExactLong();
    long knownLong = known.toExactLong();
    // Will check r.size() against mu.cardinality later.
    int rSize = r.size();
    Preconditions.checkArgument(rSize <= muSize, "r is too large");
    long rLong = r.toExactLong();
    long value = 0;
    int pos = 0;
    int muLength = mu.length();
    long pow2k = 1L;
    // Scatter r's bits into the positions where mu is set.
    for (int k = 0; k < muLength; ++k) {
      if ((muLong & pow2k) != 0) {
        if ((rLong >> pos & 1L) != 0) {
          value |= pow2k;
        }
        ++pos;
      }
      pow2k <<= 1;
    }
    assert pos == mu.cardinality();
    Preconditions.checkArgument(pos == rSize, "r.size()/mu.cardinality() mismatch");
    int knownLength = known.length();
    // Fill the free (non-mu) positions from the top down.
    for (int k = Math.max(muLength - 1, knownLength); --k >= 0; ) {
      pow2k = 1L << k;
      if ((muLong & pow2k) == 0) {
        assert (value & pow2k) == 0;
        if (((knownLong & pow2k) ^ (value >> 1 & pow2k)) != 0) {
          value |= pow2k;
        }
      }
    }
    data = value;
  }

  @Override
  public void copySectionFrom(int offset, BitVector src) {
    // Overwrites bits [offset, offset + src.size()) of this vector with src.
    int srcSize = src.size();
    int toIndex = offset + srcSize;
    if (offset < 0 | toIndex > size) {
      throw new IndexOutOfBoundsException(
          "invalid range: offset=" + offset + " src.size()=" + src.size());
    }
    if (offset != toIndex) {
      unsafeClearNonEmptySection(offset, toIndex);
      long srcData = src.toExactLong();
      data |= srcData << offset;
    }
  }

  @Override
  public long[] toLongArray() {
    return size == 0 ? ArrayUtils.EMPTY_LONG_ARRAY : new long[] {data};
  }

  @Override
  public byte[] toBigEndianByteArray() {
    // Serializes the low ceil(size / 8) bytes, most significant byte first.
    int n = MathUtils.bitCountToByteCount(size);
    byte[] a = new byte[n];
    long x = data;
    for (int i = 0; i < n; ) {
      a[n - ++i] = (byte) (x & 0xFF);
      x >>>= 8;
    }
    assert x == 0;
    return a;
  }

  @Override
  public void copyFrom(long[] array) {
    if (size == 0) {
      Preconditions.checkArgument(array.length == 0, "Array must be empty.");
    } else {
      Preconditions.checkArgument(array.length == 1, "Array length must be 1.");
      copyFrom(array[0]);
    }
  }

  @Override
  public void copyFromBigEndian(byte[] array) {
    // Inverse of toBigEndianByteArray: fold bytes in, shifting between reads.
    int n = MathUtils.bitCountToByteCount(size);
    Preconditions.checkArgument(array.length == n, "Array length must be %s.", n);
    long x = 0;
    for (int i = 0; i < n - 1; ) {
      x |= (array[i++] & 0xFF);
      x <<= 8;
    }
    if (n != 0) {
      x |= (array[n - 1] & 0xFF);
    }
    copyFrom(x);
  }

  @Override
  public boolean areAllLowestBitsClear(int bitCount) {
    Preconditions.checkArgument(0 <= bitCount & bitCount <= size, "bitCount is out of range");
    // Only bitCount == 64 is affected by xoring with (bitCount >> 6):
    // 1L << 64 wraps to 1, and xor with (64 >> 6) == 1 turns it into 0 so the
    // subtraction yields the all-ones mask, as intended.
    return (data & (((1L << bitCount) ^ (bitCount >> 6)) - 1)) == 0;
  }

  @Override
  public void copyFrom(BigInteger s) {
    Preconditions.checkArgument(s.signum() >= 0, s);
    Preconditions.checkArgument(s.bitLength() <= size());
    // Note that the long value will be negative iff bitLength == 64 and bit 63
    // is set.
    copyFrom(s.longValue());
  }
}
| apache-2.0 |
srikalyan/spring-boot | spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/transaction/package-info.java | 736 | /*
* Copyright 2012-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Auto-configuration for transaction support.
*/
package org.springframework.boot.autoconfigure.transaction;
| apache-2.0 |
goldmansachs/reladomo | reladomo/src/main/java/com/gs/fw/common/mithra/util/fileparser/ClassReaderState.java | 2312 | /*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.util.fileparser;
import java.io.IOException;
import java.io.StreamTokenizer;
import java.text.ParseException;
/**
 * Parser state that consumes a "class <name>" header line from the data file,
 * registers a new parsed-data section for that class, and hands control to the
 * attribute-reader state for the lines that follow.
 */
public class ClassReaderState extends ParserState
{
    public ClassReaderState(AbstractMithraDataFileParser parser)
    {
        super(parser);
    }

    /**
     * Consumes the current "class <name>" line.
     *
     * Expects the tokenizer to be positioned on the "class" keyword; reads the
     * class name and the trailing end-of-line token.
     *
     * @param st tokenizer positioned at the start of the class header line
     * @return the attribute-reader state that parses the following lines
     * @throws ParseException if the line does not match "class <name>" exactly
     *         or the named class cannot be resolved
     */
    public ParserState parse(StreamTokenizer st) throws IOException, ParseException
    {
        int token = st.ttype;
        // The line must start with the literal "class" keyword.
        if (token != StreamTokenizer.TT_WORD || !st.sval.equals(AbstractMithraDataFileParser.CLASS_IDENTIFIER))
        {
            throw new ParseException("expected line " + st.lineno() + " to begin with class", st.lineno());
        }
        token = st.nextToken();
        if (token != StreamTokenizer.TT_WORD)
        {
            throw new ParseException("expected a class name on line "+st.lineno(), st.lineno());
        }
        // Start a fresh parsed-data section before binding the class name.
        this.getParser().addNewMithraParsedData();
        String className = st.sval;
        try
        {
            this.getParser().getCurrentParsedData().setParsedClassName(className);
        }
        catch (Exception e)
        {
            // Wrap resolution failures with line context, preserving the cause.
            ParseException parseException = new ParseException("no such class (or finder): "+className+" on line "+st.lineno(), st.lineno());
            parseException.initCause(e);
            throw parseException;
        }
        // Tell the data-reader state which class subsequent rows belong to.
        this.getParser().getDataReaderState().setClass(className, st.lineno());
        token = st.nextToken();
        // Nothing else is allowed after the class name on this line.
        if (token != StreamTokenizer.TT_EOL)
        {
            throw new ParseException("invalid data after the class name on line "+st.lineno(), st.lineno());
        }
        return this.getParser().getAttributeReaderState();
    }
}
| apache-2.0 |
TatsianaKasiankova/pentaho-kettle | integration/src/it/java/org/pentaho/di/trans/RowProducerIT.java | 5094 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans;
import java.util.concurrent.TimeUnit;
import junit.framework.TestCase;
import org.pentaho.di.core.RowSet;
import org.pentaho.di.core.row.RowMetaInterface;
/**
 * Tests for RowProducer: verifies that {@link RowProducer#putRow} blocks and
 * retries until the underlying row set accepts the row.
 */
public class RowProducerIT extends TestCase {
  /**
   * Verify that putting a row into a RowProducer does not block if the underlying rowSet is not full.
   */
  public void testPutRow_not_full() {
    // Row set accepts on the very first attempt.
    final int attempts = 1;
    MockBlockingRowSet rs = new MockBlockingRowSet( attempts );
    RowProducer rp = new RowProducer( null, rs );
    rp.putRow( null, null );
    assertEquals( "Total attempts to putRow() exceeded expected amount", attempts, rs.getTotalAttempts() );
  }

  /**
   * Verify that putting a row into a RowProducer blocks until the row is successfully added instead of returning
   * immediately.
   */
  public void testPutRow_full() {
    // Row set rejects the first 9 attempts, accepting only the 10th; the
    // producer must keep retrying rather than give up.
    final int attempts = 10;
    MockBlockingRowSet rs = new MockBlockingRowSet( attempts );
    RowProducer rp = new RowProducer( null, rs );
    rp.putRow( null, null );
    assertEquals( "Total attempts to putRow() exceeded expected amount", attempts, rs.getTotalAttempts() );
  }

  /**
   * RowSet stub whose putRowWait() only succeeds on every Nth call, simulating
   * a full row set. All other RowSet operations are unsupported.
   */
  class MockBlockingRowSet implements RowSet {
    // The number of calls to putRowWait() that it requires to actually put a row.
    private final int reqdAttempts;

    // Number of times putRowWait() has been called.
    private int totalAttempts;

    /**
     * Create a blocking row set that requires {@code attempts} calls to
     * {@link #putRowWait(RowMetaInterface, Object[], long, TimeUnit)} before actually adding the row.
     *
     * @param attempts
     *          Number of calls required to actually put a row.
     */
    public MockBlockingRowSet( int attempts ) {
      this.reqdAttempts = attempts;
      totalAttempts = 0;
    }

    public int getTotalAttempts() {
      return totalAttempts;
    }

    public boolean putRow( RowMetaInterface rowMeta, Object[] rowData ) {
      throw new UnsupportedOperationException();
    }

    // Succeeds only on every reqdAttempts-th call; otherwise sleeps briefly
    // and reports failure, forcing the caller to retry.
    public boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ) {
      totalAttempts++;
      if ( totalAttempts % reqdAttempts == 0 ) {
        return true;
      }
      try {
        Thread.sleep( 10 ); // Simulate overhead of blocking
      } catch ( Exception ex ) {
        throw new RuntimeException( ex );
      }
      return false;
    }

    // The remaining RowSet methods are irrelevant to these tests and
    // deliberately unsupported so any unexpected use fails loudly.
    public Object[] getRow() {
      throw new UnsupportedOperationException();
    }

    public Object[] getRowImmediate() {
      throw new UnsupportedOperationException();
    }

    public Object[] getRowWait( long timeout, TimeUnit tu ) {
      throw new UnsupportedOperationException();
    }

    public void setDone() {
      throw new UnsupportedOperationException();
    }

    public boolean isDone() {
      throw new UnsupportedOperationException();
    }

    public String getOriginStepName() {
      throw new UnsupportedOperationException();
    }

    public int getOriginStepCopy() {
      throw new UnsupportedOperationException();
    }

    public String getDestinationStepName() {
      throw new UnsupportedOperationException();
    }

    public int getDestinationStepCopy() {
      throw new UnsupportedOperationException();
    }

    public String getName() {
      throw new UnsupportedOperationException();
    }

    public int size() {
      throw new UnsupportedOperationException();
    }

    public void setThreadNameFromToCopy( String from, int from_copy, String to, int to_copy ) {
      throw new UnsupportedOperationException();
    }

    public RowMetaInterface getRowMeta() {
      throw new UnsupportedOperationException();
    }

    public void setRowMeta( RowMetaInterface rowMeta ) {
      throw new UnsupportedOperationException();
    }

    public String getRemoteSlaveServerName() {
      throw new UnsupportedOperationException();
    }

    public void setRemoteSlaveServerName( String remoteSlaveServerName ) {
      throw new UnsupportedOperationException();
    }

    public boolean isBlocking() {
      return true;
    }

    public void clear() {
      throw new UnsupportedOperationException();
    }
  }
}
| apache-2.0 |
goodwinnk/intellij-community | platform/structuralsearch/source/com/intellij/structuralsearch/impl/matcher/XmlCompiledPattern.java | 748 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.structuralsearch.impl.matcher;
import com.intellij.structuralsearch.impl.matcher.strategies.XmlMatchingStrategy;
/**
 * Compiled structural-search pattern for XML: typed variables are recognized
 * by a double-underscore prefix, and matching uses the XML strategy.
 *
 * @author Eugene.Kudelevsky
 */
public class XmlCompiledPattern extends CompiledPattern {
  private static final String XML_TYPED_VAR_PREFIX = "__";

  public XmlCompiledPattern() {
    setStrategy(XmlMatchingStrategy.getInstance());
  }

  @Override
  public String[] getTypedVarPrefixes() {
    // XML patterns recognize a single typed-variable prefix.
    return new String[]{XML_TYPED_VAR_PREFIX};
  }

  @Override
  public boolean isTypedVar(final String str) {
    // A typed variable is any token that, ignoring surrounding whitespace,
    // begins with the "__" prefix.
    final String trimmed = str.trim();
    return trimmed.startsWith(XML_TYPED_VAR_PREFIX);
  }
}
| apache-2.0 |
packet-tracker/onos-1.4.0-custom-build | apps/dhcp/target/generated-sources/org/onosproject/dhcp/rest/ApiDocRegistrator.java | 1156 | /*
* Auto-generated by OnosSwaggerMojo.
*
* Copyright 2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.dhcp.rest;
import org.apache.felix.scr.annotations.Component;
import org.onosproject.rest.AbstractApiDocRegistrator;
import org.onosproject.rest.ApiDocProvider;
@Component(immediate = true)
public class ApiDocRegistrator extends AbstractApiDocRegistrator {
    // Registers the DHCP server's REST API documentation under /onos/dhcp so
    // it appears in the ONOS API doc viewer. Auto-generated; do not edit.
    public ApiDocRegistrator() {
        super(new ApiDocProvider("/onos/dhcp",
                                 "DHCP Server REST API",
                                 ApiDocRegistrator.class.getClassLoader()));
    }
}
| apache-2.0 |
Squeegee/batik | sources/org/apache/batik/ext/awt/image/rendered/TileGenerator.java | 1080 | /*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.ext.awt.image.rendered;
import java.awt.image.Raster;
/**
 * This the generic interface for a source of tiles. This is used
 * when the cache has a miss.
 *
 * @version $Id$
 */
public interface TileGenerator {
    /**
     * Produces the raster for the tile at the given tile-grid location.
     *
     * @param x the tile's column index in the tile grid
     * @param y the tile's row index in the tile grid
     * @return the generated tile contents
     */
    Raster genTile(int x, int y);
}
| apache-2.0 |
PasaLab/tachyon | core/server/worker/src/main/java/alluxio/worker/block/ClientRWLock.java | 3343 | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.worker.block;
import alluxio.Configuration;
import alluxio.PropertyKey;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import javax.annotation.concurrent.ThreadSafe;
/**
 * Read/write lock associated with clients rather than threads. Either its read lock or write lock
 * can be released by a thread different from the one acquiring them (but supposed to be requested
 * by the same client).
 *
 * <p>Implemented on a counting {@link Semaphore}: a read lock takes one permit and a write lock
 * takes all permits, so a writer excludes every reader and any other writer.
 */
@ThreadSafe
public final class ClientRWLock implements ReadWriteLock {
  /** Total number of permits. This value decides the max number of concurrent readers. */
  private static final int MAX_AVAILABLE =
      Configuration.getInt(PropertyKey.WORKER_TIERED_STORE_BLOCK_LOCK_READERS);
  /**
   * Uses the unfair lock to prevent a read lock that fails to release from locking the block
   * forever and thus blocking all the subsequent write access.
   * See https://alluxio.atlassian.net/browse/ALLUXIO-2636.
   */
  private final Semaphore mAvailable = new Semaphore(MAX_AVAILABLE, false);
  /** Reference count. Final: the counter object is fixed; only its value mutates. */
  private final AtomicInteger mReferences = new AtomicInteger();

  /**
   * Constructs a new {@link ClientRWLock}.
   */
  public ClientRWLock() {}

  @Override
  public Lock readLock() {
    // One permit: up to MAX_AVAILABLE readers may hold the lock concurrently.
    return new SessionLock(1);
  }

  @Override
  public Lock writeLock() {
    // All permits: a writer excludes every reader and other writers.
    return new SessionLock(MAX_AVAILABLE);
  }

  /**
   * @return the reference count
   */
  public int getReferenceCount() {
    return mReferences.get();
  }

  /**
   * Increments the reference count.
   */
  public void addReference() {
    mReferences.incrementAndGet();
  }

  /**
   * Decrements the reference count.
   *
   * @return the new reference count
   */
  public int dropReference() {
    return mReferences.decrementAndGet();
  }

  /**
   * {@link Lock} view over the shared semaphore. Because permits are not tied to threads,
   * lock and unlock may legitimately happen on different threads for the same client.
   */
  private final class SessionLock implements Lock {
    /** Number of permits this lock acquires: 1 for read, MAX_AVAILABLE for write. */
    private final int mPermits;

    private SessionLock(int permits) {
      mPermits = permits;
    }

    @Override
    public void lock() {
      mAvailable.acquireUninterruptibly(mPermits);
    }

    @Override
    public void lockInterruptibly() throws InterruptedException {
      mAvailable.acquire(mPermits);
    }

    @Override
    public boolean tryLock() {
      return mAvailable.tryAcquire(mPermits);
    }

    @Override
    public boolean tryLock(long time, TimeUnit unit) throws InterruptedException {
      return mAvailable.tryAcquire(mPermits, time, unit);
    }

    @Override
    public void unlock() {
      mAvailable.release(mPermits);
    }

    @Override
    public Condition newCondition() {
      // Conditions require thread-ownership semantics, which this lock does not have.
      throw new UnsupportedOperationException("newCondition() is not supported");
    }
  }
}
| apache-2.0 |
pfirmstone/JGDMS | qa/jtreg/net/jini/security/ProxyPreparer/LocalClassLoaderVerifyProxyPreparer.java | 1930 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.rmi.RemoteException;
import java.security.Permission;
import java.util.Collections;
import net.jini.core.constraint.MethodConstraints;
import net.jini.security.BasicProxyPreparer;
import net.jini.security.Security;
/**
 * A basic proxy preparer that uses this class's class loader when verifying
 * trust in proxies.
 */
public class LocalClassLoaderVerifyProxyPreparer extends BasicProxyPreparer {

    public LocalClassLoaderVerifyProxyPreparer(boolean verify,
                                               Permission[] permissions)
    {
        super(verify, permissions);
    }

    public LocalClassLoaderVerifyProxyPreparer(
        boolean verify,
        MethodConstraints methodConstraints,
        Permission[] permissions)
    {
        super(verify, methodConstraints, permissions);
    }

    /**
     * Verifies trust in the proxy, resolving classes through this class's
     * own class loader rather than the proxy's loader.
     */
    protected void verify(Object proxy) throws RemoteException {
        if (proxy == null) {
            throw new NullPointerException("Proxy cannot be null");
        }
        if (!verify) {
            // Verification disabled at construction time; nothing to do.
            return;
        }
        MethodConstraints constraints = getMethodConstraints(proxy);
        Security.verifyObjectTrust(
            proxy, getClass().getClassLoader(),
            constraints == null
                ? Collections.EMPTY_SET
                : Collections.singleton(constraints));
    }
}
| apache-2.0 |
potto007/druid-avro | api/src/test/java/io/druid/data/input/impl/RegexParseSpecTest.java | 1963 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.data.input.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.druid.TestObjectMapper;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
/**
 * Round-trip serialization test for {@link RegexParseSpec}.
 */
public class RegexParseSpecTest
{
  private final ObjectMapper jsonMapper = new TestObjectMapper();

  @Test
  public void testSerde() throws IOException
  {
    final RegexParseSpec original = new RegexParseSpec(
        new TimestampSpec("abc", "iso", null),
        new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("abc")), null, null),
        "\u0001",
        Arrays.asList("abc"),
        "abc"
    );

    // Serialize to JSON and back, then check the interesting fields survive.
    final String json = jsonMapper.writeValueAsString(original);
    final RegexParseSpec roundTripped = jsonMapper.readValue(json, RegexParseSpec.class);

    Assert.assertEquals("abc", roundTripped.getTimestampSpec().getTimestampColumn());
    Assert.assertEquals("iso", roundTripped.getTimestampSpec().getTimestampFormat());
    Assert.assertEquals("abc", roundTripped.getPattern());
    Assert.assertEquals("\u0001", roundTripped.getListDelimiter());
    Assert.assertEquals(Arrays.asList("abc"), roundTripped.getDimensionsSpec().getDimensionNames());
  }
}
| apache-2.0 |
rswijesena/carbon-identity | components/sso-saml/org.wso2.carbon.identity.sso.saml/src/main/java/org/wso2/carbon/identity/sso/saml/admin/SAMLSSOConfigAdmin.java | 11010 | /*
* Copyright 2005-2007 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.identity.sso.saml.admin;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.opensaml.saml1.core.NameIdentifier;
import org.wso2.carbon.identity.base.IdentityException;
import org.wso2.carbon.identity.core.model.SAMLSSOServiceProviderDO;
import org.wso2.carbon.identity.core.persistence.IdentityPersistenceManager;
import org.wso2.carbon.identity.core.util.IdentityUtil;
import org.wso2.carbon.identity.sso.saml.dto.SAMLSSOServiceProviderDTO;
import org.wso2.carbon.identity.sso.saml.dto.SAMLSSOServiceProviderInfoDTO;
import org.wso2.carbon.registry.core.Registry;
import org.wso2.carbon.registry.core.session.UserRegistry;
/**
* This class is used for managing SAML SSO providers. Adding, retrieving and removing service
* providers are supported here.
* In addition to that logic for generating key pairs for tenants except for tenant 0, is included
* here.
*/
public class SAMLSSOConfigAdmin {
private static Log log = LogFactory.getLog(SAMLSSOConfigAdmin.class);
private UserRegistry registry;
public SAMLSSOConfigAdmin(Registry userRegistry) {
registry = (UserRegistry) userRegistry;
}
/**
* Add a new service provider
*
* @param serviceProviderDTO service Provider DTO
* @return true if successful, false otherwise
* @throws IdentityException if fails to load the identity persistence manager
*/
public boolean addRelyingPartyServiceProvider(SAMLSSOServiceProviderDTO serviceProviderDTO) throws IdentityException {
SAMLSSOServiceProviderDO serviceProviderDO = new SAMLSSOServiceProviderDO();
if (serviceProviderDTO.getIssuer() == null || serviceProviderDTO.getIssuer().equals("")) {
String message = "A value for the Issuer is mandatory";
log.error(message);
throw new IdentityException(message);
}
if (serviceProviderDTO.getIssuer().contains("@")) {
String message = "\'@\' is a reserved character. Cannot be used for Service Provider Entity ID";
log.error(message);
throw new IdentityException(message);
}
serviceProviderDO.setIssuer(serviceProviderDTO.getIssuer());
serviceProviderDO.setAssertionConsumerUrl(serviceProviderDTO.getAssertionConsumerUrl());
serviceProviderDO.setCertAlias(serviceProviderDTO.getCertAlias());
serviceProviderDO.setUseFullyQualifiedUsername(serviceProviderDTO.isUseFullyQualifiedUsername());
serviceProviderDO.setDoSingleLogout(serviceProviderDTO.isDoSingleLogout());
serviceProviderDO.setLoginPageURL(serviceProviderDTO.getLoginPageURL());
serviceProviderDO.setLogoutURL(serviceProviderDTO.getLogoutURL());
serviceProviderDO.setDoSignResponse(serviceProviderDTO.isDoSignResponse());
serviceProviderDO.setDoSignAssertions(serviceProviderDTO.isDoSignAssertions());
serviceProviderDO.setNameIdClaimUri(serviceProviderDTO.getNameIdClaimUri());
serviceProviderDO.setEnableAttributesByDefault(serviceProviderDTO.isEnableAttributesByDefault());
if (serviceProviderDTO.getNameIDFormat() == null) {
serviceProviderDTO.setNameIDFormat(NameIdentifier.EMAIL);
} else {
serviceProviderDTO.setNameIDFormat(serviceProviderDTO.getNameIDFormat().replace("/",
":"));
}
serviceProviderDO.setNameIDFormat(serviceProviderDTO.getNameIDFormat());
if (serviceProviderDTO.getRequestedClaims() != null && serviceProviderDTO.getRequestedClaims().length != 0) {
if (serviceProviderDTO.getAttributeConsumingServiceIndex() != null &&
!serviceProviderDTO.getAttributeConsumingServiceIndex().equals("")) {
serviceProviderDO.setAttributeConsumingServiceIndex(serviceProviderDTO.getAttributeConsumingServiceIndex());
} else {
serviceProviderDO.setAttributeConsumingServiceIndex(Integer.toString(IdentityUtil.getRandomInteger()));
}
serviceProviderDO.setRequestedClaims(serviceProviderDTO.getRequestedClaims());
} else {
if (serviceProviderDTO.getAttributeConsumingServiceIndex() == null ||
serviceProviderDTO.getAttributeConsumingServiceIndex().isEmpty()) {
serviceProviderDO.setAttributeConsumingServiceIndex(Integer.toString(IdentityUtil.getRandomInteger()));
} else {
serviceProviderDO.setAttributeConsumingServiceIndex(serviceProviderDTO.getAttributeConsumingServiceIndex());
}
}
if (serviceProviderDTO.getRequestedAudiences() != null && serviceProviderDTO.getRequestedAudiences().length != 0) {
serviceProviderDO.setRequestedAudiences(serviceProviderDTO.getRequestedAudiences());
}
if (serviceProviderDTO.getRequestedRecipients() != null && serviceProviderDTO.getRequestedRecipients().length != 0) {
serviceProviderDO.setRequestedRecipients(serviceProviderDTO.getRequestedRecipients());
}
serviceProviderDO.setIdPInitSSOEnabled(serviceProviderDTO.isIdPInitSSOEnabled());
serviceProviderDO.setDoEnableEncryptedAssertion(serviceProviderDTO.isDoEnableEncryptedAssertion());
serviceProviderDO.setDoValidateSignatureInRequests(serviceProviderDTO.isDoValidateSignatureInRequests());
IdentityPersistenceManager persistenceManager = IdentityPersistenceManager
.getPersistanceManager();
try {
return persistenceManager.addServiceProvider(registry, serviceProviderDO);
} catch (IdentityException e) {
log.error("Error obtaining a registry for adding a new service provider", e);
throw new IdentityException("Error obtaining a registry for adding a new service provider", e);
}
}
/**
* Retrieve all the relying party service providers
*
* @return set of RP Service Providers + file path of pub. key of generated key pair
*/
public SAMLSSOServiceProviderInfoDTO getServiceProviders() throws IdentityException {
SAMLSSOServiceProviderDTO[] serviceProviders = null;
try {
IdentityPersistenceManager persistenceManager = IdentityPersistenceManager
.getPersistanceManager();
SAMLSSOServiceProviderDO[] providersSet = persistenceManager.
getServiceProviders(registry);
serviceProviders = new SAMLSSOServiceProviderDTO[providersSet.length];
for (int i = 0; i < providersSet.length; i++) {
SAMLSSOServiceProviderDO providerDO = providersSet[i];
SAMLSSOServiceProviderDTO providerDTO = new SAMLSSOServiceProviderDTO();
providerDTO.setIssuer(providerDO.getIssuer());
providerDTO.setAssertionConsumerUrl(providerDO.getAssertionConsumerUrl());
providerDTO.setCertAlias(providerDO.getCertAlias());
providerDTO.setAttributeConsumingServiceIndex(providerDO.getAttributeConsumingServiceIndex());
providerDTO.setUseFullyQualifiedUsername(providerDO.isUseFullyQualifiedUsername());
providerDTO.setDoSignResponse(providerDO.isDoSignResponse());
providerDTO.setDoSignAssertions(providerDO.isDoSignAssertions());
providerDTO.setDoSingleLogout(providerDO.isDoSingleLogout());
if (providerDO.getLoginPageURL() == null || "null".equals(providerDO.getLoginPageURL())) {
providerDTO.setLoginPageURL("");
} else {
providerDTO.setLoginPageURL(providerDO.getLoginPageURL());
}
if (providerDO.getLogoutURL() == null || "null".equals(providerDO.getLogoutURL())) {
providerDTO.setLogoutURL("");
} else {
providerDTO.setLogoutURL(providerDO.getLogoutURL());
}
providerDTO.setRequestedClaims(providerDO.getRequestedClaims());
providerDTO.setRequestedAudiences(providerDO.getRequestedAudiences());
providerDTO.setRequestedRecipients(providerDO.getRequestedRecipients());
providerDTO.setEnableAttributesByDefault(providerDO.isEnableAttributesByDefault());
providerDTO.setNameIdClaimUri(providerDO.getNameIdClaimUri());
providerDTO.setNameIDFormat(providerDO.getNameIDFormat());
if (providerDTO.getNameIDFormat() == null) {
providerDTO.setNameIDFormat(NameIdentifier.EMAIL);
}
providerDTO.setNameIDFormat(providerDTO.getNameIDFormat().replace(":", "/"));
providerDTO.setIdPInitSSOEnabled(providerDO.isIdPInitSSOEnabled());
providerDTO.setDoEnableEncryptedAssertion(providerDO.isDoEnableEncryptedAssertion());
providerDTO.setDoValidateSignatureInRequests(providerDO.isDoValidateSignatureInRequests());
serviceProviders[i] = providerDTO;
}
} catch (IdentityException e) {
log.error("Error obtaining a registry intance for reading service provider list", e);
throw new IdentityException("Error obtaining a registry intance for reading service provider list", e);
}
SAMLSSOServiceProviderInfoDTO serviceProviderInfoDTO = new SAMLSSOServiceProviderInfoDTO();
serviceProviderInfoDTO.setServiceProviders(serviceProviders);
//if it is tenant zero
if (registry.getTenantId() == 0) {
serviceProviderInfoDTO.setTenantZero(true);
}
return serviceProviderInfoDTO;
}
/**
* Remove an existing service provider.
*
* @param issuer issuer name
* @return true is successful
* @throws IdentityException
*/
public boolean removeServiceProvider(String issuer) throws IdentityException {
try {
IdentityPersistenceManager persistenceManager = IdentityPersistenceManager.getPersistanceManager();
return persistenceManager.removeServiceProvider(registry, issuer);
} catch (IdentityException e) {
log.error("Error removing a Service Provider");
throw new IdentityException("Error removing a Service Provider", e);
}
}
}
| apache-2.0 |
MikeThomsen/nifi | nifi-registry/nifi-registry-core/nifi-registry-client/src/test/java/org/apache/nifi/registry/client/impl/request/TestBearerTokenRequestConfig.java | 1642 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.registry.client.impl.request;
import org.apache.nifi.registry.client.RequestConfig;
import org.junit.Test;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
public class TestBearerTokenRequestConfig {

    /**
     * A bearer-token config must expose exactly one header:
     * {@code Authorization: Bearer <token>}.
     */
    @Test
    public void testBearerTokenRequestConfig() {
        final String token = "some-token";
        final String expectedHeaderValue = "Bearer " + token;

        final RequestConfig config = new BearerTokenRequestConfig(token);
        final Map<String, String> headers = config.getHeaders();

        assertNotNull(headers);
        assertEquals(1, headers.size());
        assertEquals(expectedHeaderValue, headers.get("Authorization"));
    }
}
| apache-2.0 |
bbrouwer/spring-boot | spring-boot-project/spring-boot-actuator/src/main/java/org/springframework/boot/actuate/endpoint/cache/CachingConfiguration.java | 1266 | /*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.endpoint.cache;
/**
* The caching configuration of an endpoint.
*
* @author Stephane Nicoll
* @since 2.0.0
*/
public class CachingConfiguration {

	/** Time to live of a cached operation result, in milliseconds. */
	private final long timeToLive;

	/**
	 * Create a new instance with the given {@code timeToLive}.
	 * @param timeToLive the time to live of an operation result in milliseconds
	 */
	public CachingConfiguration(long timeToLive) {
		this.timeToLive = timeToLive;
	}

	/**
	 * Returns the time to live of a cached operation result.
	 * @return the time to live of an operation result
	 */
	public long getTimeToLive() {
		return timeToLive;
	}

}
| apache-2.0 |
cristianonicolai/drools-wb | drools-wb-screens/drools-wb-guided-dtree-editor/drools-wb-guided-dtree-editor-backend/src/test/java/org/drools/workbench/screens/guided/dtree/backend/server/indexing/classes/Applicant.java | 795 | /*
* Copyright 2014 JBoss, by Red Hat, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.screens.guided.dtree.backend.server.indexing.classes;
public class Applicant {

    // NOTE(review): no setter in view; presumably populated externally by the
    // tooling under test — confirm against the indexing test that uses this fixture.
    private int age;

    /** @return the applicant's age */
    public int getAge() {
        return this.age;
    }
}
| apache-2.0 |
jimengliu/cattle | code/implementation/docker/machine/src/main/java/io/cattle/platform/docker/machine/api/filter/MachineValidationFilter.java | 2296 | package io.cattle.platform.docker.machine.api.filter;
import static io.cattle.platform.core.constants.MachineConstants.*;
import io.cattle.platform.core.constants.AccountConstants;
import io.cattle.platform.core.constants.HostConstants;
import io.cattle.platform.core.model.Host;
import io.cattle.platform.iaas.api.filter.common.AbstractDefaultResourceManagerFilter;
import io.cattle.platform.util.type.CollectionUtils;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.request.resource.ResourceManager;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
public class MachineValidationFilter extends AbstractDefaultResourceManagerFilter {

    private static final String DRIVER_CONFIG_EXACTLY_ONE_REQUIRED = "DriverConfigExactlyOneRequired";

    @Override
    public Class<?>[] getTypeClasses() {
        return new Class<?>[] {Host.class};
    }

    @Override
    public String[] getTypes() {
        return new String[] { KIND_MACHINE };
    }

    /**
     * Validates that a machine-create request carries exactly one non-null
     * "*Config" driver field before delegating to the next resource manager.
     */
    @Override
    public Object create(String type, ApiRequest request, ResourceManager next) {
        // v1 API and super-admin schema host requests skip driver-config validation.
        if (HostConstants.TYPE.equals(type) && ("v1".equals(request.getVersion()) ||
                AccountConstants.SUPER_ADMIN_KIND.equals(request.getSchemaFactory().getId()))) {
            return super.create(type, request, next);
        }

        Map<String, Object> fields = CollectionUtils.toMap(request.getRequestObject());

        // Count the populated driver-config fields; more than one is an error.
        int driverConfigCount = 0;
        for (Map.Entry<String, Object> entry : fields.entrySet()) {
            boolean isDriverConfig = entry.getValue() != null
                    && StringUtils.endsWithIgnoreCase(entry.getKey(), CONFIG_FIELD_SUFFIX);
            if (isDriverConfig && ++driverConfigCount > 1) {
                throw new ClientVisibleException(ResponseCodes.UNPROCESSABLE_ENTITY, DRIVER_CONFIG_EXACTLY_ONE_REQUIRED);
            }
        }

        // Zero driver-config fields is equally invalid.
        if (driverConfigCount == 0) {
            throw new ClientVisibleException(ResponseCodes.UNPROCESSABLE_ENTITY, DRIVER_CONFIG_EXACTLY_ONE_REQUIRED);
        }

        return super.create(type, request, next);
    }
}
| apache-2.0 |
shun634501730/java_source_cn | src_en/com/sun/org/apache/xerces/internal/impl/dv/util/HexBin.java | 3681 | /*
* Copyright (c) 2007, 2016, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 1999-2002,2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.impl.dv.util;
/**
* format validation
*
* This class encodes/decodes hexadecimal data
*
* @xerces.internal
*
* @author Jeffrey Rodriguez
*/
public final class HexBin {
    private static final int BASELENGTH = 128;
    private static final int LOOKUPLENGTH = 16;

    // ASCII code point -> hex digit value (0..15), or -1 if not a hex digit.
    private static final byte[] hexNumberTable = new byte[BASELENGTH];
    // Hex digit value (0..15) -> upper-case hex character.
    private static final char[] lookUpHexAlphabet = new char[LOOKUPLENGTH];

    static {
        java.util.Arrays.fill(hexNumberTable, (byte) -1);
        for (int i = '0'; i <= '9'; i++) {
            hexNumberTable[i] = (byte) (i - '0');
        }
        for (int i = 'A'; i <= 'F'; i++) {
            hexNumberTable[i] = (byte) (i - 'A' + 10);
        }
        for (int i = 'a'; i <= 'f'; i++) {
            hexNumberTable[i] = (byte) (i - 'a' + 10);
        }
        for (int i = 0; i < 10; i++) {
            lookUpHexAlphabet[i] = (char) ('0' + i);
        }
        for (int i = 10; i < LOOKUPLENGTH; i++) {
            lookUpHexAlphabet[i] = (char) ('A' + i - 10);
        }
    }

    /**
     * Encode a byte array to an upper-case hex string.
     *
     * @param binaryData array of bytes to encode
     * @return the encoded string, or {@code null} if the input is {@code null}
     */
    public static String encode(byte[] binaryData) {
        if (binaryData == null) {
            return null;
        }
        StringBuilder encoded = new StringBuilder(binaryData.length * 2);
        for (byte b : binaryData) {
            int value = b & 0xff;  // normalize to 0..255
            encoded.append(lookUpHexAlphabet[value >> 4]);
            encoded.append(lookUpHexAlphabet[value & 0xf]);
        }
        return encoded.toString();
    }

    /**
     * Decode a hex string (upper- or lower-case digits) to a byte array.
     *
     * @param encoded encoded string
     * @return the decoded bytes, or {@code null} if the input is {@code null},
     *         has odd length, or contains a non-hex character
     */
    public static byte[] decode(String encoded) {
        if (encoded == null) {
            return null;
        }
        int length = encoded.length();
        if (length % 2 != 0) {
            return null;
        }
        byte[] decoded = new byte[length / 2];
        for (int i = 0; i < decoded.length; i++) {
            int high = hexValue(encoded.charAt(i * 2));
            int low = hexValue(encoded.charAt(i * 2 + 1));
            if (high == -1 || low == -1) {
                return null;
            }
            decoded[i] = (byte) ((high << 4) | low);
        }
        return decoded;
    }

    // Returns the value of a hex digit, or -1 for any non-hex (or non-ASCII) char.
    private static int hexValue(char c) {
        return c < BASELENGTH ? hexNumberTable[c] : -1;
    }
}
| apache-2.0 |
vineetgarg02/hive | storage-api/src/java/org/apache/hadoop/hive/common/io/FileMetadataCache.java | 2471 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.common.io;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.hive.common.io.encoded.MemoryBufferOrBuffers;
/**
 * Cache for file metadata (ORC or Parquet footers), keyed by an opaque file key.
 * Buffers returned by the get/put methods are reference-counted: callers must
 * release them via {@link #decRefBuffer} when done.
 */
public interface FileMetadataCache {
  /**
   * @return Metadata for a given file (ORC or Parquet footer).
   *         The caller must decref this buffer when done.
   */
  MemoryBufferOrBuffers getFileMetadata(Object fileKey);

  /**
   * @deprecated superseded by the overload that also takes a {@link CacheTag}
   *             and an {@code isStopped} flag — presumably retained for binary
   *             compatibility; confirm before removal.
   */
  @Deprecated
  MemoryBufferOrBuffers putFileMetadata(
      Object fileKey, int length, InputStream is) throws IOException;

  /**
   * @deprecated superseded by the tag/isStopped overload below.
   */
  @Deprecated
  MemoryBufferOrBuffers putFileMetadata(Object fileKey, ByteBuffer tailBuffer);

  /**
   * @deprecated superseded by the overload that also takes an
   *             {@code isStopped} flag.
   */
  @Deprecated
  MemoryBufferOrBuffers putFileMetadata(
      Object fileKey, int length, InputStream is, CacheTag tag) throws IOException;

  /**
   * @deprecated superseded by the overload that also takes an
   *             {@code isStopped} flag.
   */
  @Deprecated
  MemoryBufferOrBuffers putFileMetadata(Object fileKey, ByteBuffer tailBuffer, CacheTag tag);

  /**
   * Releases the buffer returned from getFileMetadata or putFileMetadata method.
   * @param buffer The buffer to release.
   */
  void decRefBuffer(MemoryBufferOrBuffers buffer);

  /**
   * Puts the metadata for a given file (e.g. a footer buffer into cache).
   * @param fileKey The file key.
   * @return The buffer or buffers representing the cached footer.
   *         The caller must decref this buffer when done.
   */
  MemoryBufferOrBuffers putFileMetadata(Object fileKey, ByteBuffer tailBuffer,
      CacheTag tag, AtomicBoolean isStopped);

  /**
   * Reads {@code length} bytes of footer from {@code is} and caches them for
   * {@code fileKey}. The caller must decref the returned buffer when done.
   */
  MemoryBufferOrBuffers putFileMetadata(Object fileKey, int length,
      InputStream is, CacheTag tag, AtomicBoolean isStopped) throws IOException;
}
| apache-2.0 |
pjain1/druid | server/src/test/java/org/apache/druid/server/initialization/JettyTest.java | 18632 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.server.initialization;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.inject.Binder;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.multibindings.Multibinder;
import org.apache.commons.io.IOUtils;
import org.apache.druid.guice.GuiceInjectors;
import org.apache.druid.guice.Jerseys;
import org.apache.druid.guice.JsonConfigProvider;
import org.apache.druid.guice.LazySingleton;
import org.apache.druid.guice.LifecycleModule;
import org.apache.druid.guice.annotations.Self;
import org.apache.druid.initialization.Initialization;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.HttpClientConfig;
import org.apache.druid.java.util.http.client.HttpClientInit;
import org.apache.druid.java.util.http.client.Request;
import org.apache.druid.java.util.http.client.response.InputStreamResponseHandler;
import org.apache.druid.java.util.http.client.response.StatusResponseHandler;
import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
import org.apache.druid.metadata.PasswordProvider;
import org.apache.druid.server.DruidNode;
import org.apache.druid.server.initialization.jetty.JettyServerInitializer;
import org.apache.druid.server.initialization.jetty.JettyServerModule;
import org.apache.druid.server.initialization.jetty.ServletFilterHolder;
import org.apache.druid.server.security.AuthTestUtils;
import org.apache.druid.server.security.AuthorizerMapper;
import org.eclipse.jetty.server.Server;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.joda.time.Duration;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import javax.servlet.DispatcherType;
import javax.servlet.Filter;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.EnumSet;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
public class JettyTest extends BaseJettyTest
{
@Rule
public TemporaryFolder folder = new TemporaryFolder();
private HttpClientConfig sslConfig;
private Injector injector;
private LatchedRequestStateHolder latchedRequestState;
  @Override
  protected Injector setupInjector()
  {
    // Builds the Guice injector for the test server: a TLS-enabled Druid node on
    // a random port pair, the latched/slow/exception test resources, and a dummy
    // auth filter installed on every path.
    TLSServerConfig tlsConfig;
    try {
      // Copy the bundled key/trust stores into a temp folder so the server can
      // load them from a real filesystem path.
      File keyStore = new File(JettyTest.class.getClassLoader().getResource("server.jks").getFile());
      Path tmpKeyStore = Files.copy(keyStore.toPath(), new File(folder.newFolder(), "server.jks").toPath());
      File trustStore = new File(JettyTest.class.getClassLoader().getResource("truststore.jks").getFile());
      Path tmpTrustStore = Files.copy(trustStore.toPath(), new File(folder.newFolder(), "truststore.jks").toPath());
      PasswordProvider pp = () -> "druid123";
      // Server-side TLS settings: JKS stores copied above, "druid" certificate
      // alias, client certificates and hostname validation disabled for the test.
      tlsConfig = new TLSServerConfig()
      {
        @Override
        public String getKeyStorePath()
        {
          return tmpKeyStore.toString();
        }

        @Override
        public String getKeyStoreType()
        {
          return "jks";
        }

        @Override
        public PasswordProvider getKeyStorePasswordProvider()
        {
          return pp;
        }

        @Override
        public PasswordProvider getKeyManagerPasswordProvider()
        {
          return pp;
        }

        @Override
        public String getTrustStorePath()
        {
          return tmpTrustStore.toString();
        }

        @Override
        public String getTrustStoreAlgorithm()
        {
          return "PKIX";
        }

        @Override
        public PasswordProvider getTrustStorePasswordProvider()
        {
          return pp;
        }

        @Override
        public String getCertAlias()
        {
          return "druid";
        }

        @Override
        public boolean isRequireClientCertificate()
        {
          return false;
        }

        @Override
        public boolean isRequestClientCertificate()
        {
          return false;
        }

        @Override
        public boolean isValidateHostnames()
        {
          return false;
        }
      };
      // Client-side SSL config trusting the same trust store, single worker,
      // no read timeout.
      sslConfig =
          HttpClientConfig.builder()
                          .withSslContext(
                              HttpClientInit.sslContextWithTrustedKeyStore(tmpTrustStore.toString(), pp.getPassword())
                          )
                          .withWorkerCount(1)
                          .withReadTimeout(Duration.ZERO)
                          .build();
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
    // NOTE(review): picks a random port in the dynamic range; collisions with
    // other listeners are possible — the test assumes the pair (port, port+1)
    // is free.
    final int ephemeralPort = ThreadLocalRandom.current().nextInt(49152, 65535);

    latchedRequestState = new LatchedRequestStateHolder();
    injector = Initialization.makeInjectorWithModules(
        GuiceInjectors.makeStartupInjector(),
        ImmutableList.<Module>of(
            new Module()
            {
              @Override
              public void configure(Binder binder)
              {
                // Self node on the random plaintext/TLS port pair chosen above.
                JsonConfigProvider.bindInstance(
                    binder,
                    Key.get(DruidNode.class, Self.class),
                    new DruidNode("test", "localhost", false, ephemeralPort, ephemeralPort + 1, true, true)
                );
                binder.bind(TLSServerConfig.class).toInstance(tlsConfig);
                binder.bind(JettyServerInitializer.class).to(JettyServerInit.class).in(LazySingleton.class);
                binder.bind(LatchedRequestStateHolder.class).toInstance(latchedRequestState);
                // Install the DummyAuthFilter on every path ("/*").
                Multibinder<ServletFilterHolder> multibinder = Multibinder.newSetBinder(
                    binder,
                    ServletFilterHolder.class
                );
                multibinder.addBinding().toInstance(
                    new ServletFilterHolder()
                    {
                      @Override
                      public String getPath()
                      {
                        return "/*";
                      }

                      @Override
                      public Map<String, String> getInitParameters()
                      {
                        return null;
                      }

                      @Override
                      public Class<? extends Filter> getFilterClass()
                      {
                        return DummyAuthFilter.class;
                      }

                      @Override
                      public Filter getFilter()
                      {
                        return null;
                      }

                      @Override
                      public EnumSet<DispatcherType> getDispatcherType()
                      {
                        return null;
                      }
                    }
                );
                // Jersey resources exercised by the individual test methods.
                Jerseys.addResource(binder, SlowResource.class);
                Jerseys.addResource(binder, LatchedResource.class);
                Jerseys.addResource(binder, ExceptionResource.class);
                Jerseys.addResource(binder, DefaultResource.class);
                Jerseys.addResource(binder, DirectlyReturnResource.class);
                binder.bind(AuthorizerMapper.class).toInstance(AuthTestUtils.TEST_AUTHORIZER_MAPPER);
                LifecycleModule.register(binder, Server.class);
              }
            }
        )
    );
    return injector;
  }
@Test
@Ignore // this test will deadlock if it hits an issue, so ignored by default
public void testTimeouts() throws Exception
{
// test for request timeouts properly not locking up all threads
final Executor executor = Executors.newFixedThreadPool(100);
final AtomicLong count = new AtomicLong(0);
final CountDownLatch latch = new CountDownLatch(1000);
for (int i = 0; i < 10000; i++) {
executor.execute(
new Runnable()
{
@Override
public void run()
{
executor.execute(
new Runnable()
{
@Override
public void run()
{
long startTime = System.currentTimeMillis();
long startTime2 = 0;
try {
ListenableFuture<StatusResponseHolder> go =
client.go(
new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/slow/hello")),
StatusResponseHandler.getInstance()
);
startTime2 = System.currentTimeMillis();
go.get();
}
catch (Exception e) {
e.printStackTrace();
}
finally {
System.out.printf(
Locale.ENGLISH,
"Response time client%dtime taken for getting future%dCounter %d%n",
System.currentTimeMillis() - startTime,
System.currentTimeMillis() - startTime2,
count.incrementAndGet()
);
latch.countDown();
}
}
}
);
}
}
);
}
latch.await();
}
@Test
public void testGzipResponseCompression() throws Exception
{
final URL url = new URL("http://localhost:" + port + "/default");
final HttpURLConnection get = (HttpURLConnection) url.openConnection();
get.setRequestProperty("Accept-Encoding", "gzip");
Assert.assertEquals("gzip", get.getContentEncoding());
Assert.assertEquals(
DEFAULT_RESPONSE_CONTENT,
IOUtils.toString(new GZIPInputStream(get.getInputStream()), StandardCharsets.UTF_8)
);
final HttpURLConnection post = (HttpURLConnection) url.openConnection();
post.setRequestProperty("Accept-Encoding", "gzip");
post.setRequestMethod("POST");
Assert.assertEquals("gzip", post.getContentEncoding());
Assert.assertEquals(
DEFAULT_RESPONSE_CONTENT,
IOUtils.toString(new GZIPInputStream(post.getInputStream()), StandardCharsets.UTF_8)
);
final HttpURLConnection getNoGzip = (HttpURLConnection) url.openConnection();
Assert.assertNotEquals("gzip", getNoGzip.getContentEncoding());
Assert.assertEquals(DEFAULT_RESPONSE_CONTENT, IOUtils.toString(getNoGzip.getInputStream(), StandardCharsets.UTF_8));
final HttpURLConnection postNoGzip = (HttpURLConnection) url.openConnection();
postNoGzip.setRequestMethod("POST");
Assert.assertNotEquals("gzip", postNoGzip.getContentEncoding());
Assert.assertEquals(
DEFAULT_RESPONSE_CONTENT,
IOUtils.toString(postNoGzip.getInputStream(), StandardCharsets.UTF_8)
);
}
  // Tests that threads are not stuck when partial chunk is not finalized
  // https://bugs.eclipse.org/bugs/show_bug.cgi?id=424107
  @Test
  @Ignore
  // above bug is not fixed in jetty for gzip encoding, and the chunk is still finalized instead of throwing exception.
  public void testChunkNotFinalized() throws Exception
  {
    // Hit an endpoint that deliberately fails mid-response; fully reading the
    // body is expected to surface an IOException rather than hang or succeed.
    ListenableFuture<InputStream> go =
        client.go(
            new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/exception/exception")),
            new InputStreamResponseHandler()
        );
    try {
      StringWriter writer = new StringWriter();
      IOUtils.copy(go.get(), writer, "utf-8");
      Assert.fail("Should have thrown Exception");
    }
    catch (IOException e) {
      // Expected.
    }
  }
@Test
public void testThreadNotStuckOnException() throws Exception
{
final CountDownLatch latch = new CountDownLatch(1);
Executors.newSingleThreadExecutor().execute(
new Runnable()
{
@Override
public void run()
{
try {
ListenableFuture<InputStream> go = client.go(
new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/exception/exception")),
new InputStreamResponseHandler()
);
StringWriter writer = new StringWriter();
IOUtils.copy(go.get(), writer, "utf-8");
}
catch (IOException e) {
// Expected.
}
catch (Throwable t) {
throw new RuntimeException(t);
}
latch.countDown();
}
}
);
latch.await(5, TimeUnit.SECONDS);
}
@Test
public void testExtensionAuthFilter() throws Exception
{
URL url = new URL("http://localhost:" + port + "/default");
HttpURLConnection get = (HttpURLConnection) url.openConnection();
get.setRequestProperty(DummyAuthFilter.AUTH_HDR, DummyAuthFilter.SECRET_USER);
Assert.assertEquals(HttpServletResponse.SC_OK, get.getResponseCode());
get = (HttpURLConnection) url.openConnection();
get.setRequestProperty(DummyAuthFilter.AUTH_HDR, "hacker");
Assert.assertEquals(HttpServletResponse.SC_UNAUTHORIZED, get.getResponseCode());
}
@Test
public void testGzipRequestDecompression() throws Exception
{
String text = "hello";
ByteArrayOutputStream out = new ByteArrayOutputStream();
try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(out)) {
gzipOutputStream.write(text.getBytes(Charset.defaultCharset()));
}
Request request = new Request(HttpMethod.POST, new URL("http://localhost:" + port + "/return"));
request.setHeader("Content-Encoding", "gzip");
request.setContent(MediaType.TEXT_PLAIN, out.toByteArray());
Assert.assertEquals(text, new String(IOUtils.toByteArray(client.go(
request,
new InputStreamResponseHandler()
).get()), Charset.defaultCharset()));
}
  // Verifies the active-connection count reported by JettyServerModule over
  // plain HTTP: 0 before the request, 1 while a latched request is held open
  // on the server, and back to 0 after it completes.
  @Test
  public void testNumConnectionsMetricHttp() throws Exception
  {
    String text = "hello";
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(out)) {
      gzipOutputStream.write(text.getBytes(Charset.defaultCharset()));
    }
    Request request = new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/latched/hello"));
    request.setHeader("Content-Encoding", "gzip");
    request.setContent(MediaType.TEXT_PLAIN, out.toByteArray());
    JettyServerModule jsm = injector.getInstance(JettyServerModule.class);
    latchedRequestState.reset();
    // Make sure no connection from a previous test is still being torn down.
    waitForJettyServerModuleActiveConnectionsZero(jsm);
    Assert.assertEquals(0, jsm.getActiveConnections());
    ListenableFuture<InputStream> go = client.go(
        request,
        new InputStreamResponseHandler()
    );
    // Block until the server is definitely handling the request; exactly one
    // connection must be accounted for at that point.
    latchedRequestState.clientWaitForServerToStartRequest();
    Assert.assertEquals(1, jsm.getActiveConnections());
    // Release the server side, let the request finish, and wait for the
    // connection to be closed again.
    latchedRequestState.clientReadyToFinishRequest();
    go.get();
    waitForJettyServerModuleActiveConnectionsZero(jsm);
    Assert.assertEquals(0, jsm.getActiveConnections());
  }
  // HTTPS variant of testNumConnectionsMetricHttp: exercises the
  // active-connection metric through the TLS port using a TLS-configured client.
  @Test
  public void testNumConnectionsMetricHttps() throws Exception
  {
    String text = "hello";
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (GZIPOutputStream gzipOutputStream = new GZIPOutputStream(out)) {
      gzipOutputStream.write(text.getBytes(Charset.defaultCharset()));
    }
    Request request = new Request(HttpMethod.GET, new URL("https://localhost:" + tlsPort + "/latched/hello"));
    request.setHeader("Content-Encoding", "gzip");
    request.setContent(MediaType.TEXT_PLAIN, out.toByteArray());
    // Local client deliberately shadows the field used by the HTTP tests so
    // this request is made with the test's TLS configuration.
    HttpClient client;
    try {
      client = HttpClientInit.createClient(
          sslConfig,
          lifecycle
      );
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
    JettyServerModule jsm = injector.getInstance(JettyServerModule.class);
    latchedRequestState.reset();
    // Make sure no connection from a previous test is still being torn down.
    waitForJettyServerModuleActiveConnectionsZero(jsm);
    Assert.assertEquals(0, jsm.getActiveConnections());
    ListenableFuture<InputStream> go = client.go(
        request,
        new InputStreamResponseHandler()
    );
    // Exactly one connection must be active while the server holds the
    // latched request open.
    latchedRequestState.clientWaitForServerToStartRequest();
    Assert.assertEquals(1, jsm.getActiveConnections());
    latchedRequestState.clientReadyToFinishRequest();
    go.get();
    waitForJettyServerModuleActiveConnectionsZero(jsm);
    Assert.assertEquals(0, jsm.getActiveConnections());
  }
private void waitForJettyServerModuleActiveConnectionsZero(JettyServerModule jsm) throws InterruptedException
{
// it can take a bit to close the connection, so maybe sleep for a while and hope it closes
final int sleepTimeMills = 10;
final int totalSleeps = 15_000 / sleepTimeMills;
int count = 0;
while (jsm.getActiveConnections() > 0 && count++ < totalSleeps) {
Thread.sleep(sleepTimeMills);
}
if (jsm.getActiveConnections() > 0) {
throw new RuntimeException("Connections greater than 0. activeConnections=" + jsm.getActiveConnections() + " port=" + port);
}
}
}
| apache-2.0 |
kohsuke/jboss-marshalling | serial/src/main/java/org/jboss/marshalling/serial/ExternalizedObject.java | 3023 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.marshalling.serial;
import java.io.Externalizable;
import java.io.ObjectOutput;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.InvalidClassException;
import java.io.ObjectStreamException;
import org.jboss.marshalling.Externalizer;
/**
 * An externalized object. This wrapper allows an object that was written with an {@code Externalizer} to be read by
 * standard Java serialization. Note that if an externalized object's child object graph ever refers to the original
 * object, there will be an error in the reconstructed object graph such that those references will refer to this
 * wrapper object rather than the properly externalized object.
 */
public final class ExternalizedObject implements Externalizable {

    private static final long serialVersionUID = -7764783599281227099L;

    // Serialized alongside the subject class so the stream is self-describing.
    private Externalizer externalizer;
    // Transient: the externalizer is responsible for writing/recreating its state.
    private transient Object obj;

    /**
     * No-arg constructor required by the {@link Externalizable} contract.
     */
    public ExternalizedObject() {
    }

    /**
     * Construct a wrapper around {@code obj} that serializes via the given externalizer.
     *
     * @param externalizer the externalizer that knows how to write {@code obj}
     * @param obj the object being externalized
     */
    public ExternalizedObject(final Externalizer externalizer, final Object obj) {
        this.externalizer = externalizer;
        this.obj = obj;
    }

    /**
     * {@inheritDoc}
     */
    public void writeExternal(final ObjectOutput out) throws IOException {
        // Class and externalizer first, so readExternal can reconstruct the object.
        out.writeObject(obj.getClass());
        out.writeObject(externalizer);
        externalizer.writeExternal(obj, out);
    }

    /**
     * {@inheritDoc}
     */
    public void readExternal(final ObjectInput in) throws IOException, ClassNotFoundException {
        Class<?> subject = (Class<?>) in.readObject();
        externalizer = (Externalizer) in.readObject();
        final Object o = externalizer.createExternal(subject, in);
        obj = o;
    }

    /**
     * Return the externalized object after {@code readExternal()} completes.
     *
     * @return the externalized object
     *
     * @throws ObjectStreamException never
     */
    protected Object readResolve() {
        return obj;
    }

    /**
     * Instantiate {@code clazz} via its no-arg constructor.
     *
     * @param clazz the class to instantiate
     * @return a new instance of {@code clazz}
     * @throws InvalidClassException if instantiation fails for any reason (the cause is attached)
     */
    public <T> T create(final Class<T> clazz) throws InvalidClassException {
        try {
            // getDeclaredConstructor().newInstance() replaces the deprecated
            // Class.newInstance(); any failure is still surfaced to callers as
            // InvalidClassException, so the contract is unchanged.
            return clazz.getDeclaredConstructor().newInstance();
        } catch (Exception e) {
            final InvalidClassException ee = new InvalidClassException(clazz.getName(), e.getMessage());
            ee.initCause(e);
            throw ee;
        }
    }
}
| apache-2.0 |
forGGe/kaa | server/common/verifier-shared/src/main/java/org/kaaproject/kaa/server/common/verifier/AbstractKaaUserVerifier.java | 2194 | /*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.common.verifier;
import java.io.IOException;
import java.text.MessageFormat;
import org.apache.avro.specific.SpecificRecordBase;
import org.kaaproject.kaa.common.avro.AvroByteArrayConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for Kaa user verifiers: decodes the Avro-encoded raw verifier
 * configuration and hands the typed configuration to the subclass.
 */
public abstract class AbstractKaaUserVerifier<T extends SpecificRecordBase> implements UserVerifier {

    private static final Logger LOG = LoggerFactory.getLogger(AbstractKaaUserVerifier.class);

    @Override
    public void init(UserVerifierContext context) throws UserVerifierLifecycleException {
        LOG.info("Initializing user verifier with {}", context);
        final AvroByteArrayConverter<T> configurationConverter =
                new AvroByteArrayConverter<>(getConfigurationClass());
        final byte[] rawConfiguration = context.getVerifierDto().getRawConfiguration();
        try {
            final T typedConfiguration = configurationConverter.fromByteArray(rawConfiguration);
            LOG.info("Initializing user verifier {} with {}", getClassName(), typedConfiguration);
            init(context, typedConfiguration);
        } catch (IOException ioe) {
            LOG.error(MessageFormat.format("Failed to initialize user verifier {0}", getClassName()), ioe);
            throw new UserVerifierLifecycleException(ioe);
        }
    }

    /**
     * Initialize the verifier with its decoded, typed configuration.
     *
     * @param context the verifier context
     * @param configuration the decoded configuration record
     * @throws UserVerifierLifecycleException if initialization fails
     */
    public abstract void init(UserVerifierContext context, T configuration) throws UserVerifierLifecycleException;

    /**
     * Gets the configuration class.
     *
     * @return the configuration class
     */
    public abstract Class<T> getConfigurationClass();

    /** @return the concrete verifier's fully-qualified class name, for logging. */
    private String getClassName() {
        return getClass().getName();
    }
}
| apache-2.0 |
tufangorel/hazelcast | hazelcast/src/main/java/com/hazelcast/spi/ServiceNamespace.java | 1016 | /*
* Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.spi;
import com.hazelcast.nio.serialization.DataSerializable;
/**
 * {@code ServiceNamespace} is a namespace to group objects, structures, fragments within a service.
 * Extends {@link DataSerializable}, so namespace instances themselves can be serialized.
 *
 * @since 3.9
 */
public interface ServiceNamespace extends DataSerializable {
    /**
     * Name of the service this namespace belongs to.
     *
     * @return name of the service
     */
    String getServiceName();
}
| apache-2.0 |
facebook/buck | src/com/facebook/buck/cxx/OmnibusRoots.java | 6108 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.util.graph.AbstractBreadthFirstTraversal;
import com.facebook.buck.core.util.immutables.BuckStyleValue;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkTarget;
import com.facebook.buck.cxx.toolchain.nativelink.NativeLinkable;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;
/**
 * A helper class for building the included and excluded omnibus roots to pass to the omnibus
 * builder.
 */
@BuckStyleValue
public abstract class OmnibusRoots {

  /** @return the {@link NativeLinkTarget} roots that are included in omnibus linking. */
  public abstract ImmutableMap<BuildTarget, NativeLinkTarget> getIncludedRoots();

  /** @return the {@link NativeLinkable} roots that are excluded from omnibus linking. */
  public abstract ImmutableMap<BuildTarget, NativeLinkable> getExcludedRoots();

  /** @return a new {@link Builder} in which {@code excludes} can never become included roots. */
  public static Builder builder(
      ImmutableSet<BuildTarget> excludes, ActionGraphBuilder graphBuilder) {
    return new Builder(excludes, graphBuilder);
  }

  /** Mutable accumulator of candidate roots; {@link #build()} resolves the final sets. */
  public static class Builder {

    // Targets that must never take part in omnibus linking, regardless of capability.
    private final ImmutableSet<BuildTarget> excludes;
    private final ActionGraphBuilder graphBuilder;
    // LinkedHashMaps keep insertion order so the resulting maps are deterministic.
    private final Map<BuildTarget, NativeLinkTarget> includedRoots = new LinkedHashMap<>();
    private final Map<BuildTarget, NativeLinkable> excludedRoots = new LinkedHashMap<>();

    private Builder(ImmutableSet<BuildTarget> excludes, ActionGraphBuilder graphBuilder) {
      this.excludes = excludes;
      this.graphBuilder = graphBuilder;
    }

    /** Add a root which is included in omnibus linking. */
    public void addIncludedRoot(NativeLinkTarget root) {
      includedRoots.put(root.getBuildTarget(), root);
    }

    /** Add a root which is excluded from omnibus linking. */
    public void addExcludedRoot(NativeLinkable root) {
      excludedRoots.put(root.getBuildTarget(), root);
    }

    /**
     * Add a node which may qualify as an included root or an excluded root. The node is
     * included only when it exposes a {@link NativeLinkTarget}, is not in the explicit
     * exclude set, and supports omnibus linking; otherwise it is recorded as excluded.
     */
    public void addPotentialRoot(NativeLinkable node, boolean includePrivateLinkerFlags) {
      Optional<NativeLinkTarget> target =
          node.getNativeLinkTarget(graphBuilder, includePrivateLinkerFlags);
      if (target.isPresent()
          && !excludes.contains(node.getBuildTarget())
          && node.supportsOmnibusLinking()) {
        addIncludedRoot(target.get());
      } else {
        addExcludedRoot(node);
      }
    }

    /**
     * Compute the final excluded-root map: the explicitly excluded roots plus any included
     * root that turns out to be reachable from a node that cannot support omnibus linking.
     */
    private ImmutableMap<BuildTarget, NativeLinkable> buildExcluded() {
      Map<BuildTarget, NativeLinkable> excluded = new LinkedHashMap<>(excludedRoots);
      // Find all excluded nodes reachable from the included roots.
      Map<BuildTarget, NativeLinkable> includedRootDeps = new LinkedHashMap<>();
      for (NativeLinkTarget target : includedRoots.values()) {
        for (NativeLinkable linkable : target.getNativeLinkTargetDeps(graphBuilder)) {
          includedRootDeps.put(linkable.getBuildTarget(), linkable);
        }
      }
      // First traversal: walk the dep graph from the included roots' deps and collect
      // every node that cannot be omnibus-linked (stopping descent at such nodes).
      new AbstractBreadthFirstTraversal<NativeLinkable>(includedRootDeps.values()) {
        @Override
        public Iterable<NativeLinkable> visit(NativeLinkable linkable) throws RuntimeException {
          if (!linkable.supportsOmnibusLinking()) {
            excluded.put(linkable.getBuildTarget(), linkable);
            return ImmutableSet.of();
          }
          return Iterables.concat(
              linkable.getNativeLinkableDeps(graphBuilder),
              linkable.getNativeLinkableExportedDeps(graphBuilder));
        }
      }.start();
      // Prepare the final map of excluded roots, starting with the pre-defined ones.
      Map<BuildTarget, NativeLinkable> updatedExcludedRoots = new LinkedHashMap<>(excludedRoots);
      // Recursively expand the excluded nodes including any preloaded deps, as we'll need this full
      // list to know which roots to exclude from omnibus linking.
      new AbstractBreadthFirstTraversal<NativeLinkable>(excluded.values()) {
        @Override
        public Iterable<NativeLinkable> visit(NativeLinkable linkable) {
          if (includedRoots.containsKey(linkable.getBuildTarget())) {
            updatedExcludedRoots.put(linkable.getBuildTarget(), linkable);
          }
          return Iterables.concat(
              linkable.getNativeLinkableDeps(graphBuilder),
              linkable.getNativeLinkableExportedDeps(graphBuilder));
        }
      }.start();
      return ImmutableMap.copyOf(updatedExcludedRoots);
    }

    /** @return the included roots minus any whose build target ended up excluded. */
    private ImmutableMap<BuildTarget, NativeLinkTarget> buildIncluded(
        ImmutableSet<BuildTarget> excluded) {
      return ImmutableMap.copyOf(
          Maps.filterKeys(includedRoots, Predicates.not(excluded::contains)));
    }

    /** @return true when no roots (included or excluded) have been added. */
    public boolean isEmpty() {
      return includedRoots.isEmpty() && excludedRoots.isEmpty();
    }

    /** Resolve the accumulated roots into an immutable {@link OmnibusRoots} value. */
    public OmnibusRoots build() {
      ImmutableMap<BuildTarget, NativeLinkable> excluded = buildExcluded();
      ImmutableMap<BuildTarget, NativeLinkTarget> included = buildIncluded(excluded.keySet());
      return ImmutableOmnibusRoots.of(included, excluded);
    }
  }
}
| apache-2.0 |
goldmansachs/reladomo | reladomo/src/test/java/com/gs/fw/common/mithra/test/domain/GsDeskDatabaseObject.java | 716 |
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.test.domain;
/**
 * Concrete database object for the GsDesk test-domain entity; all behavior is
 * inherited from the generated {@code GsDeskDatabaseObjectAbstract} base class.
 */
public class GsDeskDatabaseObject extends GsDeskDatabaseObjectAbstract
{
}
| apache-2.0 |
sidgoyal/standards.jsr352.jbatch | com.ibm.jbatch.container/src/main/java/com/ibm/jbatch/container/util/TCCLObjectInputStream.java | 1450 | /**
* Copyright 2012 International Business Machines Corp.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.jbatch.container.util;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectStreamClass;
import com.ibm.jbatch.container.exception.BatchContainerRuntimeException;
/**
 * An {@link ObjectInputStream} that resolves classes against the current thread's
 * context class loader (TCCL), falling back to the default JDK resolution when the
 * TCCL cannot load the class (e.g. primitive-type descriptors, which no class
 * loader can load by name) or when no TCCL is set on the thread.
 */
public class TCCLObjectInputStream extends ObjectInputStream {

    /**
     * @param in the underlying stream to deserialize from
     * @throws IOException if the stream header cannot be read
     */
    public TCCLObjectInputStream(InputStream in) throws IOException {
        super(in);
    }

    @Override
    public Class<?> resolveClass(ObjectStreamClass desc) {
        ClassLoader tccl = Thread.currentThread().getContextClassLoader();
        try {
            if (tccl != null) {
                try {
                    return tccl.loadClass(desc.getName());
                } catch (ClassNotFoundException e) {
                    // Fall through to the default resolution below. The original
                    // code failed here (and NPE'd when tccl was null); the default
                    // path also handles primitive-type descriptors.
                }
            }
            return super.resolveClass(desc);
        } catch (ClassNotFoundException | IOException e) {
            // Preserve this override's original unchecked-exception contract.
            throw new BatchContainerRuntimeException(e);
        }
    }
}
| apache-2.0 |
Wesley-Lawrence/nifi | nifi-nar-bundles/nifi-cdc/nifi-cdc-mysql-bundle/nifi-cdc-mysql-processors/src/main/java/org/apache/nifi/cdc/mysql/processors/CaptureChangeMySQL.java | 60260 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.cdc.mysql.processors;
import com.github.shyiko.mysql.binlog.BinaryLogClient;
import com.github.shyiko.mysql.binlog.event.Event;
import com.github.shyiko.mysql.binlog.event.EventHeaderV4;
import com.github.shyiko.mysql.binlog.event.EventType;
import com.github.shyiko.mysql.binlog.event.QueryEventData;
import com.github.shyiko.mysql.binlog.event.RotateEventData;
import com.github.shyiko.mysql.binlog.event.TableMapEventData;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.Stateful;
import org.apache.nifi.annotation.behavior.TriggerSerially;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnShutdown;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.cdc.CDCException;
import org.apache.nifi.cdc.event.ColumnDefinition;
import org.apache.nifi.cdc.event.RowEventException;
import org.apache.nifi.cdc.event.TableInfo;
import org.apache.nifi.cdc.event.TableInfoCacheKey;
import org.apache.nifi.cdc.event.io.EventWriter;
import org.apache.nifi.cdc.mysql.event.BeginTransactionEventInfo;
import org.apache.nifi.cdc.mysql.event.BinlogEventInfo;
import org.apache.nifi.cdc.mysql.event.BinlogEventListener;
import org.apache.nifi.cdc.mysql.event.BinlogLifecycleListener;
import org.apache.nifi.cdc.mysql.event.CommitTransactionEventInfo;
import org.apache.nifi.cdc.mysql.event.DeleteRowsEventInfo;
import org.apache.nifi.cdc.mysql.event.InsertRowsEventInfo;
import org.apache.nifi.cdc.mysql.event.RawBinlogEvent;
import org.apache.nifi.cdc.mysql.event.DDLEventInfo;
import org.apache.nifi.cdc.mysql.event.UpdateRowsEventInfo;
import org.apache.nifi.cdc.mysql.event.io.BeginTransactionEventWriter;
import org.apache.nifi.cdc.mysql.event.io.CommitTransactionEventWriter;
import org.apache.nifi.cdc.mysql.event.io.DeleteRowsWriter;
import org.apache.nifi.cdc.mysql.event.io.InsertRowsWriter;
import org.apache.nifi.cdc.mysql.event.io.DDLEventWriter;
import org.apache.nifi.cdc.mysql.event.io.UpdateRowsWriter;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateManager;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.distributed.cache.client.Deserializer;
import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
import org.apache.nifi.distributed.cache.client.Serializer;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractSessionFactoryProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessSessionFactory;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.util.file.classloader.ClassLoaderUtils;
import java.io.IOException;
import java.net.ConnectException;
import java.net.InetSocketAddress;
import java.net.MalformedURLException;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import static com.github.shyiko.mysql.binlog.event.EventType.DELETE_ROWS;
import static com.github.shyiko.mysql.binlog.event.EventType.EXT_DELETE_ROWS;
import static com.github.shyiko.mysql.binlog.event.EventType.EXT_WRITE_ROWS;
import static com.github.shyiko.mysql.binlog.event.EventType.FORMAT_DESCRIPTION;
import static com.github.shyiko.mysql.binlog.event.EventType.PRE_GA_DELETE_ROWS;
import static com.github.shyiko.mysql.binlog.event.EventType.PRE_GA_WRITE_ROWS;
import static com.github.shyiko.mysql.binlog.event.EventType.ROTATE;
import static com.github.shyiko.mysql.binlog.event.EventType.WRITE_ROWS;
/**
* A processor to retrieve Change Data Capture (CDC) events and send them as flow files.
*/
@TriggerSerially
@InputRequirement(InputRequirement.Requirement.INPUT_FORBIDDEN)
@Tags({"sql", "jdbc", "cdc", "mysql"})
@CapabilityDescription("Retrieves Change Data Capture (CDC) events from a MySQL database. CDC Events include INSERT, UPDATE, DELETE operations. Events "
+ "are output as individual flow files ordered by the time at which the operation occurred.")
@Stateful(scopes = Scope.CLUSTER, description = "Information such as a 'pointer' to the current CDC event in the database is stored by this processor, such "
+ "that it can continue from the same location if restarted.")
@WritesAttributes({
@WritesAttribute(attribute = EventWriter.SEQUENCE_ID_KEY, description = "A sequence identifier (i.e. strictly increasing integer value) specifying the order "
+ "of the CDC event flow file relative to the other event flow file(s)."),
@WritesAttribute(attribute = EventWriter.CDC_EVENT_TYPE_ATTRIBUTE, description = "A string indicating the type of CDC event that occurred, including (but not limited to) "
+ "'begin', 'insert', 'update', 'delete', 'ddl' and 'commit'."),
@WritesAttribute(attribute = "mime.type", description = "The processor outputs flow file content in JSON format, and sets the mime.type attribute to "
+ "application/json")
})
public class CaptureChangeMySQL extends AbstractSessionFactoryProcessor {
    // Random invalid constant used as an indicator to not set the binlog position on the client (thereby using the latest available)
    private static final int DO_NOT_SET = -1000;

    // Relationships
    public static final Relationship REL_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Successfully created FlowFile from SQL query result set.")
            .build();

    // Set of relationships exposed by this processor.
    // NOTE(review): populated outside this chunk (presumably in an init()/onScheduled path); confirm before relying on it being non-null.
    protected static Set<Relationship> relationships;
// Properties
public static final PropertyDescriptor DATABASE_NAME_PATTERN = new PropertyDescriptor.Builder()
.name("capture-change-mysql-db-name-pattern")
.displayName("Database/Schema Name Pattern")
.description("A regular expression (regex) for matching databases (or schemas, depending on your RDBMS' terminology) against the list of CDC events. The regex must match "
+ "the database name as it is stored in the RDBMS. If the property is not set, the database name will not be used to filter the CDC events. "
+ "NOTE: DDL events, even if they affect different databases, are associated with the database used by the session to execute the DDL. "
+ "This means if a connection is made to one database, but the DDL is issued against another, then the connected database will be the one matched against "
+ "the specified pattern.")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor TABLE_NAME_PATTERN = new PropertyDescriptor.Builder()
.name("capture-change-mysql-name-pattern")
.displayName("Table Name Pattern")
.description("A regular expression (regex) for matching CDC events affecting matching tables. The regex must match the table name as it is stored in the database. "
+ "If the property is not set, no events will be filtered based on table name.")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();
public static final PropertyDescriptor CONNECT_TIMEOUT = new PropertyDescriptor.Builder()
.name("capture-change-mysql-max-wait-time")
.displayName("Max Wait Time")
.description("The maximum amount of time allowed for a connection to be established, zero means there is effectively no limit.")
.defaultValue("30 seconds")
.required(true)
.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor HOSTS = new PropertyDescriptor.Builder()
.name("capture-change-mysql-hosts")
.displayName("MySQL Hosts")
.description("A list of hostname/port entries corresponding to nodes in a MySQL cluster. The entries should be comma separated "
+ "using a colon such as host1:port,host2:port,.... For example mysql.myhost.com:3306. This processor will attempt to connect to "
+ "the hosts in the list in order. If one node goes down and failover is enabled for the cluster, then the processor will connect "
+ "to the active node (assuming its host entry is specified in this property. The default port for MySQL connections is 3306.")
.required(true)
.addValidator(StandardValidators.HOSTNAME_PORT_LIST_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor DRIVER_NAME = new PropertyDescriptor.Builder()
.name("capture-change-mysql-driver-class")
.displayName("MySQL Driver Class Name")
.description("The class name of the MySQL database driver class")
.defaultValue("com.mysql.jdbc.Driver")
.required(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor DRIVER_LOCATION = new PropertyDescriptor.Builder()
.name("capture-change-mysql-driver-locations")
.displayName("MySQL Driver Location(s)")
.description("Comma-separated list of files/folders and/or URLs containing the MySQL driver JAR and its dependencies (if any). "
+ "For example '/var/tmp/mysql-connector-java-5.1.38-bin.jar'")
.defaultValue(null)
.required(false)
.addValidator(StandardValidators.createListValidator(true, true, StandardValidators.createURLorFileValidator()))
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor USERNAME = new PropertyDescriptor.Builder()
.name("capture-change-mysql-username")
.displayName("Username")
.description("Username to access the MySQL cluster")
.required(false)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor PASSWORD = new PropertyDescriptor.Builder()
.name("capture-change-mysql-password")
.displayName("Password")
.description("Password to access the MySQL cluster")
.required(false)
.sensitive(true)
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
public static final PropertyDescriptor SERVER_ID = new PropertyDescriptor.Builder()
.name("capture-change-mysql-server-id")
.displayName("Server ID")
.description("The client connecting to the MySQL replication group is actually a simplified slave (server), and the Server ID value must be unique across the whole replication "
+ "group (i.e. different from any other Server ID being used by any master or slave). Thus, each instance of CaptureChangeMySQL must have a Server ID unique across "
+ "the replication group. If the Server ID is not specified, it defaults to 65535.")
.required(false)
.addValidator(StandardValidators.POSITIVE_LONG_VALIDATOR)
.expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
.build();
    // Optional controller service used to cache table/column metadata for event enrichment.
    public static final PropertyDescriptor DIST_CACHE_CLIENT = new PropertyDescriptor.Builder()
            .name("capture-change-mysql-dist-map-cache-client")
            .displayName("Distributed Map Cache Client")
            .description("Identifies a Distributed Map Cache Client controller service to be used for keeping information about the various tables, columns, etc. "
                    + "needed by the processor. If a client is not specified, the generated events will not include column type or name information.")
            .identifiesControllerService(DistributedMapCacheClient.class)
            .required(false)
            .build();
    // Controls where binlog consumption starts when no position is stored in processor state.
    public static final PropertyDescriptor RETRIEVE_ALL_RECORDS = new PropertyDescriptor.Builder()
            .name("capture-change-mysql-retrieve-all-records")
            .displayName("Retrieve All Records")
            .description("Specifies whether to get all available CDC events, regardless of the current binlog filename and/or position. If binlog filename and position values are present "
                    + "in the processor's State, this property's value is ignored. This allows for 4 different configurations: 1) If binlog data is available in processor State, that is used "
                    + "to determine the start location and the value of Retrieve All Records is ignored. 2) If no binlog data is in processor State, then Retrieve All Records set to true "
                    + "means start at the beginning of the binlog history. 3) If no binlog data is in processor State and Initial Binlog Filename/Position are not set, then "
                    + "Retrieve All Records set to false means start at the end of the binlog history. 4) If no binlog data is in processor State and Initial Binlog Filename/Position "
                    + "are set, then Retrieve All Records set to false means start at the specified initial binlog file/position. "
                    + "To reset the behavior, clear the processor state (refer to the State Management section of the processor's documentation).")
            .required(true)
            .allowableValues("true", "false")
            .defaultValue("true")
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .build();
    // Whether BEGIN/COMMIT transaction boundary events are emitted as flow files.
    public static final PropertyDescriptor INCLUDE_BEGIN_COMMIT = new PropertyDescriptor.Builder()
            .name("capture-change-mysql-include-begin-commit")
            .displayName("Include Begin/Commit Events")
            .description("Specifies whether to emit events corresponding to a BEGIN or COMMIT event in the binary log. Set to true if the BEGIN/COMMIT events are necessary in the downstream flow, "
                    + "otherwise set to false, which suppresses generation of these events and can increase flow performance.")
            .required(true)
            .allowableValues("true", "false")
            .defaultValue("false")
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .build();
    // Whether DDL statements (ALTER TABLE, TRUNCATE TABLE, ...) are emitted as flow files.
    public static final PropertyDescriptor INCLUDE_DDL_EVENTS = new PropertyDescriptor.Builder()
            .name("capture-change-mysql-include-ddl-events")
            .displayName("Include DDL Events")
            .description("Specifies whether to emit events corresponding to Data Definition Language (DDL) events such as ALTER TABLE, TRUNCATE TABLE, e.g. in the binary log. Set to true "
                    + "if the DDL events are desired/necessary in the downstream flow, otherwise set to false, which suppresses generation of these events and can increase flow performance.")
            .required(true)
            .allowableValues("true", "false")
            .defaultValue("false")
            .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
            .build();
    // Throttle for persisting binlog file/position into processor state ("0 seconds" = only on stop/shutdown).
    public static final PropertyDescriptor STATE_UPDATE_INTERVAL = new PropertyDescriptor.Builder()
            .name("capture-change-mysql-state-update-interval")
            .displayName("State Update Interval")
            .description("Indicates how often to update the processor's state with binlog file/position values. A value of zero means that state will only be updated when the processor is "
                    + "stopped or shutdown. If at some point the processor state does not contain the desired binlog values, the last flow file emitted will contain the last observed values, "
                    + "and the processor can be returned to that state by using the Initial Binlog File, Initial Binlog Position, and Initial Sequence ID properties.")
            .defaultValue("0 seconds")
            .required(true)
            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();
    // Starting sequence identifier used only when processor state has none; ignored otherwise.
    public static final PropertyDescriptor INIT_SEQUENCE_ID = new PropertyDescriptor.Builder()
            .name("capture-change-mysql-init-seq-id")
            .displayName("Initial Sequence ID")
            .description("Specifies an initial sequence identifier to use if this processor's State does not have a current "
                    + "sequence identifier. If a sequence identifier is present in the processor's State, this property is ignored. Sequence identifiers are "
                    + "monotonically increasing integers that record the order of flow files generated by the processor. They can be used with the EnforceOrder "
                    + "processor to guarantee ordered delivery of CDC events.")
            .required(false)
            .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();
    // Starting binlog file used only when processor state has none; evaluated at configuration time.
    public static final PropertyDescriptor INIT_BINLOG_FILENAME = new PropertyDescriptor.Builder()
            .name("capture-change-mysql-init-binlog-filename")
            .displayName("Initial Binlog Filename")
            .description("Specifies an initial binlog filename to use if this processor's State does not have a current binlog filename. If a filename is present "
                    + "in the processor's State, this property is ignored. This can be used along with Initial Binlog Position to \"skip ahead\" if previous events are not desired. "
                    + "Note that NiFi Expression Language is supported, but this property is evaluated when the processor is configured, so FlowFile attributes may not be used. Expression "
                    + "Language is supported to enable the use of the Variable Registry and/or environment properties.")
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();
    // Starting offset within Initial Binlog Filename, used only when processor state has none.
    public static final PropertyDescriptor INIT_BINLOG_POSITION = new PropertyDescriptor.Builder()
            .name("capture-change-mysql-init-binlog-position")
            .displayName("Initial Binlog Position")
            .description("Specifies an initial offset into a binlog (specified by Initial Binlog Filename) to use if this processor's State does not have a current "
                    + "binlog filename. If a filename is present in the processor's State, this property is ignored. This can be used along with Initial Binlog Filename "
                    + "to \"skip ahead\" if previous events are not desired. Note that NiFi Expression Language is supported, but this property is evaluated when the "
                    + "processor is configured, so FlowFile attributes may not be used. Expression Language is supported to enable the use of the Variable Registry "
                    + "and/or environment properties.")
            .required(false)
            .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();
    // All supported property descriptors; populated once in the static initializer below.
    private static List<PropertyDescriptor> propDescriptors;
    // Session kept across onTrigger calls so a MySQL transaction can span multiple triggers
    // before the NiFi session is committed.
    private volatile ProcessSession currentSession;
    private BinaryLogClient binlogClient;
    private BinlogEventListener eventListener;
    private BinlogLifecycleListener lifecycleListener;
    // Hand-off queue from the binlog event listener thread to the processor thread.
    private volatile LinkedBlockingQueue<RawBinlogEvent> queue = new LinkedBlockingQueue<>();
    // Current binlog coordinates (position 4 presumably skips the binlog file header — TODO confirm).
    private volatile String currentBinlogFile = null;
    private volatile long currentBinlogPosition = 4;
    // The following variables save the value of the binlog filename and position (and sequence id) at the beginning of a transaction. Used for rollback
    private volatile String xactBinlogFile = null;
    private volatile long xactBinlogPosition = 4;
    private volatile long xactSequenceId = 0;
    // Table/database context established by TABLE_MAP and QUERY events, respectively.
    private volatile TableInfo currentTable = null;
    private volatile String currentDatabase = null;
    // Optional filters restricting which databases/tables generate CDC events.
    private volatile Pattern databaseNamePattern;
    private volatile Pattern tableNamePattern;
    private volatile boolean includeBeginCommit = false;
    private volatile boolean includeDDLEvents = false;
    // True while between a BEGIN and its COMMIT/XID event.
    private volatile boolean inTransaction = false;
    private volatile boolean skipTable = false;
    // doStop halts the event-drain loop; hasRun gates state persistence on stop.
    private AtomicBoolean doStop = new AtomicBoolean(false);
    private AtomicBoolean hasRun = new AtomicBoolean(false);
    // Index into the configured host list (advanced on connect failure) and provenance transit URI.
    private int currentHost = 0;
    private String transitUri = "<unknown>";
    // Throttling for periodic state updates (see STATE_UPDATE_INTERVAL).
    private volatile long lastStateUpdate = 0L;
    private volatile long stateUpdateInterval = -1L;
    // Monotonically increasing sequence id attached to emitted events.
    private AtomicLong currentSequenceId = new AtomicLong(0);
    // Optional Distributed Map Cache used to cache table/column info for enrichment.
    private volatile DistributedMapCacheClient cacheClient = null;
    private final Serializer<TableInfoCacheKey> cacheKeySerializer = new TableInfoCacheKey.Serializer();
    private final Serializer<TableInfo> cacheValueSerializer = new TableInfo.Serializer();
    private final Deserializer<TableInfo> cacheValueDeserializer = new TableInfo.Deserializer();
    // JDBC connection used (when enrichment is enabled) to look up column metadata.
    private Connection jdbcConnection = null;
    // Writers converting binlog event info objects into flow files.
    private final BeginTransactionEventWriter beginEventWriter = new BeginTransactionEventWriter();
    private final CommitTransactionEventWriter commitEventWriter = new CommitTransactionEventWriter();
    private final DDLEventWriter ddlEventWriter = new DDLEventWriter();
    private final InsertRowsWriter insertRowsWriter = new InsertRowsWriter();
    private final DeleteRowsWriter deleteRowsWriter = new DeleteRowsWriter();
static {
final Set<Relationship> r = new HashSet<>();
r.add(REL_SUCCESS);
relationships = Collections.unmodifiableSet(r);
final List<PropertyDescriptor> pds = new ArrayList<>();
pds.add(HOSTS);
pds.add(DRIVER_NAME);
pds.add(DRIVER_LOCATION);
pds.add(USERNAME);
pds.add(PASSWORD);
pds.add(SERVER_ID);
pds.add(DATABASE_NAME_PATTERN);
pds.add(TABLE_NAME_PATTERN);
pds.add(CONNECT_TIMEOUT);
pds.add(DIST_CACHE_CLIENT);
pds.add(RETRIEVE_ALL_RECORDS);
pds.add(INCLUDE_BEGIN_COMMIT);
pds.add(INCLUDE_DDL_EVENTS);
pds.add(STATE_UPDATE_INTERVAL);
pds.add(INIT_SEQUENCE_ID);
pds.add(INIT_BINLOG_FILENAME);
pds.add(INIT_BINLOG_POSITION);
propDescriptors = Collections.unmodifiableList(pds);
}
    /**
     * Returns the (immutable) set of relationships supported by this processor; contains only REL_SUCCESS.
     */
    @Override
    public Set<Relationship> getRelationships() {
        return relationships;
    }
    /**
     * Returns the (immutable) ordered list of property descriptors supported by this processor.
     */
    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return propDescriptors;
    }
public void setup(ProcessContext context) {
final ComponentLog logger = getLogger();
final StateManager stateManager = context.getStateManager();
final StateMap stateMap;
try {
stateMap = stateManager.getState(Scope.CLUSTER);
} catch (final IOException ioe) {
logger.error("Failed to retrieve observed maximum values from the State Manager. Will not attempt "
+ "connection until this is accomplished.", ioe);
context.yield();
return;
}
PropertyValue dbNameValue = context.getProperty(DATABASE_NAME_PATTERN);
databaseNamePattern = dbNameValue.isSet() ? Pattern.compile(dbNameValue.getValue()) : null;
PropertyValue tableNameValue = context.getProperty(TABLE_NAME_PATTERN);
tableNamePattern = tableNameValue.isSet() ? Pattern.compile(tableNameValue.getValue()) : null;
stateUpdateInterval = context.getProperty(STATE_UPDATE_INTERVAL).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS);
boolean getAllRecords = context.getProperty(RETRIEVE_ALL_RECORDS).asBoolean();
includeBeginCommit = context.getProperty(INCLUDE_BEGIN_COMMIT).asBoolean();
includeDDLEvents = context.getProperty(INCLUDE_DDL_EVENTS).asBoolean();
// Set current binlog filename to whatever is in State, falling back to the Retrieve All Records then Initial Binlog Filename if no State variable is present
currentBinlogFile = stateMap.get(BinlogEventInfo.BINLOG_FILENAME_KEY);
if (currentBinlogFile == null) {
if (!getAllRecords) {
if (context.getProperty(INIT_BINLOG_FILENAME).isSet()) {
currentBinlogFile = context.getProperty(INIT_BINLOG_FILENAME).evaluateAttributeExpressions().getValue();
}
} else {
// If we're starting from the beginning of all binlogs, the binlog filename must be the empty string (not null)
currentBinlogFile = "";
}
}
// Set current binlog position to whatever is in State, falling back to the Retrieve All Records then Initial Binlog Filename if no State variable is present
String binlogPosition = stateMap.get(BinlogEventInfo.BINLOG_POSITION_KEY);
if (binlogPosition != null) {
currentBinlogPosition = Long.valueOf(binlogPosition);
} else if (!getAllRecords) {
if (context.getProperty(INIT_BINLOG_POSITION).isSet()) {
currentBinlogPosition = context.getProperty(INIT_BINLOG_POSITION).evaluateAttributeExpressions().asLong();
} else {
currentBinlogPosition = DO_NOT_SET;
}
} else {
currentBinlogPosition = -1;
}
// Get current sequence ID from state
String seqIdString = stateMap.get(EventWriter.SEQUENCE_ID_KEY);
if (StringUtils.isEmpty(seqIdString)) {
// Use Initial Sequence ID property if none is found in state
PropertyValue seqIdProp = context.getProperty(INIT_SEQUENCE_ID);
if (seqIdProp.isSet()) {
currentSequenceId.set(seqIdProp.evaluateAttributeExpressions().asInteger());
}
} else {
currentSequenceId.set(Integer.parseInt(seqIdString));
}
// Get reference to Distributed Cache if one exists. If it does not, no enrichment (resolution of column names, e.g.) will be performed
boolean createEnrichmentConnection = false;
if (context.getProperty(DIST_CACHE_CLIENT).isSet()) {
cacheClient = context.getProperty(DIST_CACHE_CLIENT).asControllerService(DistributedMapCacheClient.class);
createEnrichmentConnection = true;
} else {
logger.warn("No Distributed Map Cache Client is specified, so no event enrichment (resolution of column names, e.g.) will be performed.");
cacheClient = null;
}
// Save off MySQL cluster and JDBC driver information, will be used to connect for event enrichment as well as for the binlog connector
try {
List<InetSocketAddress> hosts = getHosts(context.getProperty(HOSTS).evaluateAttributeExpressions().getValue());
String username = context.getProperty(USERNAME).evaluateAttributeExpressions().getValue();
String password = context.getProperty(PASSWORD).evaluateAttributeExpressions().getValue();
// BinaryLogClient expects a non-null password, so set it to the empty string if it is not provided
if (password == null) {
password = "";
}
long connectTimeout = context.getProperty(CONNECT_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.MILLISECONDS);
String driverLocation = context.getProperty(DRIVER_LOCATION).evaluateAttributeExpressions().getValue();
String driverName = context.getProperty(DRIVER_NAME).evaluateAttributeExpressions().getValue();
Long serverId = context.getProperty(SERVER_ID).evaluateAttributeExpressions().asLong();
connect(hosts, username, password, serverId, createEnrichmentConnection, driverLocation, driverName, connectTimeout);
} catch (IOException | IllegalStateException e) {
context.yield();
binlogClient = null;
throw new ProcessException(e.getMessage(), e);
}
}
@Override
public void onTrigger(ProcessContext context, ProcessSessionFactory sessionFactory) throws ProcessException {
// Indicate that this processor has executed at least once, so we know whether or not the state values are valid and should be updated
hasRun.set(true);
ComponentLog log = getLogger();
StateManager stateManager = context.getStateManager();
// Create a client if we don't have one
if (binlogClient == null) {
setup(context);
}
// If the client has been disconnected, try to reconnect
if (!binlogClient.isConnected()) {
Exception e = lifecycleListener.getException();
// If there's no exception, the listener callback might not have been executed yet, so try again later. Otherwise clean up and start over next time
if (e != null) {
// Communications failure, disconnect and try next time
log.error("Binlog connector communications failure: " + e.getMessage(), e);
try {
stop(stateManager);
} catch (CDCException ioe) {
throw new ProcessException(ioe);
}
}
// Try again later
context.yield();
return;
}
if (currentSession == null) {
currentSession = sessionFactory.createSession();
}
try {
outputEvents(currentSession, stateManager, log);
long now = System.currentTimeMillis();
long timeSinceLastUpdate = now - lastStateUpdate;
if (stateUpdateInterval != 0 && timeSinceLastUpdate >= stateUpdateInterval) {
updateState(stateManager, currentBinlogFile, currentBinlogPosition, currentSequenceId.get());
lastStateUpdate = now;
}
} catch (IOException ioe) {
try {
// Perform some processor-level "rollback", then rollback the session
currentBinlogFile = xactBinlogFile == null ? "" : xactBinlogFile;
currentBinlogPosition = xactBinlogPosition;
currentSequenceId.set(xactSequenceId);
inTransaction = false;
stop(stateManager);
queue.clear();
currentSession.rollback();
} catch (Exception e) {
// Not much we can recover from here
log.warn("Error occurred during rollback", e);
}
throw new ProcessException(ioe);
}
}
@OnStopped
public void onStopped(ProcessContext context) {
try {
stop(context.getStateManager());
} catch (CDCException ioe) {
throw new ProcessException(ioe);
}
}
@OnShutdown
public void onShutdown(ProcessContext context) {
try {
// In case we get shutdown while still running, save off the current state, disconnect, and shut down gracefully
stop(context.getStateManager());
} catch (CDCException ioe) {
throw new ProcessException(ioe);
}
}
/**
* Get a list of hosts from a NiFi property, e.g.
*
* @param hostsString A comma-separated list of hosts (host:port,host2:port2, etc.)
* @return List of InetSocketAddresses for the hosts
*/
private List<InetSocketAddress> getHosts(String hostsString) {
if (hostsString == null) {
return null;
}
final List<String> hostsSplit = Arrays.asList(hostsString.split(","));
List<InetSocketAddress> hostsList = new ArrayList<>();
for (String item : hostsSplit) {
String[] addresses = item.split(":");
if (addresses.length != 2) {
throw new ArrayIndexOutOfBoundsException("Not in host:port format");
}
hostsList.add(new InetSocketAddress(addresses[0].trim(), Integer.parseInt(addresses[1].trim())));
}
return hostsList;
}
    /**
     * Attempts to connect the binlog client to one of the configured MySQL hosts, optionally
     * registering the JDBC driver and opening an enrichment connection first. Listeners, binlog
     * coordinates, and server ID are applied to the client before each connect attempt.
     *
     * @param hosts                      candidate MySQL hosts, tried starting at {@code currentHost}
     * @param username                   MySQL username (may be null)
     * @param password                   MySQL password (non-null; empty string if unset)
     * @param serverId                   replication server ID to present, or null for the client default
     * @param createEnrichmentConnection whether to register the JDBC driver and open a metadata connection
     * @param driverLocation             classpath entries for the JDBC driver (enrichment only)
     * @param driverName                 JDBC driver class name (enrichment only)
     * @param connectTimeout             connect timeout in milliseconds; 0 means wait indefinitely
     * @throws IOException if no host could be connected or the enrichment connection fails
     */
    protected void connect(List<InetSocketAddress> hosts, String username, String password, Long serverId, boolean createEnrichmentConnection,
                          String driverLocation, String driverName, long connectTimeout) throws IOException {
        int connectionAttempts = 0;
        final int numHosts = hosts.size();
        InetSocketAddress connectedHost = null;
        Exception lastConnectException = new Exception("Unknown connection error");
        if (createEnrichmentConnection) {
            try {
                // Ensure driverLocation and driverName are correct before establishing binlog connection
                // to avoid failing after binlog messages are received.
                // Actual JDBC connection is created after binlog client gets started, because we need
                // the connect-able host same as the binlog client.
                registerDriver(driverLocation, driverName);
            } catch (InitializationException e) {
                throw new RuntimeException("Failed to register JDBC driver. Ensure MySQL Driver Location(s)" +
                        " and MySQL Driver Class Name are configured correctly. " + e, e);
            }
        }
        // Try hosts until one connects or every host has been attempted once.
        // NOTE(review): on a failed attempt binlogClient is not reset to null, so later iterations
        // appear to retry the same client/host (and re-register the event listener) rather than
        // advancing to the host selected by the updated currentHost index — verify intended failover.
        while (connectedHost == null && connectionAttempts < numHosts) {
            if (binlogClient == null) {
                connectedHost = hosts.get(currentHost);
                binlogClient = createBinlogClient(connectedHost.getHostString(), connectedHost.getPort(), username, password);
            }
            // Add an event listener and lifecycle listener for binlog and client events, respectively
            if (eventListener == null) {
                eventListener = createBinlogEventListener(binlogClient, queue);
            }
            eventListener.start();
            binlogClient.registerEventListener(eventListener);
            if (lifecycleListener == null) {
                lifecycleListener = createBinlogLifecycleListener();
            }
            binlogClient.registerLifecycleListener(lifecycleListener);
            // Position the client at the last-known binlog coordinates before connecting
            binlogClient.setBinlogFilename(currentBinlogFile);
            if (currentBinlogPosition != DO_NOT_SET) {
                binlogClient.setBinlogPosition(currentBinlogPosition);
            }
            if (serverId != null) {
                binlogClient.setServerId(serverId);
            }
            try {
                // A timeout of zero is treated as "wait forever"
                if (connectTimeout == 0) {
                    connectTimeout = Long.MAX_VALUE;
                }
                binlogClient.connect(connectTimeout);
                transitUri = "mysql://" + connectedHost.getHostString() + ":" + connectedHost.getPort();
            } catch (IOException | TimeoutException te) {
                // Try the next host
                connectedHost = null;
                transitUri = "<unknown>";
                currentHost = (currentHost + 1) % numHosts;
                connectionAttempts++;
                lastConnectException = te;
            }
        }
        if (!binlogClient.isConnected()) {
            binlogClient.disconnect();
            binlogClient = null;
            throw new IOException("Could not connect binlog client to any of the specified hosts due to: " + lastConnectException.getMessage(), lastConnectException);
        }
        if (createEnrichmentConnection) {
            try {
                jdbcConnection = getJdbcConnection(driverLocation, driverName, connectedHost, username, password, null);
            } catch (InitializationException | SQLException e) {
                // Tear down the binlog connection too; enrichment was requested and is required
                binlogClient.disconnect();
                binlogClient = null;
                throw new IOException("Error creating binlog enrichment JDBC connection to any of the specified hosts", e);
            }
        }
        doStop.set(false);
    }
    /**
     * Drains the raw binlog event queue, translating each event into zero or more CDC flow files
     * and maintaining transaction/table state as it goes. The NiFi session is committed when a
     * MySQL COMMIT/XID event is seen (or after standalone DDL events), so a MySQL transaction maps
     * to one NiFi session commit.
     *
     * @param session      the session used to create/transfer flow files
     * @param stateManager the state manager (currently unused here; state is persisted by callers)
     * @param log          the processor logger
     * @throws IOException on enrichment-cache failures or invalid transaction sequences (these
     *                     trigger the caller's rollback handling)
     */
    public void outputEvents(ProcessSession session, StateManager stateManager, ComponentLog log) throws IOException {
        RawBinlogEvent rawBinlogEvent;
        // Drain the queue
        while ((rawBinlogEvent = queue.poll()) != null && !doStop.get()) {
            Event event = rawBinlogEvent.getEvent();
            EventHeaderV4 header = event.getHeader();
            long timestamp = header.getTimestamp();
            EventType eventType = header.getEventType();
            // Advance the current binlog position. This way if no more events are received and the processor is stopped, it will resume at the event about to be processed.
            // We always get ROTATE and FORMAT_DESCRIPTION messages no matter where we start (even from the end), and they won't have the correct "next position" value, so only
            // advance the position if it is not that type of event. ROTATE events don't generate output CDC events and have the current binlog position in a special field, which
            // is filled in during the ROTATE case
            if (eventType != ROTATE && eventType != FORMAT_DESCRIPTION) {
                currentBinlogPosition = header.getPosition();
            }
            log.debug("Got message event type: {} ", new Object[]{header.getEventType().toString()});
            switch (eventType) {
                case TABLE_MAP:
                    // This is sent to inform which table is about to be changed by subsequent events
                    TableMapEventData data = event.getData();
                    // Should we skip this table? Yes if we've specified a DB or table name pattern and they don't match
                    skipTable = (databaseNamePattern != null && !databaseNamePattern.matcher(data.getDatabase()).matches())
                            || (tableNamePattern != null && !tableNamePattern.matcher(data.getTable()).matches());
                    if (!skipTable) {
                        TableInfoCacheKey key = new TableInfoCacheKey(this.getIdentifier(), data.getDatabase(), data.getTable(), data.getTableId());
                        if (cacheClient != null) {
                            try {
                                currentTable = cacheClient.get(key, cacheKeySerializer, cacheValueDeserializer);
                            } catch (ConnectException ce) {
                                throw new IOException("Could not connect to Distributed Map Cache server to get table information", ce);
                            }
                            if (currentTable == null) {
                                // We don't have an entry for this table yet, so fetch the info from the database and populate the cache
                                try {
                                    currentTable = loadTableInfo(key);
                                    try {
                                        cacheClient.put(key, currentTable, cacheKeySerializer, cacheValueSerializer);
                                    } catch (ConnectException ce) {
                                        throw new IOException("Could not connect to Distributed Map Cache server to put table information", ce);
                                    }
                                } catch (SQLException se) {
                                    // Propagate the error up, so things like rollback and logging/bulletins can be handled
                                    throw new IOException(se.getMessage(), se);
                                }
                            }
                        }
                    } else {
                        // Clear the current table, to force a reload next time we get a TABLE_MAP event we care about
                        currentTable = null;
                    }
                    break;
                case QUERY:
                    QueryEventData queryEventData = event.getData();
                    currentDatabase = queryEventData.getDatabase();
                    String sql = queryEventData.getSql();
                    // Is this the start of a transaction?
                    if ("BEGIN".equals(sql)) {
                        // If we're already in a transaction, something bad happened, alert the user
                        if (inTransaction) {
                            throw new IOException("BEGIN event received while already processing a transaction. This could indicate that your binlog position is invalid.");
                        }
                        // Mark the current binlog position in case we have to rollback the transaction (if the processor is stopped, e.g.)
                        xactBinlogFile = currentBinlogFile;
                        xactBinlogPosition = currentBinlogPosition;
                        xactSequenceId = currentSequenceId.get();
                        if (includeBeginCommit && (databaseNamePattern == null || databaseNamePattern.matcher(currentDatabase).matches())) {
                            BeginTransactionEventInfo beginEvent = new BeginTransactionEventInfo(currentDatabase, timestamp, currentBinlogFile, currentBinlogPosition);
                            currentSequenceId.set(beginEventWriter.writeEvent(currentSession, transitUri, beginEvent, currentSequenceId.get(), REL_SUCCESS));
                        }
                        inTransaction = true;
                    } else if ("COMMIT".equals(sql)) {
                        if (!inTransaction) {
                            throw new IOException("COMMIT event received while not processing a transaction (i.e. no corresponding BEGIN event). "
                                    + "This could indicate that your binlog position is invalid.");
                        }
                        // InnoDB generates XID events for "commit", but MyISAM generates Query events with "COMMIT", so handle that here
                        if (includeBeginCommit && (databaseNamePattern == null || databaseNamePattern.matcher(currentDatabase).matches())) {
                            CommitTransactionEventInfo commitTransactionEvent = new CommitTransactionEventInfo(currentDatabase, timestamp, currentBinlogFile, currentBinlogPosition);
                            currentSequenceId.set(commitEventWriter.writeEvent(currentSession, transitUri, commitTransactionEvent, currentSequenceId.get(), REL_SUCCESS));
                        }
                        // Commit the NiFi session
                        session.commit();
                        inTransaction = false;
                        currentTable = null;
                    } else {
                        // Check for DDL events (alter table, e.g.). Normalize the query to do string matching on the type of change
                        String normalizedQuery = sql.toLowerCase().trim().replaceAll(" {2,}", " ");
                        if (normalizedQuery.startsWith("alter table")
                                || normalizedQuery.startsWith("alter ignore table")
                                || normalizedQuery.startsWith("create table")
                                || normalizedQuery.startsWith("truncate table")
                                || normalizedQuery.startsWith("rename table")
                                || normalizedQuery.startsWith("drop table")
                                || normalizedQuery.startsWith("drop database")) {
                            if (includeDDLEvents && (databaseNamePattern == null || databaseNamePattern.matcher(currentDatabase).matches())) {
                                // If we don't have table information, we can still use the database name
                                TableInfo ddlTableInfo = (currentTable != null) ? currentTable : new TableInfo(currentDatabase, null, null, null);
                                DDLEventInfo ddlEvent = new DDLEventInfo(ddlTableInfo, timestamp, currentBinlogFile, currentBinlogPosition, sql);
                                currentSequenceId.set(ddlEventWriter.writeEvent(currentSession, transitUri, ddlEvent, currentSequenceId.get(), REL_SUCCESS));
                            }
                            // Remove all the keys from the cache that this processor added
                            if (cacheClient != null) {
                                cacheClient.removeByPattern(this.getIdentifier() + ".*");
                            }
                            // If not in a transaction, commit the session so the DDL event(s) will be transferred
                            if (includeDDLEvents && !inTransaction) {
                                session.commit();
                            }
                        }
                    }
                    break;
                case XID:
                    // XID marks an InnoDB transaction commit; equivalent to the COMMIT Query event above.
                    // NOTE(review): currentDatabase may still be null here if no QUERY event was seen
                    // first — the matcher call below would then NPE; confirm event ordering guarantees.
                    if (!inTransaction) {
                        throw new IOException("COMMIT event received while not processing a transaction (i.e. no corresponding BEGIN event). "
                                + "This could indicate that your binlog position is invalid.");
                    }
                    if (includeBeginCommit && (databaseNamePattern == null || databaseNamePattern.matcher(currentDatabase).matches())) {
                        CommitTransactionEventInfo commitTransactionEvent = new CommitTransactionEventInfo(currentDatabase, timestamp, currentBinlogFile, currentBinlogPosition);
                        currentSequenceId.set(commitEventWriter.writeEvent(currentSession, transitUri, commitTransactionEvent, currentSequenceId.get(), REL_SUCCESS));
                    }
                    // Commit the NiFi session
                    session.commit();
                    inTransaction = false;
                    currentTable = null;
                    currentDatabase = null;
                    break;
                case WRITE_ROWS:
                case EXT_WRITE_ROWS:
                case PRE_GA_WRITE_ROWS:
                case UPDATE_ROWS:
                case EXT_UPDATE_ROWS:
                case PRE_GA_UPDATE_ROWS:
                case DELETE_ROWS:
                case EXT_DELETE_ROWS:
                case PRE_GA_DELETE_ROWS:
                    // If we are skipping this table, then don't emit any events related to its modification
                    if (skipTable) {
                        break;
                    }
                    if (!inTransaction) {
                        // These events should only happen inside a transaction, warn the user otherwise
                        log.warn("Table modification event occurred outside of a transaction.");
                        break;
                    }
                    if (currentTable == null && cacheClient != null) {
                        // No Table Map event was processed prior to this event, which should not happen, so throw an error
                        throw new RowEventException("No table information is available for this event, cannot process further.");
                    }
                    if (eventType == WRITE_ROWS
                            || eventType == EXT_WRITE_ROWS
                            || eventType == PRE_GA_WRITE_ROWS) {
                        InsertRowsEventInfo eventInfo = new InsertRowsEventInfo(currentTable, timestamp, currentBinlogFile, currentBinlogPosition, event.getData());
                        currentSequenceId.set(insertRowsWriter.writeEvent(currentSession, transitUri, eventInfo, currentSequenceId.get(), REL_SUCCESS));
                    } else if (eventType == DELETE_ROWS
                            || eventType == EXT_DELETE_ROWS
                            || eventType == PRE_GA_DELETE_ROWS) {
                        DeleteRowsEventInfo eventInfo = new DeleteRowsEventInfo(currentTable, timestamp, currentBinlogFile, currentBinlogPosition, event.getData());
                        currentSequenceId.set(deleteRowsWriter.writeEvent(currentSession, transitUri, eventInfo, currentSequenceId.get(), REL_SUCCESS));
                    } else {
                        // Update event
                        UpdateRowsEventInfo eventInfo = new UpdateRowsEventInfo(currentTable, timestamp, currentBinlogFile, currentBinlogPosition, event.getData());
                        currentSequenceId.set(updateRowsWriter.writeEvent(currentSession, transitUri, eventInfo, currentSequenceId.get(), REL_SUCCESS));
                    }
                    break;
                case ROTATE:
                    // Update current binlog filename
                    RotateEventData rotateEventData = event.getData();
                    currentBinlogFile = rotateEventData.getBinlogFilename();
                    currentBinlogPosition = rotateEventData.getBinlogPosition();
                    break;
                default:
                    break;
            }
            // Advance the current binlog position. This way if no more events are received and the processor is stopped, it will resume after the event that was just processed.
            // We always get ROTATE and FORMAT_DESCRIPTION messages no matter where we start (even from the end), and they won't have the correct "next position" value, so only
            // advance the position if it is not that type of event.
            if (eventType != ROTATE && eventType != FORMAT_DESCRIPTION) {
                currentBinlogPosition = header.getNextPosition();
            }
        }
    }
protected void stop(StateManager stateManager) throws CDCException {
try {
if (binlogClient != null) {
binlogClient.disconnect();
}
if (eventListener != null) {
eventListener.stop();
if (binlogClient != null) {
binlogClient.unregisterEventListener(eventListener);
}
}
doStop.set(true);
if (hasRun.getAndSet(false)) {
updateState(stateManager, currentBinlogFile, currentBinlogPosition, currentSequenceId.get());
}
currentBinlogPosition = -1;
} catch (IOException e) {
throw new CDCException("Error closing CDC connection", e);
} finally {
binlogClient = null;
}
}
private void updateState(StateManager stateManager, String binlogFile, long binlogPosition, long sequenceId) throws IOException {
// Update state with latest values
if (stateManager != null) {
Map<String, String> newStateMap = new HashMap<>(stateManager.getState(Scope.CLUSTER).toMap());
// Save current binlog filename and position to the state map
if (binlogFile != null) {
newStateMap.put(BinlogEventInfo.BINLOG_FILENAME_KEY, binlogFile);
}
newStateMap.put(BinlogEventInfo.BINLOG_POSITION_KEY, Long.toString(binlogPosition));
newStateMap.put(EventWriter.SEQUENCE_ID_KEY, String.valueOf(sequenceId));
stateManager.setState(newStateMap, Scope.CLUSTER);
}
}
    /**
     * Creates and returns a BinlogEventListener instance, associated with the specified binlog client and event queue.
     * Package-private so tests can substitute a different listener implementation.
     *
     * @param client A reference to a BinaryLogClient. The listener is associated with the given client, such that the listener is notified when
     *               events are available to the given client.
     * @param q A queue used to communicate events between the listener and the NiFi processor thread.
     * @return A BinlogEventListener instance, which will be notified of events associated with the specified client
     */
    BinlogEventListener createBinlogEventListener(BinaryLogClient client, LinkedBlockingQueue<RawBinlogEvent> q) {
        return new BinlogEventListener(client, q);
    }
    /**
     * Creates and returns a BinlogLifecycleListener instance.
     * Package-private so tests can substitute a different listener implementation.
     *
     * @return A BinlogLifecycleListener instance, which will be notified of lifecycle events (connect/disconnect/failure) of the binlog client
     */
    BinlogLifecycleListener createBinlogLifecycleListener() {
        return new BinlogLifecycleListener();
    }
    /**
     * Creates a BinaryLogClient for the given MySQL host and credentials.
     * Package-private so tests can substitute a mock client.
     *
     * @param hostname the MySQL host to connect to
     * @param port     the MySQL port
     * @param username the replication username (may be null)
     * @param password the replication password (non-null)
     * @return a new, unconnected BinaryLogClient
     */
    BinaryLogClient createBinlogClient(String hostname, int port, String username, String password) {
        return new BinaryLogClient(hostname, port, username, password);
    }
/**
 * Retrieves the column information for the specified database and table. The column information
 * can be used to enrich CDC events coming from the RDBMS.
 *
 * @param key A TableInfoCacheKey reference, which contains the database and table names
 * @return A TableInfo instance with the ColumnDefinitions provided (if retrieved successfully
 *         from the database), or null if no JDBC connection is available
 * @throws SQLException if switching databases or querying table metadata fails
 */
protected TableInfo loadTableInfo(TableInfoCacheKey key) throws SQLException {
    TableInfo tableInfo = null;
    if (jdbcConnection != null) {
        try (Statement s = jdbcConnection.createStatement()) {
            // NOTE(review): database/table names are concatenated into SQL. They come from the
            // binlog rather than direct user input, but quoting them (backticks) would be safer.
            s.execute("USE " + key.getDatabaseName());
            // LIMIT 0 fetches no rows; only the ResultSet metadata is needed for column info.
            // try-with-resources: the original leaked this ResultSet.
            try (ResultSet rs = s.executeQuery("SELECT * FROM " + key.getTableName() + " LIMIT 0")) {
                ResultSetMetaData rsmd = rs.getMetaData();
                int numCols = rsmd.getColumnCount();
                List<ColumnDefinition> columnDefinitions = new ArrayList<>(numCols);
                for (int i = 1; i <= numCols; i++) {
                    // Use the column label if it exists, otherwise use the column name. We're not
                    // doing aliasing here, but it's better practice.
                    String columnLabel = rsmd.getColumnLabel(i);
                    columnDefinitions.add(new ColumnDefinition(
                            rsmd.getColumnType(i),
                            columnLabel != null ? columnLabel : rsmd.getColumnName(i)));
                }
                tableInfo = new TableInfo(
                        key.getDatabaseName(), key.getTableName(), key.getTableId(), columnDefinitions);
            }
        }
    }
    return tableInfo;
}
/**
 * Opens a JDBC connection to the given MySQL host using the supplied credentials and
 * any driver-specific custom properties.
 *
 * <p>NOTE(review): {@code locationString} and {@code drvName} are not used here; driver
 * loading/registration is handled separately by {@code registerDriver}. They appear to be
 * kept for signature compatibility — confirm with callers before removing.
 *
 * @param locationString location of the driver jar(s) (unused in this method)
 * @param drvName fully-qualified JDBC driver class name (unused in this method)
 * @param host resolved host/port of the MySQL server
 * @param username user name for the connection
 * @param password password for the connection
 * @param customProperties optional extra connection properties; any "user"/"password"
 *        entries are overwritten by the explicit credentials below
 * @return an open JDBC Connection to the MySQL server
 * @throws InitializationException declared for API compatibility (not thrown here)
 * @throws SQLException if the connection cannot be established
 */
protected Connection getJdbcConnection(String locationString, String drvName, InetSocketAddress host, String username, String password, Map<String, String> customProperties)
        throws InitializationException, SQLException {
    Properties connectionProps = new Properties();
    if (customProperties != null) {
        connectionProps.putAll(customProperties);
    }
    // Explicit credentials take precedence over any user/password in customProperties.
    connectionProps.put("user", username);
    connectionProps.put("password", password);
    return DriverManager.getConnection("jdbc:mysql://" + host.getHostString() + ":" + host.getPort(), connectionProps);
}
/**
 * Loads the JDBC driver class from the given jar location(s) and registers it with
 * DriverManager through a shim. The shim is required because DriverManager refuses to
 * use a driver whose class was not loaded by the system ClassLoader.
 *
 * @param locationString path(s)/URL(s) of the driver jar(s); if null or empty, nothing is done
 * @param drvName fully-qualified JDBC driver class name to load
 * @throws InitializationException if the driver cannot be located, loaded, or registered
 */
protected void registerDriver(String locationString, String drvName) throws InitializationException {
    if (locationString != null && locationString.length() > 0) {
        try {
            // Build a ClassLoader over the configured location(s); only .jar entries count.
            final ClassLoader classLoader = ClassLoaderUtils.getCustomClassLoader(
                locationString,
                this.getClass().getClassLoader(),
                (dir, name) -> name != null && name.endsWith(".jar")
            );
            // Workaround which allows to use URLClassLoader for JDBC driver loading.
            // (Because the DriverManager will refuse to use a driver not loaded by the system ClassLoader.)
            // Class.forName throws ClassNotFoundException rather than returning null, so the
            // original's null check on the result was dead code and has been removed.
            final Class<?> clazz = Class.forName(drvName, true, classLoader);
            // Class.newInstance() is deprecated; instantiate via the no-arg constructor instead.
            // Any reflective failure is wrapped by the catch (Exception) clause below.
            final Driver driver = (Driver) clazz.getDeclaredConstructor().newInstance();
            DriverManager.registerDriver(new DriverShim(driver));
        } catch (final InitializationException e) {
            throw e;
        } catch (final MalformedURLException e) {
            throw new InitializationException("Invalid Database Driver Jar Url", e);
        } catch (final Exception e) {
            throw new InitializationException("Can't load Database Driver", e);
        }
    }
}
/**
 * Thin {@link Driver} wrapper that forwards every call to an underlying driver.
 * The shim class itself is loaded by the normal ClassLoader, so DriverManager
 * accepts it even though the wrapped driver came from a custom ClassLoader.
 */
private static class DriverShim implements Driver {
    private final Driver delegate;

    DriverShim(Driver d) {
        this.delegate = d;
    }

    @Override
    public boolean acceptsURL(String url) throws SQLException {
        return delegate.acceptsURL(url);
    }

    @Override
    public Connection connect(String url, Properties info) throws SQLException {
        return delegate.connect(url, info);
    }

    @Override
    public int getMajorVersion() {
        return delegate.getMajorVersion();
    }

    @Override
    public int getMinorVersion() {
        return delegate.getMinorVersion();
    }

    @Override
    public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
        return delegate.getPropertyInfo(url, info);
    }

    @Override
    public boolean jdbcCompliant() {
        return delegate.jdbcCompliant();
    }

    @Override
    public Logger getParentLogger() throws SQLFeatureNotSupportedException {
        return delegate.getParentLogger();
    }
}
}
| apache-2.0 |
lpxz/grail-derby104 | java/testing/org/apache/derbyTesting/unitTests/store/T_XA.java | 42110 | /*
Derby - Class org.apache.derbyTesting.unitTests.store.T_XA
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyTesting.unitTests.store;
import org.apache.derbyTesting.unitTests.harness.T_Generic;
import org.apache.derbyTesting.unitTests.harness.T_Fail;
import org.apache.derby.iapi.store.access.xa.*;
import org.apache.derby.iapi.store.access.*;
import org.apache.derby.iapi.services.io.FormatableBitSet;
import org.apache.derby.iapi.reference.Property;
import org.apache.derby.iapi.services.context.ContextService;
import org.apache.derby.iapi.services.context.ContextManager;
import org.apache.derby.iapi.services.monitor.Monitor;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.iapi.services.io.FormatIdUtil;
import org.apache.derby.iapi.error.StandardException;
import java.util.Properties;
import javax.transaction.xa.XAResource;
import javax.transaction.xa.Xid;
public class T_XA extends T_Generic
{
private static final String testService = "XaTest";
byte[] global_id =
{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 44, 44, 45, 46, 47, 48, 49,
50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
60, 61, 62, 63};
byte[] branch_id =
{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
30, 31, 32, 33, 34, 35, 36, 37, 38, 39,
40, 41, 42, 44, 44, 45, 46, 47, 48, 49,
50, 51, 52, 53, 54, 55, 56, 57, 58, 59,
60, 61, 62, 63};
AccessFactory store = null;
/** No-arg constructor; the store under test is created later in runTests(). */
public T_XA()
{
    super();
}
/*
** Methods of UnitTest.
*/
/*
** Methods required by T_Generic
*/
/**
 * @return the module protocol under test: the AccessFactory store module.
 */
public String getModuleToTestProtocolName()
{
    return AccessFactory.MODULE;
}
/**
 * Boots a clean, freshly-created store service and runs each XA test twice:
 * once with commit_method(store, true) and once with commit_method(store, false)
 * (the flag selects between the two commit/rollback paths exercised by
 * commit_method).
 *
 * @exception T_Fail Unexpected behaviour from the API
 */
protected void runTests() throws T_Fail
{
    // Create a AccessFactory to test.
    // don't automatic boot this service if it gets left around
    if (startParams == null)
    {
        startParams = new Properties();
    }
    startParams.put(Property.NO_AUTO_BOOT, Boolean.TRUE.toString());
    // remove the service directory to ensure a clean run
    startParams.put(Property.DELETE_ON_CREATE, Boolean.TRUE.toString());
    // see if we are testing encryption
    startParams = T_Util.setEncryptionParam(startParams);
    try {
        store = (AccessFactory) Monitor.createPersistentService(
            getModuleToTestProtocolName(), testService, startParams);
    } catch (StandardException mse) {
        throw T_Fail.exceptionFail(mse);
    }
    if (store == null)
    {
        throw T_Fail.testFailMsg(
            getModuleToTestProtocolName() + " service not started.");
    }
    REPORT("(unitTestMain) Testing " + testService);
    try {
        // Run the whole suite once per commit path instead of spelling out
        // the twelve calls individually; a fresh commit_method is created
        // per call, exactly as the original sequence did.
        for (boolean online_xact : new boolean[] { true, false })
        {
            XATest_1(new commit_method(store, online_xact));
            XATest_2(new commit_method(store, online_xact));
            XATest_3(new commit_method(store, online_xact));
            XATest_4(new commit_method(store, online_xact));
            XATest_5(new commit_method(store, online_xact));
            XATest_6(new commit_method(store, online_xact));
        }
    }
    catch (StandardException e)
    {
        String msg = e.getMessage();
        if (msg == null)
            msg = e.getClass().getName();
        REPORT(msg);
        e.printStackTrace();
        throw T_Fail.exceptionFail(e);
    }
    catch (Throwable t)
    {
        // NOTE(review): a non-StandardException Throwable is printed but does NOT
        // fail the run; confirm this leniency is intentional before tightening.
        t.printStackTrace();
    }
}
/**************************************************************************
* Utility methods.
**************************************************************************
*/
/**************************************************************************
* Test Cases.
**************************************************************************
*/
/**
 * one phase commit xa transaction.
 * <p>
 * Exercises one-phase commit of three flavors of global transaction:
 * idle (no work), update-only, and read-only.
 *
 * @exception StandardException Standard exception policy.
 **/
void XATest_1(
        commit_method commit_method)
    throws StandardException, T_Fail
{
    REPORT("(XATest_1) starting");
    ContextManager cm =
        ContextService.getFactory().getCurrentContextManager();
    // COMMIT AN IDLE TRANSACTION.
    // Start a global transaction
    XATransactionController xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // commit an idle transaction - using onePhase optimization.
    commit_method.commit(true, 42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    // COMMIT AN UPDATE ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Create a heap conglomerate.
    T_AccessRow template_row = new T_AccessRow(1);
    long conglomid =
        xa_tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    // commit the update-only transaction - using onePhase optimization.
    commit_method.commit(true, 42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    // COMMIT A READ ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Open a scan on the conglomerate so the xact has done (read-only) work.
    ScanController scan1 = xa_tc.openScan(
        conglomid,
        false, // don't hold
        0, // not for update
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_SERIALIZABLE,
        (FormatableBitSet) null, // all columns, all as objects
        null, // start position - first row in conglomerate
        0, // unused if start position is null.
        null, // qualifier - accept all rows
        null, // stop position - last row in conglomerate
        0); // unused if stop position is null.
    scan1.next();
    scan1.close();
    // commit the read-only transaction - using onePhase optimization.
    commit_method.commit(true, 42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    REPORT("(XATest_1) finishing");
}
/**
 * simple two phase commit xa transaction.
 * <p>
 * Prepares then commits idle, update-only, and read-only global transactions.
 * Idle and read-only prepares must return XA_RDONLY (prepare itself commits
 * the xact), after which a 2-phase commit must fail and the xact must no
 * longer be findable; an update prepare must return XA_OK and then 2-phase
 * commit normally.
 *
 * @exception StandardException Standard exception policy.
 **/
void XATest_2(
        commit_method commit_method)
    throws StandardException, T_Fail
{
    REPORT("(XATest_2) starting");
    ContextManager cm =
        ContextService.getFactory().getCurrentContextManager();
    // COMMIT AN IDLE TRANSACTION.
    // Start a global transaction
    XATransactionController xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    if (!xa_tc.isGlobal())
    {
        throw T_Fail.testFailMsg("should be a global transaction.");
    }
    // This prepare will commit the idle transaction.
    if (xa_tc.xa_prepare() != XATransactionController.XA_RDONLY)
    {
        throw T_Fail.testFailMsg(
            "prepare of idle xact did not return XA_RDONLY.");
    }
    // now attempt a 2-phase commit of the xact already committed by prepare.
    try
    {
        // this should fail as the xact has been committed, so committing
        // it in 2 phase mode should fail. This test can't be run in
        // offline mode, no transaction will be found. Pass null as
        // global_id to make that test not run.
        commit_method.commit(false, 42, null, null, xa_tc);
        throw T_Fail.testFailMsg(
            "A XA_RDONLY prepare-committed xact cant be 2P xa_committed.");
    }
    catch (StandardException se)
    {
        // expected exception - drop through.
    }
    // should not be able to find this global xact, it has been committed
    if (((XAResourceManager) store.getXAResourceManager()).find(
            new XAXactId(42, global_id, branch_id)) != null)
    {
        throw T_Fail.testFailMsg(
            "A XA_RDONLY prepare-committed xact should not be findable.");
    }
    // done with this xact.
    xa_tc.destroy();
    // COMMIT AN UPDATE ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Create a heap conglomerate.
    T_AccessRow template_row = new T_AccessRow(1);
    long conglomid =
        xa_tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    // prepare the update xact.
    if (xa_tc.xa_prepare() != XATransactionController.XA_OK)
    {
        throw T_Fail.testFailMsg(
            "prepare of update xact did not return XA_OK.");
    }
    // commit the prepared update transaction - using two phase protocol.
    commit_method.commit(false, 42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    // COMMIT A READ ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Open a scan on the conglomerate so the xact has done (read-only) work.
    ScanController scan1 = xa_tc.openScan(
        conglomid,
        false, // don't hold
        0, // not for update
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_SERIALIZABLE,
        (FormatableBitSet) null, // all columns, all as objects
        null, // start position - first row in conglomerate
        0, // unused if start position is null.
        null, // qualifier - accept all rows
        null, // stop position - last row in conglomerate
        0); // unused if stop position is null.
    scan1.next();
    scan1.close();
    // This prepare will commit the read-only transaction.
    if (xa_tc.xa_prepare() != XATransactionController.XA_RDONLY)
    {
        throw T_Fail.testFailMsg(
            "prepare of idle xact did not return XA_RDONLY.");
    }
    // now attempt a 2-phase commit of the xact already committed by prepare.
    try
    {
        // this should fail as the xact has been committed, so committing
        // it in 2 phase mode should fail. This test can't be run in
        // offline mode, no transaction will be found. Pass null as
        // global_id to make that test not run.
        commit_method.commit(false, 42, null, null, xa_tc);
        throw T_Fail.testFailMsg(
            "A XA_RDONLY prepare-committed xact cant be 2P xa_committed.");
    }
    catch (StandardException se)
    {
        // expected exception - drop through.
    }
    // should not be able to find this global xact, it has been committed
    if (((XAResourceManager) store.getXAResourceManager()).find(
            new XAXactId(42, global_id, branch_id)) != null)
    {
        throw T_Fail.testFailMsg(
            "A XA_RDONLY prepare-committed xact should not be findable.");
    }
    // done with this xact.
    xa_tc.destroy();
    REPORT("(XATest_2) finishing");
}
/**
 * Test aborts of unprepared xa transaction.
 * <p>
 * Rolls back idle, update-only, and read-only global transactions that were
 * never prepared.
 *
 * @exception StandardException Standard exception policy.
 **/
void XATest_3(
        commit_method commit_method)
    throws StandardException, T_Fail
{
    REPORT("(XATest_3) starting");
    ContextManager cm =
        ContextService.getFactory().getCurrentContextManager();
    // ABORT AN IDLE TRANSACTION.
    // Start a global transaction
    XATransactionController xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // roll back the idle transaction.
    commit_method.rollback(42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    // ABORT AN UPDATE ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Create a heap conglomerate.
    T_AccessRow template_row = new T_AccessRow(1);
    long conglomid =
        xa_tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    // roll back the update-only transaction.
    commit_method.rollback(42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    // ABORT A READ ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Create a heap conglomerate.
    template_row = new T_AccessRow(1);
    conglomid =
        xa_tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    // commit the create so the conglomerate survives for the read-only xact below.
    commit_method.commit(true, 42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Open a scan on the conglomerate so the xact has done (read-only) work.
    ScanController scan1 = xa_tc.openScan(
        conglomid,
        false, // don't hold
        0, // not for update
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_SERIALIZABLE,
        (FormatableBitSet) null, // all columns, all as objects
        null, // start position - first row in conglomerate
        0, // unused if start position is null.
        null, // qualifier - accept all rows
        null, // stop position - last row in conglomerate
        0); // unused if stop position is null.
    scan1.next();
    scan1.close();
    // roll back the read-only transaction.
    commit_method.rollback(42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    REPORT("(XATest_3) finishing");
}
/**
 * Test aborts of prepared two phase commit xa transaction.
 * <p>
 * Verifies that an XA_RDONLY prepare commits the xact (so it is no longer
 * findable), that a prepared update xact rejects further work and can be
 * rolled back (undoing its create), and that an aborted create really is gone.
 *
 * @exception StandardException Standard exception policy.
 **/
void XATest_4(
        commit_method commit_method)
    throws StandardException, T_Fail
{
    REPORT("(XATest_4) starting");
    ContextManager cm =
        ContextService.getFactory().getCurrentContextManager();
    // ABORT AN IDLE TRANSACTION.
    // Start a global transaction
    XATransactionController xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // This prepare will commit the idle transaction.
    if (xa_tc.xa_prepare() != XATransactionController.XA_RDONLY)
    {
        throw T_Fail.testFailMsg(
            "prepare of idle xact did not return XA_RDONLY.");
    }
    // nothing to do, will just abort the next current idle xact.
    // after prepare/readonly we can continue to use the transaction.
    commit_method.commit(true, 42, null, null, xa_tc);
    // should not be able to find this global xact, it has been committed
    if (((XAResourceManager) store.getXAResourceManager()).find(
            new XAXactId(42, global_id, branch_id)) != null)
    {
        throw T_Fail.testFailMsg(
            "A XA_RDONLY prepare-committed xact should not be findable.");
    }
    // done with this xact.
    xa_tc.destroy();
    // ABORT AN UPDATE ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Create a heap conglomerate.
    T_AccessRow template_row = new T_AccessRow(1);
    long conglomid =
        xa_tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    // Open a scan on the conglomerate, to verify the create happened,
    // and to show that the same openScan done after abort fails.
    ScanController scan1 = xa_tc.openScan(
        conglomid,
        false, // don't hold
        0, // not for update
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_SERIALIZABLE,
        (FormatableBitSet) null, // all columns, all as objects
        null, // start position - first row in conglomerate
        0, // unused if start position is null.
        null, // qualifier - accept all rows
        null, // stop position - last row in conglomerate
        0); // unused if stop position is null.
    scan1.next();
    scan1.close();
    // prepare the update xact.
    if (xa_tc.xa_prepare() != XATransactionController.XA_OK)
    {
        throw T_Fail.testFailMsg(
            "prepare of update xact did not return XA_OK.");
    }
    try
    {
        // Open a scan on the conglomerate.
        scan1 = xa_tc.openScan(
            conglomid,
            false, // don't hold
            0, // not for update
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_SERIALIZABLE,
            (FormatableBitSet) null, // all columns, all as objects
            null, // start position - first row in conglomerate
            0, // unused if start position is null.
            null, // qualifier - accept all rows
            null, // stop position - last row in conglomerate
            0); // unused if stop position is null.
        scan1.next();
        scan1.close();
        throw T_Fail.testFailMsg(
            "Should not be able to do anything on xact after prepare.");
    }
    catch (StandardException se)
    {
        // expected exception, fall through.
    }
    // roll back the prepared update xact, then clean up the idle xact.
    commit_method.rollback(42, global_id, branch_id, xa_tc);
    commit_method.commit(true, 42, null, null, xa_tc);
    // should not be able to find this global xact, it has been rolled back
    if (((XAResourceManager) store.getXAResourceManager()).find(
            new XAXactId(42, global_id, branch_id)) != null)
    {
        throw T_Fail.testFailMsg(
            "A xa_rollbacked xact should not be findable.");
    }
    // done with this xact.
    xa_tc.destroy();
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    try
    {
        // Open a scan on the conglomerate; must fail, its create was aborted.
        scan1 = xa_tc.openScan(
            conglomid,
            false, // don't hold
            0, // not for update
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_SERIALIZABLE,
            (FormatableBitSet) null, // all columns, all as objects
            null, // start position - first row in conglomerate
            0, // unused if start position is null.
            null, // qualifier - accept all rows
            null, // stop position - last row in conglomerate
            0); // unused if stop position is null.
        scan1.next();
        scan1.close();
        throw T_Fail.testFailMsg(
            "Should not be able to open conglom, the create was aborted.");
    }
    catch (StandardException se)
    {
        // expected exception, fall through.
    }
    xa_tc.destroy();
    // ABORT A READ ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Create a heap conglomerate.
    template_row = new T_AccessRow(1);
    conglomid =
        xa_tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    // commit the create so the conglomerate survives for the read-only xact below.
    commit_method.commit(true, 42, global_id, branch_id, xa_tc);
    xa_tc.destroy();
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Open a scan on the conglomerate so the xact has done (read-only) work.
    scan1 = xa_tc.openScan(
        conglomid,
        false, // don't hold
        0, // not for update
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_SERIALIZABLE,
        (FormatableBitSet) null, // all columns, all as objects
        null, // start position - first row in conglomerate
        0, // unused if start position is null.
        null, // qualifier - accept all rows
        null, // stop position - last row in conglomerate
        0); // unused if stop position is null.
    scan1.next();
    scan1.close();
    // This prepare will commit the read-only transaction.
    if (xa_tc.xa_prepare() != XATransactionController.XA_RDONLY)
    {
        throw T_Fail.testFailMsg(
            "prepare of idle xact did not return XA_RDONLY.");
    }
    // clean up the now-idle xact.
    commit_method.commit(true, 42, null, null, xa_tc);
    // should not be able to find this global xact, it has been committed
    if (((XAResourceManager) store.getXAResourceManager()).find(
            new XAXactId(42, global_id, branch_id)) != null)
    {
        throw T_Fail.testFailMsg(
            "A XA_RDONLY prepare-committed xact should not be findable.");
    }
    // done with this xact.
    xa_tc.destroy();
    // BUG FIX: the trace message previously (and incorrectly) said XATest_5.
    REPORT("(XATest_4) finishing");
}
/**
 * Very simple testing of the recover() call.
 * <p>
 * recover(TMSTARTRSCAN) must report only transactions that are actually in
 * the prepared state (not idle/active ones), and recover(TMNOFLAGS) must
 * always report none.
 *
 * @exception StandardException Standard exception policy.
 **/
void XATest_5(
        commit_method commit_method)
    throws StandardException, T_Fail
{
    REPORT("(XATest_5) starting");
    // Should be no prepared transactions when we first start.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMSTARTRSCAN).length != 0)
    {
        throw T_Fail.testFailMsg(
            "recover incorrectly returned prepared xacts.");
    }
    // TMNOFLAGS (no scan start) must always return an empty list.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMNOFLAGS).length != 0)
    {
        throw T_Fail.testFailMsg("NOFLAGS should always return 0.");
    }
    ContextManager cm =
        ContextService.getFactory().getCurrentContextManager();
    // COMMIT AN IDLE TRANSACTION.
    // Start a global transaction
    XATransactionController xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Should be no prepared transactions, there is one idle global xact.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMSTARTRSCAN).length != 0)
    {
        throw T_Fail.testFailMsg(
            "recover incorrectly returned prepared xacts.");
    }
    // commit an idle transaction - using onePhase optimization.
    commit_method.commit(true, 42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    // COMMIT AN UPDATE ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Create a heap conglomerate.
    T_AccessRow template_row = new T_AccessRow(1);
    long conglomid =
        xa_tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    // Should be no prepared transactions, there is one update global xact.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMSTARTRSCAN).length != 0)
    {
        throw T_Fail.testFailMsg(
            "recover incorrectly returned prepared xacts.");
    }
    // commit the update transaction - using onePhase optimization.
    commit_method.commit(true, 42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    // COMMIT A READ ONLY TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Open a scan on the conglomerate.
    ScanController scan1 = xa_tc.openScan(
        conglomid,
        false, // don't hold
        0, // not for update
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_SERIALIZABLE,
        (FormatableBitSet) null, // all columns, all as objects
        null, // start position - first row in conglomerate
        0, // unused if start position is null.
        null, // qualifier - accept all rows
        null, // stop position - last row in conglomerate
        0); // unused if stop position is null.
    scan1.next();
    scan1.close();
    // Should be no prepared transactions, there is one read-only global xact.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMSTARTRSCAN).length != 0)
    {
        throw T_Fail.testFailMsg(
            "recover incorrectly returned prepared xacts.");
    }
    // commit the read-only transaction - using onePhase optimization.
    commit_method.commit(true, 42, global_id, branch_id, xa_tc);
    // done with this xact.
    xa_tc.destroy();
    // PREPARE AN UPDATE TRANSACTION.
    // Start a global transaction
    xa_tc = (XATransactionController)
        store.startXATransaction(
            cm,
            42, // fake format id
            global_id,
            branch_id);
    // Create a heap conglomerate.
    template_row = new T_AccessRow(1);
    conglomid =
        xa_tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    // Should be no prepared transactions, there is one update global xact.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMSTARTRSCAN).length != 0)
    {
        throw T_Fail.testFailMsg(
            "recover incorrectly returned prepared xacts.");
    }
    // prepare the update xact.
    if (xa_tc.xa_prepare() != XATransactionController.XA_OK)
    {
        throw T_Fail.testFailMsg(
            "prepare of update xact did not return XA_OK.");
    }
    try
    {
        // Open a scan on the conglomerate.
        scan1 = xa_tc.openScan(
            conglomid,
            false, // don't hold
            0, // not for update
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_SERIALIZABLE,
            (FormatableBitSet) null, // all columns, all as objects
            null, // start position - first row in conglomerate
            0, // unused if start position is null.
            null, // qualifier - accept all rows
            null, // stop position - last row in conglomerate
            0); // unused if stop position is null.
        scan1.next();
        scan1.close();
        throw T_Fail.testFailMsg(
            "Should not be able to do anything on xact after prepare.");
    }
    catch (StandardException se)
    {
        // expected exception, fall through.
    }
    // Now there should be exactly one prepared transaction; verify its id fields.
    Xid[] prepared_xacts =
        ((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMSTARTRSCAN);
    if (prepared_xacts.length != 1)
    {
        throw T_Fail.testFailMsg(
            "recover incorrectly returned wrong prepared xacts.");
    }
    if (prepared_xacts[0].getFormatId() != 42)
        throw T_Fail.testFailMsg(
            "bad format id = " + prepared_xacts[0].getFormatId());
    byte[] gid = prepared_xacts[0].getGlobalTransactionId();
    if (!java.util.Arrays.equals(gid, global_id))
    {
        throw T_Fail.testFailMsg(
            "bad global id = " + org.apache.derbyTesting.unitTests.util.BitUtil.hexDump(gid));
    }
    byte[] bid = prepared_xacts[0].getBranchQualifier();
    if (!java.util.Arrays.equals(bid, branch_id))
    {
        throw T_Fail.testFailMsg(
            "bad branch id = " + org.apache.derbyTesting.unitTests.util.BitUtil.hexDump(bid));
    }
    // TMNOFLAGS must still report nothing, even with a prepared xact present.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMNOFLAGS).length != 0)
    {
        throw T_Fail.testFailMsg("NOFLAGS should always return 0.");
    }
    // commit a prepared transaction - using two phase.
    if (true)
    {
    }
    commit_method.commit(false, 42, global_id, branch_id, xa_tc);
    // After the commit no prepared transactions should remain.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMSTARTRSCAN).length != 0)
    {
        throw T_Fail.testFailMsg(
            "recover incorrectly returned prepared xacts.");
    }
    // done with this xact.
    xa_tc.destroy();
    // After destroy no prepared transactions should remain either.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMSTARTRSCAN).length != 0)
    {
        throw T_Fail.testFailMsg(
            "recover incorrectly returned prepared xacts.");
    }
    REPORT("(XATest_5) finishing");
}
/**
 * Very simple testing of changing a local transaction to a global.
 * <p>
 * Starts a local transaction, converts it to a global XA transaction via
 * createXATransactionFromLocalTransaction, then prepares and 2-phase commits it.
 *
 * @exception StandardException Standard exception policy.
 **/
void XATest_6(
        commit_method commit_method)
    throws StandardException, T_Fail
{
    // BUG FIX: the trace message previously (and incorrectly) said XATest_5.
    REPORT("(XATest_6) starting");
    ContextManager cm =
        ContextService.getFactory().getCurrentContextManager();
    TransactionController tc = store.getTransaction(cm);
    // Create a heap conglomerate.
    T_AccessRow template_row = new T_AccessRow(1);
    long conglomid =
        tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    tc.commit();
    // CONVERT THE LOCAL TRANSACTION TO A GLOBAL ONE.
    XATransactionController xa_tc = (XATransactionController)
        tc.createXATransactionFromLocalTransaction(
            42, // fake format id
            global_id,
            branch_id);
    if (!xa_tc.isGlobal())
    {
        throw T_Fail.testFailMsg("should be a global transaction.");
    }
    // Open a scan on the conglomerate.
    ScanController scan1 = xa_tc.openScan(
        conglomid,
        false, // don't hold
        0, // not for update
        TransactionController.MODE_RECORD,
        TransactionController.ISOLATION_SERIALIZABLE,
        (FormatableBitSet) null, // all columns, all as objects
        null, // start position - first row in conglomerate
        0, // unused if start position is null.
        null, // qualifier - accept all rows
        null, // stop position - last row in conglomerate
        0); // unused if stop position is null.
    scan1.next();
    scan1.close();
    // Create a heap conglomerate (update work inside the global xact).
    template_row = new T_AccessRow(1);
    conglomid =
        xa_tc.createConglomerate(
            "heap", // create a heap conglomerate
            template_row.getRowArray(), // 1 column template.
            null, //column sort order - not required for heap
            null, //default collation
            null, // default properties
            TransactionController.IS_DEFAULT); // not temporary
    // Should be no prepared transactions, there is one update global xact.
    if (((XAResourceManager) store.getXAResourceManager()).recover(
            XAResource.TMSTARTRSCAN).length != 0)
    {
        throw T_Fail.testFailMsg(
            "recover incorrectly returned prepared xacts.");
    }
    // prepare the update xact.
    if (xa_tc.xa_prepare() != XATransactionController.XA_OK)
    {
        throw T_Fail.testFailMsg(
            "prepare of update xact did not return XA_OK.");
    }
    try
    {
        // Open a scan on the conglomerate; must fail after prepare.
        scan1 = xa_tc.openScan(
            conglomid,
            false, // don't hold
            0, // not for update
            TransactionController.MODE_RECORD,
            TransactionController.ISOLATION_SERIALIZABLE,
            (FormatableBitSet) null, // all columns, all as objects
            null, // start position - first row in conglomerate
            0, // unused if start position is null.
            null, // qualifier - accept all rows
            null, // stop position - last row in conglomerate
            0); // unused if stop position is null.
        scan1.next();
        scan1.close();
        throw T_Fail.testFailMsg(
            "Should not be able to do anything on xact after prepare.");
    }
    catch (StandardException se)
    {
        // expected exception, fall through.
    }
    // commit a prepared transaction - using two phase.
    commit_method.commit(false, 42, global_id, branch_id, xa_tc);
    xa_tc.destroy();
    REPORT("(XATest_6) finishing");
}
}
/**
 * Helper that completes a global (XA) transaction either "online" through the
 * XATransactionController handle, or "offline" through the XAResourceManager
 * recovery interface, depending on configuration and on which handles the
 * caller was able to supply.
 **/
class commit_method
{
    // When true, prefer the online xa_commit()/xa_rollback() path.
    // Final: set once in the constructor and never reassigned.
    private final boolean online_xact;
    // Store used to look up the XAResourceManager for the offline path.
    private final AccessFactory store;

    public commit_method(
    AccessFactory store,
    boolean online_xact)
    {
        this.store = store;
        this.online_xact = online_xact;
    }

    /**
     * Commit the global transaction identified by
     * (format_id, global_id, branch_id), or by xa_tc on the online path.
     *
     * @param one_phase use the one phase commit protocol if true.
     * @param format_id format id portion of the Xid.
     * @param global_id global id portion of the Xid (null forces online path).
     * @param branch_id branch id portion of the Xid.
     * @param xa_tc     online handle (null forces offline path).
     *
     * @exception StandardException Standard exception policy.
     **/
    public void commit(
    boolean one_phase,
    int format_id,
    byte[] global_id,
    byte[] branch_id,
    XATransactionController xa_tc)
        throws StandardException
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT((global_id != null) || (xa_tc != null));

        if (useOnlineXact(global_id, xa_tc))
        {
            xa_tc.xa_commit(one_phase);
        }
        else
        {
            Xid xid = new XAXactId(format_id, global_id, branch_id);

            ((XAResourceManager) store.getXAResourceManager()).commit(
                findContextManager(xid), xid, one_phase);
        }
    }

    /**
     * Roll back the global transaction identified by
     * (format_id, global_id, branch_id), or by xa_tc on the online path.
     *
     * @param format_id format id portion of the Xid.
     * @param global_id global id portion of the Xid (null forces online path).
     * @param branch_id branch id portion of the Xid.
     * @param xa_tc     online handle (null forces offline path).
     *
     * @exception StandardException Standard exception policy.
     **/
    public void rollback(
    int format_id,
    byte[] global_id,
    byte[] branch_id,
    XATransactionController xa_tc)
        throws StandardException
    {
        if (SanityManager.DEBUG)
            SanityManager.ASSERT((global_id != null) || (xa_tc != null));

        if (useOnlineXact(global_id, xa_tc))
        {
            xa_tc.xa_rollback();
        }
        else
        {
            Xid xid = new XAXactId(format_id, global_id, branch_id);

            ((XAResourceManager) store.getXAResourceManager()).rollback(
                findContextManager(xid), xid);
        }
    }

    /**
     * Decide which completion path to use: the offline path is mandatory when
     * no controller handle exists, the online path is mandatory when no Xid
     * can be built; otherwise honor the configured preference.
     **/
    private boolean useOnlineXact(
    byte[] global_id,
    XATransactionController xa_tc)
    {
        if (xa_tc == null)
            return false;
        if (global_id == null)
            return true;
        return online_xact;
    }

    /**
     * Look up the ContextManager owning the given Xid and sanity-check that
     * it is the current thread's context manager.
     **/
    private ContextManager findContextManager(
    Xid xid)
        throws StandardException
    {
        ContextManager cm =
            ((XAResourceManager) store.getXAResourceManager()).find(xid);

        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(cm != null, "could not find xid = " + xid);
            SanityManager.ASSERT(
                cm ==
                    ContextService.getFactory().getCurrentContextManager(),
                "cm = " + cm +
                "current = " +
                ContextService.getFactory().getCurrentContextManager());
        }

        return cm;
    }
}
| apache-2.0 |
facebook/presto | presto-main/src/main/java/com/facebook/presto/operator/PageBufferClient.java | 20089 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator;
import com.facebook.airlift.http.client.HttpUriBuilder;
import com.facebook.airlift.log.Logger;
import com.facebook.presto.server.remotetask.Backoff;
import com.facebook.presto.spi.HostAddress;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.page.SerializedPage;
import com.google.common.base.Ticker;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import javax.annotation.Nullable;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import java.io.Closeable;
import java.net.URI;
import java.util.List;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.concurrent.Executor;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import static com.facebook.presto.spi.HostAddress.fromUri;
import static com.facebook.presto.spi.StandardErrorCode.REMOTE_BUFFER_CLOSE_FAILED;
import static com.facebook.presto.spi.StandardErrorCode.REMOTE_TASK_MISMATCH;
import static com.facebook.presto.spi.StandardErrorCode.SERIALIZED_PAGE_CHECKSUM_ERROR;
import static com.facebook.presto.spi.page.PagesSerdeUtil.isChecksumValid;
import static com.facebook.presto.util.Failures.REMOTE_TASK_MISMATCH_ERROR;
import static com.facebook.presto.util.Failures.WORKER_NODE_ERROR;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
/**
 * Client that pulls result pages from a single remote task's output buffer.
 * <p>
 * Requests are scheduled one at a time with exponential {@link Backoff};
 * fetched pages are checksum-validated and handed to the {@link ClientCallback}.
 * Closing the client aborts the remote buffer with a delete request.
 * <p>
 * Thread-safety: mutable state is guarded by {@code this}; callbacks are
 * always dispatched on {@code pageBufferClientCallbackExecutor} and must
 * never run while holding the monitor (enforced by checkNotHoldsLock).
 */
@ThreadSafe
public final class PageBufferClient
        implements Closeable
{
    private static final Logger log = Logger.get(PageBufferClient.class);

    /**
     * For each request, the addPage method will be called zero or more times,
     * followed by either requestComplete or clientFinished (if buffer complete). If the client is
     * closed, requestComplete or bufferFinished may never be called.
     * <p/>
     * <b>NOTE:</b> Implementations of this interface are not allowed to perform
     * blocking operations.
     */
    public interface ClientCallback
    {
        boolean addPages(PageBufferClient client, List<SerializedPage> pages);

        void requestComplete(PageBufferClient client);

        void clientFinished(PageBufferClient client);

        void clientFailed(PageBufferClient client, Throwable cause);
    }

    // Transport used to fetch, acknowledge, and abort results.
    private final RpcShuffleClient resultClient;
    // When true, eagerly acknowledge received tokens so the remote buffer can release pages sooner.
    private final boolean acknowledgePages;
    // Canonical buffer location; also used in error messages and equals/hashCode.
    private final URI location;
    // Optional alternate URI for async page transport; falls back to location when absent.
    private final Optional<URI> asyncPageTransportLocation;
    private final ClientCallback clientCallback;
    // Used only to delay request initiation according to backoff.
    private final ScheduledExecutorService scheduler;
    private final Backoff backoff;

    @GuardedBy("this")
    private boolean closed;
    // The in-flight request, or null when no request is running.
    @GuardedBy("this")
    private ListenableFuture<?> future;
    @GuardedBy("this")
    private DateTime lastUpdate = DateTime.now();
    // Next token to request from the remote buffer.
    @GuardedBy("this")
    private long token;
    // True between scheduleRequest() and initiateRequest().
    @GuardedBy("this")
    private boolean scheduled;
    // True once the remote buffer reported client-complete; next request sends a delete.
    @GuardedBy("this")
    private boolean completed;
    // Captured from the first response; later mismatches indicate the remote task was replaced.
    @GuardedBy("this")
    private String taskInstanceId;

    // Stats; atomic because they are read without the monitor in getStatus() callers.
    private final AtomicLong rowsReceived = new AtomicLong();
    private final AtomicInteger pagesReceived = new AtomicInteger();
    private final AtomicLong rowsRejected = new AtomicLong();
    private final AtomicInteger pagesRejected = new AtomicInteger();
    private final AtomicInteger requestsScheduled = new AtomicInteger();
    private final AtomicInteger requestsCompleted = new AtomicInteger();
    private final AtomicInteger requestsFailed = new AtomicInteger();

    // Executor on which all ClientCallback and future callbacks are dispatched.
    private final Executor pageBufferClientCallbackExecutor;

    public PageBufferClient(
            RpcShuffleClient resultClient,
            Duration maxErrorDuration,
            boolean acknowledgePages,
            URI location,
            Optional<URI> asyncPageTransportLocation,
            ClientCallback clientCallback,
            ScheduledExecutorService scheduler,
            Executor pageBufferClientCallbackExecutor)
    {
        this(resultClient, maxErrorDuration, acknowledgePages, location, asyncPageTransportLocation, clientCallback, scheduler, Ticker.systemTicker(), pageBufferClientCallbackExecutor);
    }

    public PageBufferClient(
            RpcShuffleClient resultClient,
            Duration maxErrorDuration,
            boolean acknowledgePages,
            URI location,
            Optional<URI> asyncPageTransportLocation,
            ClientCallback clientCallback,
            ScheduledExecutorService scheduler,
            Ticker ticker,
            Executor pageBufferClientCallbackExecutor)
    {
        this.resultClient = requireNonNull(resultClient, "resultClient is null");
        this.acknowledgePages = acknowledgePages;
        this.location = requireNonNull(location, "location is null");
        this.asyncPageTransportLocation = requireNonNull(asyncPageTransportLocation, "asyncPageTransportLocation is null");
        this.clientCallback = requireNonNull(clientCallback, "clientCallback is null");
        this.scheduler = requireNonNull(scheduler, "scheduler is null");
        this.pageBufferClientCallbackExecutor = requireNonNull(pageBufferClientCallbackExecutor, "pageBufferClientCallbackExecutor is null");
        requireNonNull(maxErrorDuration, "maxErrorDuration is null");
        requireNonNull(ticker, "ticker is null");
        this.backoff = new Backoff(maxErrorDuration, ticker);
    }

    /**
     * Snapshot of this client's state and counters for monitoring/debugging.
     */
    public synchronized PageBufferClientStatus getStatus()
    {
        String state;
        if (closed) {
            state = "closed";
        }
        else if (future != null) {
            state = "running";
        }
        else if (scheduled) {
            state = "scheduled";
        }
        else if (completed) {
            state = "completed";
        }
        else {
            state = "queued";
        }
        long rejectedRows = rowsRejected.get();
        int rejectedPages = pagesRejected.get();
        return new PageBufferClientStatus(
                location,
                state,
                lastUpdate,
                rowsReceived.get(),
                pagesReceived.get(),
                // empty rather than zero so the status only reports rejection when it happened
                rejectedRows == 0 ? OptionalLong.empty() : OptionalLong.of(rejectedRows),
                rejectedPages == 0 ? OptionalInt.empty() : OptionalInt.of(rejectedPages),
                requestsScheduled.get(),
                requestsCompleted.get(),
                requestsFailed.get(),
                future == null ? "not scheduled" : "processing request");
    }

    /** Returns true while a fetch or delete request is in flight. */
    public synchronized boolean isRunning()
    {
        return future != null;
    }

    /**
     * Marks the client closed, cancels any in-flight request, and aborts the
     * remote buffer (the delete is sent at most once; its response is ignored).
     */
    @Override
    public void close()
    {
        boolean shouldSendDelete;
        Future<?> future;
        synchronized (this) {
            shouldSendDelete = !closed;
            closed = true;
            future = this.future;
            this.future = null;
            lastUpdate = DateTime.now();
        }
        if (future != null && !future.isDone()) {
            // do not terminate if the request is already running to avoid closing pooled connections
            future.cancel(false);
        }
        // abort the output buffer on the remote node; response of delete is ignored
        if (shouldSendDelete) {
            sendDelete();
        }
    }

    /**
     * Schedules the next request (fetch or delete) after the current backoff
     * delay. No-op if closed, already running, or already scheduled.
     */
    public synchronized void scheduleRequest(DataSize maxResponseSize)
    {
        if (closed || (future != null) || scheduled) {
            return;
        }
        scheduled = true;

        // start before scheduling to include error delay
        backoff.startRequest();

        long delayNanos = backoff.getBackoffDelayNanos();
        scheduler.schedule(() -> {
            try {
                initiateRequest(maxResponseSize);
            }
            catch (Throwable t) {
                // should not happen, but be safe and fail the operator
                clientCallback.clientFailed(PageBufferClient.this, t);
            }
        }, delayNanos, NANOSECONDS);

        lastUpdate = DateTime.now();
        requestsScheduled.incrementAndGet();
    }

    // Runs on the scheduler after the backoff delay; dispatches to fetch or delete.
    private synchronized void initiateRequest(DataSize maxResponseSize)
    {
        scheduled = false;
        if (closed || (future != null)) {
            return;
        }
        if (completed) {
            // remote buffer already reported client-complete; acknowledge with a delete
            sendDelete();
        }
        else {
            sendGetResults(maxResponseSize);
        }
        lastUpdate = DateTime.now();
    }

    // Issues a GET for the current token and installs the response callback.
    private synchronized void sendGetResults(DataSize maxResponseSize)
    {
        // uri is used only for diagnostics in error messages below
        URI uriBase = asyncPageTransportLocation.orElse(location);
        URI uri = HttpUriBuilder.uriBuilderFrom(uriBase).appendPath(String.valueOf(token)).build();
        ListenableFuture<PagesResponse> resultFuture = resultClient.getResults(token, maxResponseSize);

        future = resultFuture;
        Futures.addCallback(resultFuture, new FutureCallback<PagesResponse>()
        {
            @Override
            public void onSuccess(PagesResponse result)
            {
                checkNotHoldsLock(this);

                backoff.success();

                List<SerializedPage> pages;
                try {
                    boolean shouldAcknowledge = false;
                    synchronized (PageBufferClient.this) {
                        if (taskInstanceId == null) {
                            taskInstanceId = result.getTaskInstanceId();
                        }

                        if (!isNullOrEmpty(taskInstanceId) && !result.getTaskInstanceId().equals(taskInstanceId)) {
                            // TODO: update error message
                            throw new PrestoException(REMOTE_TASK_MISMATCH, format("%s (%s)", REMOTE_TASK_MISMATCH_ERROR, fromUri(uri)));
                        }

                        if (result.getToken() == token) {
                            pages = result.getPages();
                            token = result.getNextToken();
                            shouldAcknowledge = pages.size() > 0;
                        }
                        else {
                            // stale response for an older token; nothing to consume
                            pages = ImmutableList.of();
                        }
                    }

                    if (shouldAcknowledge && acknowledgePages) {
                        // Acknowledge token without handling the response.
                        // The next request will also make sure the token is acknowledged.
                        // This is to fast release the pages on the buffer side.
                        resultClient.acknowledgeResultsAsync(result.getNextToken());
                    }

                    // reject the whole batch if any page fails checksum validation
                    for (SerializedPage page : pages) {
                        if (!isChecksumValid(page)) {
                            throw new PrestoException(SERIALIZED_PAGE_CHECKSUM_ERROR, format("Received corrupted serialized page from host %s", HostAddress.fromUri(uri)));
                        }
                    }

                    // add pages:
                    // addPages must be called regardless of whether pages is an empty list because
                    // clientCallback can keep stats of requests and responses. For example, it may
                    // keep track of how often a client returns empty response and adjust request
                    // frequency or buffer size.
                    if (clientCallback.addPages(PageBufferClient.this, pages)) {
                        pagesReceived.addAndGet(pages.size());
                        rowsReceived.addAndGet(pages.stream().mapToLong(SerializedPage::getPositionCount).sum());
                    }
                    else {
                        pagesRejected.addAndGet(pages.size());
                        rowsRejected.addAndGet(pages.stream().mapToLong(SerializedPage::getPositionCount).sum());
                    }
                }
                catch (PrestoException e) {
                    handleFailure(e, resultFuture);
                    return;
                }

                synchronized (PageBufferClient.this) {
                    // client is complete, acknowledge it by sending it a delete in the next request
                    if (result.isClientComplete()) {
                        completed = true;
                    }
                    // only clear the future if it still belongs to this request
                    if (future == resultFuture) {
                        future = null;
                    }
                    lastUpdate = DateTime.now();
                }
                requestsCompleted.incrementAndGet();
                clientCallback.requestComplete(PageBufferClient.this);
            }

            @Override
            public void onFailure(Throwable t)
            {
                log.debug("Request to %s failed %s", uri, t);
                checkNotHoldsLock(this);

                t = resultClient.rewriteException(t);
                // after the backoff budget is exhausted, escalate to a timeout exception
                if (!(t instanceof PrestoException) && backoff.failure()) {
                    String message = format("%s (%s - %s failures, failure duration %s, total failed request time %s)",
                            WORKER_NODE_ERROR,
                            uri,
                            backoff.getFailureCount(),
                            backoff.getFailureDuration().convertTo(SECONDS),
                            backoff.getFailureRequestTimeTotal().convertTo(SECONDS));
                    t = new PageTransportTimeoutException(fromUri(uri), message, t);
                }
                handleFailure(t, resultFuture);
            }
        }, pageBufferClientCallbackExecutor);
    }

    // Aborts the remote buffer; on success the client transitions to closed/finished.
    private synchronized void sendDelete()
    {
        ListenableFuture<?> resultFuture = resultClient.abortResults();
        future = resultFuture;
        Futures.addCallback(resultFuture, new FutureCallback<Object>()
        {
            @Override
            public void onSuccess(@Nullable Object result)
            {
                checkNotHoldsLock(this);
                backoff.success();
                synchronized (PageBufferClient.this) {
                    closed = true;
                    if (future == resultFuture) {
                        future = null;
                    }
                    lastUpdate = DateTime.now();
                }
                requestsCompleted.incrementAndGet();
                clientCallback.clientFinished(PageBufferClient.this);
            }

            @Override
            public void onFailure(Throwable t)
            {
                checkNotHoldsLock(this);

                log.error("Request to delete %s failed %s", location, t);
                // escalate to REMOTE_BUFFER_CLOSE_FAILED once backoff is exhausted
                if (!(t instanceof PrestoException) && backoff.failure()) {
                    String message = format("Error closing remote buffer (%s - %s failures, failure duration %s, total failed request time %s)",
                            location,
                            backoff.getFailureCount(),
                            backoff.getFailureDuration().convertTo(SECONDS),
                            backoff.getFailureRequestTimeTotal().convertTo(SECONDS));
                    t = new PrestoException(REMOTE_BUFFER_CLOSE_FAILED, message, t);
                }
                handleFailure(t, resultFuture);
            }
        }, pageBufferClientCallbackExecutor);
    }

    // Guards against dispatching callbacks while holding a lock (deadlock risk).
    private static void checkNotHoldsLock(Object lock)
    {
        checkState(!Thread.holdsLock(lock), "Cannot execute this method while holding a lock");
    }

    /**
     * Records a failed request. Only PrestoExceptions fail the client
     * permanently; other throwables end the request and allow a retry.
     */
    private void handleFailure(Throwable t, ListenableFuture<?> expectedFuture)
    {
        // Can not delegate to other callback while holding a lock on this
        checkNotHoldsLock(this);

        requestsFailed.incrementAndGet();
        requestsCompleted.incrementAndGet();

        if (t instanceof PrestoException) {
            clientCallback.clientFailed(PageBufferClient.this, t);
        }

        synchronized (PageBufferClient.this) {
            if (future == expectedFuture) {
                future = null;
            }
            lastUpdate = DateTime.now();
        }
        clientCallback.requestComplete(PageBufferClient.this);
    }

    // Identity is the buffer location only; mutable state is deliberately excluded.
    @Override
    public boolean equals(Object o)
    {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        PageBufferClient that = (PageBufferClient) o;

        if (!location.equals(that.location)) {
            return false;
        }

        return true;
    }

    @Override
    public int hashCode()
    {
        return location.hashCode();
    }

    @Override
    public String toString()
    {
        String state;
        synchronized (this) {
            if (closed) {
                state = "CLOSED";
            }
            else if (future != null) {
                state = "RUNNING";
            }
            else {
                state = "QUEUED";
            }
        }
        return toStringHelper(this)
                .add("location", location)
                .addValue(state)
                .toString();
    }

    /**
     * Immutable response of a single GET: the pages for a token, the next
     * token to request, and whether the remote buffer is finished with this client.
     */
    public static class PagesResponse
    {
        public static PagesResponse createPagesResponse(String taskInstanceId, long token, long nextToken, Iterable<SerializedPage> pages, boolean complete)
        {
            return new PagesResponse(taskInstanceId, token, nextToken, pages, complete);
        }

        public static PagesResponse createEmptyPagesResponse(String taskInstanceId, long token, long nextToken, boolean complete)
        {
            return new PagesResponse(taskInstanceId, token, nextToken, ImmutableList.of(), complete);
        }

        private final String taskInstanceId;
        private final long token;
        private final long nextToken;
        private final List<SerializedPage> pages;
        private final boolean clientComplete;

        private PagesResponse(String taskInstanceId, long token, long nextToken, Iterable<SerializedPage> pages, boolean clientComplete)
        {
            this.taskInstanceId = taskInstanceId;
            this.token = token;
            this.nextToken = nextToken;
            // defensive copy; also makes the list immutable
            this.pages = ImmutableList.copyOf(pages);
            this.clientComplete = clientComplete;
        }

        public long getToken()
        {
            return token;
        }

        public long getNextToken()
        {
            return nextToken;
        }

        public List<SerializedPage> getPages()
        {
            return pages;
        }

        public boolean isClientComplete()
        {
            return clientComplete;
        }

        public String getTaskInstanceId()
        {
            return taskInstanceId;
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .add("token", token)
                    .add("nextToken", nextToken)
                    .add("pagesSize", pages.size())
                    .add("clientComplete", clientComplete)
                    .toString();
        }
    }
}
| apache-2.0 |