repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
medicayun/medicayundicom | dcm4chee-xds/trunk/dcm4chee-xds-store/dcm4chee-xds-store-mbean/src/test/java/org/dcm4chee/xds/store/mbean/XDSStoreTestBase.java | 1830 | package org.dcm4chee.xds.store.mbean;
import java.io.IOException;
import java.net.URL;
import org.dcm4chee.docstore.DocumentStorageRegistry;
import org.dcm4chee.docstore.DocumentStore;
import org.dcm4chee.xds.common.XDSConstants;
import org.dcm4chee.xds.common.exception.XDSException;
import org.dcm4chee.xds.common.store.XDSDocument;
import org.dcm4chee.xds.common.store.XDSDocumentWriter;
import org.dcm4chee.xds.common.store.XDSDocumentWriterFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import junit.framework.TestCase;
public class XDSStoreTestBase extends TestCase {

    public static final String MIME_TEXT_XML = "text/xml";
    public static final String MIME_TEXT_PLAIN = "text/plain";

    // Shared across all test instances: the docstore configuration and the
    // service under test are expensive to set up, so they are created once.
    protected static DocumentStorageRegistry registry;
    protected static XDSStoreService xdsStore;

    public static final XDSDocumentWriterFactory fac = XDSDocumentWriterFactory.getInstance();

    private static Logger log = LoggerFactory.getLogger(XDSStoreTestBase.class);

    /**
     * Creates the test base and lazily performs the one-time setup of the
     * shared document store registry and XDS store service.
     */
    public XDSStoreTestBase() {
        init();
    }

    /**
     * One-time setup: loads {@code test_docstore_cfg.xml} from the classpath,
     * configures the storage registry and starts the XDS store service.
     * Fails the test with the causing exception's details if any step throws.
     */
    private void init() {
        try {
            if (registry == null) {
                registry = new DocumentStorageRegistry();
                ClassLoader cl = Thread.currentThread().getContextClassLoader();
                URL url = cl.getResource("test_docstore_cfg.xml");
                // Parameterized logging instead of string concatenation.
                log.info("################## XDSStore Test docstore cfg file:{}", url);
                registry.config(url.toExternalForm());
                DocumentStore.setDocumentStorageRegistry(registry);
                xdsStore = new XDSStoreService();
                xdsStore.startService();
                xdsStore.setStoreBeforeRegisterPool("uncommitted");
            }
        } catch (Exception e) {
            // Include the exception in the failure message; the previous bare
            // "Exception in init()!" gave no clue about what actually failed.
            fail("Exception in init()! " + e);
        }
    }
}
| apache-2.0 |
huitseeker/nd4j | nd4j-backends/nd4j-api-parent/nd4j-api/src/main/java/org/nd4j/linalg/api/rng/distribution/impl/UniformDistribution.java | 5952 | /*-
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*
*/
package org.nd4j.linalg.api.rng.distribution.impl;
import org.apache.commons.math3.exception.NumberIsTooLargeException;
import org.apache.commons.math3.exception.OutOfRangeException;
import org.apache.commons.math3.exception.util.LocalizedFormats;
import org.nd4j.linalg.api.iter.NdIndexIterator;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.rng.distribution.BaseDistribution;
import org.nd4j.linalg.factory.Nd4j;
import java.util.Iterator;
/**
* Base distribution derived from apache commons math
* http://commons.apache.org/proper/commons-math/
* <p/>
* (specifically the {@link org.apache.commons.math3.distribution.UniformIntegerDistribution})
*
* @author Adam Gibson
*/
public class UniformDistribution extends BaseDistribution {
    // Bounds of the distribution: lower is inclusive, upper is exclusive
    // (see constructor javadoc).
    private double upper, lower;

    /**
     * Create a uniform real distribution using the given lower and upper
     * bounds, backed by Nd4j's default random generator.
     *
     * @param lower Lower bound of this distribution (inclusive).
     * @param upper Upper bound of this distribution (exclusive).
     * @throws NumberIsTooLargeException if {@code lower >= upper}.
     */
    public UniformDistribution(double lower, double upper) throws NumberIsTooLargeException {
        this(Nd4j.getRandom(), lower, upper);
    }

    /**
     * Creates a uniform distribution.
     *
     * @param rng   Random number generator.
     * @param lower Lower bound of this distribution (inclusive).
     * @param upper Upper bound of this distribution (exclusive).
     * @throws NumberIsTooLargeException if {@code lower >= upper}.
     * @since 3.1
     */
    public UniformDistribution(org.nd4j.linalg.api.rng.Random rng, double lower, double upper)
                    throws NumberIsTooLargeException {
        super(rng);
        if (lower >= upper) {
            throw new NumberIsTooLargeException(LocalizedFormats.LOWER_BOUND_NOT_BELOW_UPPER_BOUND, lower, upper,
                            false);
        }
        this.lower = lower;
        this.upper = upper;
    }

    /**
     * {@inheritDoc}
     */
    public double density(double x) {
        if (x < lower || x > upper) {
            return 0.0;
        }
        return 1 / (upper - lower);
    }

    /**
     * {@inheritDoc}
     */
    public double cumulativeProbability(double x) {
        if (x <= lower) {
            return 0;
        }
        if (x >= upper) {
            return 1;
        }
        return (x - lower) / (upper - lower);
    }

    /**
     * Probability that a random variable of this distribution falls in the
     * interval {@code (x0, x1]}.
     *
     * @param x0 Lower endpoint of the interval.
     * @param x1 Upper endpoint of the interval.
     * @return {@code P(x0 < X <= x1)}.
     * @throws NumberIsTooLargeException if {@code x0 > x1}.
     */
    @Override
    public double cumulativeProbability(double x0, double x1) throws NumberIsTooLargeException {
        // Was an unimplemented stub that always returned 0, i.e. it reported
        // zero probability for every interval. Implemented per the
        // commons-math AbstractRealDistribution contract.
        if (x0 > x1) {
            throw new NumberIsTooLargeException(LocalizedFormats.LOWER_ENDPOINT_ABOVE_UPPER_ENDPOINT, x0, x1, true);
        }
        return cumulativeProbability(x1) - cumulativeProbability(x0);
    }

    @Override
    public double inverseCumulativeProbability(final double p) throws OutOfRangeException {
        if (p < 0.0 || p > 1.0) {
            throw new OutOfRangeException(p, 0, 1);
        }
        return p * (upper - lower) + lower;
    }

    /**
     * {@inheritDoc}
     * <p/>
     * For lower bound {@code lower} and upper bound {@code upper}, the mean is
     * {@code 0.5 * (lower + upper)}.
     */
    public double getNumericalMean() {
        return 0.5 * (lower + upper);
    }

    /**
     * {@inheritDoc}
     * <p/>
     * For lower bound {@code lower} and upper bound {@code upper}, the
     * variance is {@code (upper - lower)^2 / 12}.
     */
    public double getNumericalVariance() {
        double ul = upper - lower;
        return ul * ul / 12;
    }

    /**
     * {@inheritDoc}
     * <p/>
     * The lower bound of the support is equal to the lower bound parameter
     * of the distribution.
     *
     * @return lower bound of the support
     */
    public double getSupportLowerBound() {
        return lower;
    }

    /**
     * {@inheritDoc}
     * <p/>
     * The upper bound of the support is equal to the upper bound parameter
     * of the distribution.
     *
     * @return upper bound of the support
     */
    public double getSupportUpperBound() {
        return upper;
    }

    /**
     * {@inheritDoc}
     */
    public boolean isSupportLowerBoundInclusive() {
        return true;
    }

    /**
     * {@inheritDoc}
     */
    public boolean isSupportUpperBoundInclusive() {
        return true;
    }

    /**
     * {@inheritDoc}
     * <p/>
     * The support of this distribution is connected.
     *
     * @return {@code true}
     */
    public boolean isSupportConnected() {
        return true;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public double sample() {
        final double u = random.nextDouble();
        // Linear interpolation between the bounds keeps the endpoints
        // reachable regardless of how nextDouble() treats its own bounds.
        return u * upper + (1 - u) * lower;
    }

    /**
     * Fills an array of the given shape with uniform samples. When the RNG
     * exposes a native state pointer, sampling is delegated to the backend
     * op; otherwise values are drawn one by one on the host.
     */
    @Override
    public INDArray sample(int[] shape) {
        if (random.getStatePointer() != null) {
            return Nd4j.getExecutioner().exec(new org.nd4j.linalg.api.ops.random.impl.UniformDistribution(
                            Nd4j.createUninitialized(shape, Nd4j.order()), lower, upper), random);
        } else {
            INDArray ret = Nd4j.createUninitialized(shape, Nd4j.order());
            Iterator<int[]> idxIter = new NdIndexIterator(shape); //For consistent values irrespective of c vs. fortran ordering
            int len = ret.length();
            for (int i = 0; i < len; i++) {
                ret.putScalar(idxIter.next(), sample());
            }
            return ret;
        }
    }
}
| apache-2.0 |
wso2/carbon-identity-framework | components/identity-core/org.wso2.carbon.identity.core/src/main/java/org/wso2/carbon/identity/core/cache/CacheKey.java | 1305 | /*
* Copyright (c) 2021, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.core.cache;
import java.io.Serializable;
/**
* Cache key class. Any value that acts as a cache key must encapsulated with a class overriding
* from this class.
*/
public abstract class CacheKey implements Serializable {
private static final long serialVersionUID = -7198600199516906683L;
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
return true;
}
@Override
public int hashCode() {
return 0;
}
}
| apache-2.0 |
smartnews/presto | presto-main/src/test/java/io/prestosql/sql/planner/TestRecursiveCte.java | 5447 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.prestosql.Session;
import io.prestosql.sql.planner.assertions.BasePlanTest;
import io.prestosql.sql.planner.assertions.PlanMatchPattern;
import io.prestosql.testing.LocalQueryRunner;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.Test;
import static io.prestosql.sql.planner.LogicalPlanner.Stage.CREATED;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.anyTree;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.filter;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.functionCall;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.union;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.window;
import static io.prestosql.testing.TestingSession.testSessionBuilder;
public class TestRecursiveCte
extends BasePlanTest
{
/**
 * Creates a query runner whose "max_recursion_depth" session property is 1,
 * so the recursive CTE below is unrolled exactly once before the
 * depth-limit guard is appended to the plan.
 */
@Override
protected LocalQueryRunner createLocalQueryRunner()
{
Session.SessionBuilder sessionBuilder = testSessionBuilder()
.setSystemProperty("max_recursion_depth", "1");
return LocalQueryRunner.create(sessionBuilder.build());
}
/**
 * Asserts the CREATED-stage plan shape of a recursive WITH query: the
 * recursion is rewritten into a UNION of (1) the base term, (2) one
 * unrolled recursion step, and (3) a final step guarded by a window count
 * feeding a fail() filter that fires once the depth limit (1) is exceeded.
 */
@Test
public void testRecursiveQuery()
{
@Language("SQL") String sql = "WITH RECURSIVE t(n) AS (" +
" SELECT 1" +
" UNION ALL" +
" SELECT n + 2 FROM t WHERE n < 6" +
" )" +
" SELECT * from t";
// Expected plan, innermost patterns correspond to the base term re-expanded
// inside each unrolled step.
PlanMatchPattern pattern =
anyTree(
union(
// base term
project(project(project(
ImmutableMap.of("expr", expression("1")),
values()))),
// first recursion step
project(project(project(
ImmutableMap.of("expr_0", expression("expr + 2")),
filter(
"expr < 6",
project(project(project(
ImmutableMap.of("expr", expression("1")),
values()))))))),
// "post-recursion" step with convergence assertion
filter(
"IF((count >= BIGINT '0'), " +
"CAST(fail(CAST('Recursion depth limit exceeded (1). Use ''max_recursion_depth'' session property to modify the limit.' AS varchar)) AS boolean), " +
"true)",
window(windowBuilder -> windowBuilder
.addFunction(
"count",
functionCall("count", ImmutableList.of())),
project(project(project(
ImmutableMap.of("expr_1", expression("expr + 2")),
filter(
"expr < 6",
project(
ImmutableMap.of("expr", expression("expr_0")),
project(project(project(
ImmutableMap.of("expr_0", expression("expr + 2")),
filter(
"expr < 6",
project(project(project(
ImmutableMap.of("expr", expression("1")),
values()))))))))))))))));
assertPlan(sql, CREATED, pattern);
}
}
| apache-2.0 |
NJUJYB/disYarn | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/NodeInfo.java | 4510 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.webapp.dao;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
@XmlRootElement(name = "node")
@XmlAccessorType(XmlAccessType.FIELD)
public class NodeInfo {

    // JAXB serializes fields directly (XmlAccessType.FIELD); the field names
    // define the REST wire format and must stay stable.
    protected String rack;
    protected NodeState state;
    protected String id;
    protected String nodeHostName;
    protected String nodeHTTPAddress;
    protected long lastHealthUpdate;
    protected String version;
    protected String healthReport;
    protected int numContainers;
    protected long usedMemoryMB;
    protected long availMemoryMB;
    protected long usedVirtualCores;
    protected long availableVirtualCores;
    protected ArrayList<String> nodeLabels = new ArrayList<String>();
    protected ResourceUtilizationInfo resourceUtilization;

    /** No-arg constructor required by JAXB. */
    public NodeInfo() {
    }

    /**
     * Builds a REST view of a cluster node, combining static node details
     * from the RM with per-node usage figures from the scheduler (defaults
     * to zeros when the scheduler has no report for the node yet).
     */
    public NodeInfo(RMNode ni, ResourceScheduler sched) {
        NodeId nodeId = ni.getNodeID();

        // Usage figures from the scheduler, when available.
        SchedulerNodeReport schedulerReport = sched.getNodeReport(nodeId);
        if (schedulerReport == null) {
            this.numContainers = 0;
            this.usedMemoryMB = 0;
            this.availMemoryMB = 0;
        } else {
            this.numContainers = schedulerReport.getNumContainers();
            this.usedMemoryMB = schedulerReport.getUsedResource().getMemory();
            this.availMemoryMB = schedulerReport.getAvailableResource().getMemory();
            this.usedVirtualCores = schedulerReport.getUsedResource().getVirtualCores();
            this.availableVirtualCores = schedulerReport.getAvailableResource().getVirtualCores();
        }

        // Static node details from the RM.
        this.id = nodeId.toString();
        this.rack = ni.getRackName();
        this.nodeHostName = ni.getHostName();
        this.state = ni.getState();
        this.nodeHTTPAddress = ni.getHttpAddress();
        this.lastHealthUpdate = ni.getLastHealthReportTime();
        // String.valueOf guards against a null health report.
        this.healthReport = String.valueOf(ni.getHealthReport());
        this.version = ni.getNodeManagerVersion();

        // Labels are exposed in sorted order.
        Set<String> labels = ni.getNodeLabels();
        if (labels != null) {
            nodeLabels.addAll(labels);
            Collections.sort(nodeLabels);
        }

        // Node and container resource utilization snapshot.
        this.resourceUtilization = new ResourceUtilizationInfo(ni);
    }

    public String getRack() {
        return rack;
    }

    public String getState() {
        return String.valueOf(state);
    }

    public String getNodeId() {
        return id;
    }

    public String getNodeHTTPAddress() {
        return nodeHTTPAddress;
    }

    public void setNodeHTTPAddress(String nodeHTTPAddress) {
        this.nodeHTTPAddress = nodeHTTPAddress;
    }

    public long getLastHealthUpdate() {
        return lastHealthUpdate;
    }

    public String getVersion() {
        return version;
    }

    public String getHealthReport() {
        return healthReport;
    }

    public int getNumContainers() {
        return numContainers;
    }

    public long getUsedMemory() {
        return usedMemoryMB;
    }

    public long getAvailableMemory() {
        return availMemoryMB;
    }

    public long getUsedVirtualCores() {
        return usedVirtualCores;
    }

    public long getAvailableVirtualCores() {
        return availableVirtualCores;
    }

    public ArrayList<String> getNodeLabels() {
        return nodeLabels;
    }

    public ResourceUtilizationInfo getResourceUtilization() {
        return resourceUtilization;
    }
}
| apache-2.0 |
kjniemi/activemq-artemis | artemis-commons/src/main/java/org/apache/activemq/artemis/logs/AuditLogger.java | 130904 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.logs;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.Logger;
import org.jboss.logging.annotations.Cause;
import org.jboss.logging.annotations.LogMessage;
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.MessageLogger;
import javax.management.ObjectName;
import javax.security.auth.Subject;
import java.security.AccessController;
import java.security.Principal;
import java.util.Arrays;
import java.util.Set;
/**
* Logger Code 60
*
* each message id must be 6 digits long starting with 60, the 3rd digit donates the level so
*
* INF0 1
* WARN 2
* DEBUG 3
* ERROR 4
* TRACE 5
* FATAL 6
*
* so an INFO message would be 601000 to 601999
*/
@MessageLogger(projectCode = "AMQ")
public interface AuditLogger extends BasicLogger {
AuditLogger LOGGER = Logger.getMessageLogger(AuditLogger.class, "org.apache.activemq.audit.base");
AuditLogger RESOURCE_LOGGER = Logger.getMessageLogger(AuditLogger.class, "org.apache.activemq.audit.resource");
AuditLogger MESSAGE_LOGGER = Logger.getMessageLogger(AuditLogger.class, "org.apache.activemq.audit.message");
ThreadLocal<String> remoteUrl = new ThreadLocal<>();
ThreadLocal<Subject> currentCaller = new ThreadLocal<>();
/**
 * Tells whether any of the three audit channels (base, message or resource)
 * is currently logging at INFO level.
 */
static boolean isAnyLoggingEnabled() {
    if (isEnabled()) {
        return true;
    }
    if (isMessageEnabled()) {
        return true;
    }
    return isResourceLoggingEnabled();
}
// True when the base audit logger ("org.apache.activemq.audit.base") logs at INFO.
static boolean isEnabled() {
return LOGGER.isEnabled(Logger.Level.INFO);
}
// True when the resource audit logger ("org.apache.activemq.audit.resource") logs at INFO.
static boolean isResourceLoggingEnabled() {
return RESOURCE_LOGGER.isEnabled(Logger.Level.INFO);
}
// True when the message audit logger ("org.apache.activemq.audit.message") logs at INFO.
static boolean isMessageEnabled() {
return MESSAGE_LOGGER.isEnabled(Logger.Level.INFO);
}
/**
 * Resolves the audit caller string from the current access-control context,
 * falling back to the thread-local caller when no Subject is attached.
 */
static String getCaller() {
    Subject contextSubject = Subject.getSubject(AccessController.getContext());
    return getCaller(contextSubject != null ? contextSubject : currentCaller.get());
}
/**
 * Resolves the audit caller string, preferring an authenticated Subject
 * (from the access-control context or the thread-local) over the supplied
 * user name. The plain user name is only used when no Subject is available;
 * it is then suffixed with the remote address (or "@unknown").
 */
static String getCaller(String user) {
    Subject subject = Subject.getSubject(AccessController.getContext());
    if (subject == null) {
        subject = currentCaller.get();
    }
    if (subject != null) {
        return getCaller(subject);
    }
    String url = remoteUrl.get();
    return user + (url == null ? "@unknown" : url);
}
/**
 * Formats an audit caller id as {@code user(role)@address}. User and role
 * are read from principals whose class names end in "UserPrincipal" /
 * "RolePrincipal"; defaults are "anonymous", no role and "@unknown".
 */
static String getCaller(Subject subject) {
    String user = "anonymous";
    String roles = "";
    String url = remoteUrl.get() == null ? "@unknown" : remoteUrl.get();
    if (subject != null) {
        for (Principal principal : subject.getPrincipals()) {
            String type = principal.getClass().getName();
            if (type.endsWith("UserPrincipal")) {
                user = principal.getName();
            } else if (type.endsWith("RolePrincipal")) {
                // NOTE(review): if several role principals are present, only
                // the last one encountered is reported — confirm intended.
                roles = "(" + principal.getName() + ")";
            }
        }
    }
    return user + roles + url;
}
// Binds the authenticated Subject to the current thread for later audit-caller resolution.
static void setCurrentCaller(Subject caller) {
currentCaller.set(caller);
}
/**
 * Records the remote address for the current thread, normalized to the
 * form "@address" (a leading '/' from socket-style addresses is dropped).
 */
static void setRemoteAddress(String remoteAddress) {
    String normalized = remoteAddress.startsWith("/")
        ? remoteAddress.substring(1)
        : remoteAddress;
    remoteUrl.set("@" + normalized);
}
// Returns the thread-local remote address previously set via setRemoteAddress, or null.
static String getRemoteAddress() {
return remoteUrl.get();
}
/**
 * Renders a (possibly array-typed) parameter value as
 * "with parameters: [...]" for audit messages; returns "" for null. Each
 * primitive array type needs its own branch because Arrays.toString is
 * overloaded per component type.
 */
static String arrayToString(Object value) {
    if (value == null) {
        return "";
    }
    final String prefix = "with parameters: ";
    final String rendered;
    if (value instanceof Object[]) {
        rendered = Arrays.toString((Object[]) value);
    } else if (value instanceof long[]) {
        rendered = Arrays.toString((long[]) value);
    } else if (value instanceof int[]) {
        rendered = Arrays.toString((int[]) value);
    } else if (value instanceof short[]) {
        rendered = Arrays.toString((short[]) value);
    } else if (value instanceof byte[]) {
        rendered = Arrays.toString((byte[]) value);
    } else if (value instanceof char[]) {
        rendered = Arrays.toString((char[]) value);
    } else if (value instanceof double[]) {
        rendered = Arrays.toString((double[]) value);
    } else if (value instanceof float[]) {
        rendered = Arrays.toString((float[]) value);
    } else if (value instanceof boolean[]) {
        rendered = Arrays.toString((boolean[]) value);
    } else {
        rendered = value.toString();
    }
    return prefix + rendered;
}
static void getRoutingTypes(Object source) {
LOGGER.getRoutingTypes(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601000, value = "User {0} is getting routing type property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRoutingTypes(String user, Object source, Object... args);
static void getRoutingTypesAsJSON(Object source) {
LOGGER.getRoutingTypesAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601001, value = "User {0} is getting routing type property as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRoutingTypesAsJSON(String user, Object source, Object... args);
static void getQueueNames(Object source, Object... args) {
LOGGER.getQueueNames(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601002, value = "User {0} is getting queue names on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getQueueNames(String user, Object source, Object... args);
static void getBindingNames(Object source) {
LOGGER.getBindingNames(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601003, value = "User {0} is getting binding names on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getBindingNames(String user, Object source, Object... args);
static void getRoles(Object source, Object... args) {
LOGGER.getRoles(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601004, value = "User {0} is getting roles on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRoles(String user, Object source, Object... args);
static void getRolesAsJSON(Object source, Object... args) {
LOGGER.getRolesAsJSON(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601005, value = "User {0} is getting roles as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRolesAsJSON(String user, Object source, Object... args);
static void getNumberOfBytesPerPage(Object source) {
LOGGER.getNumberOfBytesPerPage(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601006, value = "User {0} is getting number of bytes per page on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getNumberOfBytesPerPage(String user, Object source, Object... args);
static void getAddressSize(Object source) {
LOGGER.getAddressSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601007, value = "User {0} is getting address size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAddressSize(String user, Object source, Object... args);
static void getNumberOfMessages(Object source) {
LOGGER.getNumberOfMessages(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601008, value = "User {0} is getting number of messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getNumberOfMessages(String user, Object source, Object... args);
static void isPaging(Object source) {
LOGGER.isPaging(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601009, value = "User {0} is getting isPaging on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isPaging(String user, Object source, Object... args);
static void getNumberOfPages(Object source) {
LOGGER.getNumberOfPages(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601010, value = "User {0} is getting number of pages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getNumberOfPages(String user, Object source, Object... args);
static void getRoutedMessageCount(Object source) {
LOGGER.getRoutedMessageCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601011, value = "User {0} is getting routed message count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRoutedMessageCount(String user, Object source, Object... args);
static void getUnRoutedMessageCount(Object source) {
LOGGER.getUnRoutedMessageCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601012, value = "User {0} is getting unrouted message count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getUnRoutedMessageCount(String user, Object source, Object... args);
static void sendMessage(Object source, String user, Object... args) {
LOGGER.sendMessage(getCaller(user), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601013, value = "User {0} is sending a message on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void sendMessage(String user, Object source, Object... args);
static void getName(Object source) {
LOGGER.getName(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601014, value = "User {0} is getting name on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getName(String user, Object source, Object... args);
static void getAddress(Object source) {
LOGGER.getAddress(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601015, value = "User {0} is getting address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAddress(String user, Object source, Object... args);
static void getFilter(Object source) {
LOGGER.getFilter(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601016, value = "User {0} is getting filter on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getFilter(String user, Object source, Object... args);
static void isDurable(Object source) {
LOGGER.isDurable(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601017, value = "User {0} is getting durable property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isDurable(String user, Object source, Object... args);
static void getMessageCount(Object source) {
LOGGER.getMessageCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601018, value = "User {0} is getting message count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessageCount(String user, Object source, Object... args);
static void getMBeanInfo(Object source) {
LOGGER.getMBeanInfo(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601019, value = "User {0} is getting mbean info on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMBeanInfo(String user, Object source, Object... args);
static void getFactoryClassName(Object source) {
LOGGER.getFactoryClassName(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601020, value = "User {0} is getting factory class name on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getFactoryClassName(String user, Object source, Object... args);
static void getParameters(Object source) {
LOGGER.getParameters(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601021, value = "User {0} is getting parameters on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getParameters(String user, Object source, Object... args);
static void reload(Object source) {
LOGGER.reload(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601022, value = "User {0} is doing reload on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void reload(String user, Object source, Object... args);
static void isStarted(Object source) {
LOGGER.isStarted(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601023, value = "User {0} is querying isStarted on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isStarted(String user, Object source, Object... args);
static void startAcceptor(Object source) {
LOGGER.startAcceptor(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601024, value = "User {0} is starting an acceptor on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void startAcceptor(String user, Object source, Object... args);
static void stopAcceptor(Object source) {
LOGGER.stopAcceptor(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601025, value = "User {0} is stopping an acceptor on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void stopAcceptor(String user, Object source, Object... args);
static void getVersion(Object source) {
LOGGER.getVersion(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601026, value = "User {0} is getting version on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getVersion(String user, Object source, Object... args);
static void isBackup(Object source) {
LOGGER.isBackup(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601027, value = "User {0} is querying isBackup on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isBackup(String user, Object source, Object... args);
// --- Audit entries for broker configuration/attribute queries (ids 601028-601035). ---
// Each attribute follows the same generated-looking pattern: a static helper that
// resolves the current caller via getCaller() and delegates to the typed logger
// method, paired with an @Message-annotated declaration that fixes the audit
// record's id and text. The @Message value documents what each entry records.
static void isSharedStore(Object source) {
LOGGER.isSharedStore(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601028, value = "User {0} is querying isSharedStore on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isSharedStore(String user, Object source, Object... args);
static void getBindingsDirectory(Object source) {
LOGGER.getBindingsDirectory(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601029, value = "User {0} is getting bindings directory on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getBindingsDirectory(String user, Object source, Object... args);
static void getIncomingInterceptorClassNames(Object source) {
LOGGER.getIncomingInterceptorClassNames(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601030, value = "User {0} is getting incoming interceptor class names on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getIncomingInterceptorClassNames(String user, Object source, Object... args);
static void getOutgoingInterceptorClassNames(Object source) {
LOGGER.getOutgoingInterceptorClassNames(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601031, value = "User {0} is getting outgoing interceptor class names on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getOutgoingInterceptorClassNames(String user, Object source, Object... args);
static void getJournalBufferSize(Object source) {
LOGGER.getJournalBufferSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601032, value = "User {0} is getting journal buffer size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getJournalBufferSize(String user, Object source, Object... args);
static void getJournalBufferTimeout(Object source) {
LOGGER.getJournalBufferTimeout(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601033, value = "User {0} is getting journal buffer timeout on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getJournalBufferTimeout(String user, Object source, Object... args);
// Mutating operation: args (the new setting) are flattened into the record via arrayToString.
static void setFailoverOnServerShutdown(Object source, Object... args) {
LOGGER.setFailoverOnServerShutdown(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601034, value = "User {0} is setting failover on server shutdown on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void setFailoverOnServerShutdown(String user, Object source, Object... args);
static void isFailoverOnServerShutdown(Object source) {
LOGGER.isFailoverOnServerShutdown(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601035, value = "User {0} is querying is-failover-on-server-shutdown on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isFailoverOnServerShutdown(String user, Object source, Object... args);
// --- Audit entries for journal/paging/thread-pool attribute reads (ids 601036-601047). ---
// Same delegating pattern as above; no arguments beyond the target resource are recorded.
static void getJournalMaxIO(Object source) {
LOGGER.getJournalMaxIO(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601036, value = "User {0} is getting journal's max io on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getJournalMaxIO(String user, Object source, Object... args);
static void getJournalDirectory(Object source) {
LOGGER.getJournalDirectory(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601037, value = "User {0} is getting journal directory on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getJournalDirectory(String user, Object source, Object... args);
static void getJournalFileSize(Object source) {
LOGGER.getJournalFileSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601038, value = "User {0} is getting journal file size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getJournalFileSize(String user, Object source, Object... args);
static void getJournalMinFiles(Object source) {
LOGGER.getJournalMinFiles(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601039, value = "User {0} is getting journal min files on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getJournalMinFiles(String user, Object source, Object... args);
static void getJournalCompactMinFiles(Object source) {
LOGGER.getJournalCompactMinFiles(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601040, value = "User {0} is getting journal compact min files on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getJournalCompactMinFiles(String user, Object source, Object... args);
static void getJournalCompactPercentage(Object source) {
LOGGER.getJournalCompactPercentage(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601041, value = "User {0} is getting journal compact percentage on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getJournalCompactPercentage(String user, Object source, Object... args);
static void isPersistenceEnabled(Object source) {
LOGGER.isPersistenceEnabled(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601042, value = "User {0} is querying persistence enabled on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isPersistenceEnabled(String user, Object source, Object... args);
static void getJournalType(Object source) {
LOGGER.getJournalType(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601043, value = "User {0} is getting journal type on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getJournalType(String user, Object source, Object... args);
static void getPagingDirectory(Object source) {
LOGGER.getPagingDirectory(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601044, value = "User {0} is getting paging directory on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getPagingDirectory(String user, Object source, Object... args);
static void getScheduledThreadPoolMaxSize(Object source) {
LOGGER.getScheduledThreadPoolMaxSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601045, value = "User {0} is getting scheduled threadpool max size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getScheduledThreadPoolMaxSize(String user, Object source, Object... args);
static void getThreadPoolMaxSize(Object source) {
LOGGER.getThreadPoolMaxSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601046, value = "User {0} is getting threadpool max size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getThreadPoolMaxSize(String user, Object source, Object... args);
static void getSecurityInvalidationInterval(Object source) {
LOGGER.getSecurityInvalidationInterval(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601047, value = "User {0} is getting security invalidation interval on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getSecurityInvalidationInterval(String user, Object source, Object... args);
// --- Audit entries for boolean broker-configuration queries (ids 601048-601053). ---
static void isClustered(Object source) {
LOGGER.isClustered(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601048, value = "User {0} is querying is-clustered on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isClustered(String user, Object source, Object... args);
static void isCreateBindingsDir(Object source) {
LOGGER.isCreateBindingsDir(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601049, value = "User {0} is querying is-create-bindings-dir on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isCreateBindingsDir(String user, Object source, Object... args);
static void isCreateJournalDir(Object source) {
LOGGER.isCreateJournalDir(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601050, value = "User {0} is querying is-create-journal-dir on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isCreateJournalDir(String user, Object source, Object... args);
static void isJournalSyncNonTransactional(Object source) {
LOGGER.isJournalSyncNonTransactional(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601051, value = "User {0} is querying is-journal-sync-non-transactional on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isJournalSyncNonTransactional(String user, Object source, Object... args);
static void isJournalSyncTransactional(Object source) {
LOGGER.isJournalSyncTransactional(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601052, value = "User {0} is querying is-journal-sync-transactional on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isJournalSyncTransactional(String user, Object source, Object... args);
static void isSecurityEnabled(Object source) {
LOGGER.isSecurityEnabled(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601053, value = "User {0} is querying is-security-enabled on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isSecurityEnabled(String user, Object source, Object... args);
/**
 * Audit helper: records that the current caller (resolved via getCaller())
 * queried the async-connection-execution-enabled flag on {@code source}.
 */
static void isAsyncConnectionExecutionEnabled(Object source) {
LOGGER.isAsyncConnectionExecutionEnabled(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
// Fixed grammar in the audit text: "is query" -> "is querying", matching the
// phrasing of every sibling query entry (e.g. ids 601048-601053). The message
// id 601054 is unchanged, so tooling keyed on the code is unaffected.
@Message(id = 601054, value = "User {0} is querying is-async-connection-execution-enabled on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isAsyncConnectionExecutionEnabled(String user, Object source, Object... args);
// --- Audit entries for disk/memory metrics, replication freeze, and address/queue
// --- management operations (ids 601055-601064). Mutating operations record their
// --- arguments via arrayToString(args); read-only queries record none.
static void getDiskScanPeriod(Object source) {
LOGGER.getDiskScanPeriod(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601055, value = "User {0} is getting disk scan period on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDiskScanPeriod(String user, Object source, Object... args);
static void getMaxDiskUsage(Object source) {
LOGGER.getMaxDiskUsage(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601056, value = "User {0} is getting max disk usage on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMaxDiskUsage(String user, Object source, Object... args);
static void getGlobalMaxSize(Object source) {
LOGGER.getGlobalMaxSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601057, value = "User {0} is getting global max size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getGlobalMaxSize(String user, Object source, Object... args);
static void getAddressMemoryUsage(Object source) {
LOGGER.getAddressMemoryUsage(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601058, value = "User {0} is getting address memory usage on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAddressMemoryUsage(String user, Object source, Object... args);
static void getAddressMemoryUsagePercentage(Object source) {
LOGGER.getAddressMemoryUsagePercentage(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601059, value = "User {0} is getting address memory usage percentage on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAddressMemoryUsagePercentage(String user, Object source, Object... args);
static void freezeReplication(Object source) {
LOGGER.freezeReplication(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601060, value = "User {0} is freezing replication on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void freezeReplication(String user, Object source, Object... args);
static void createAddress(Object source, Object... args) {
LOGGER.createAddress(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601061, value = "User {0} is creating an address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void createAddress(String user, Object source, Object... args);
static void updateAddress(Object source, Object... args) {
LOGGER.updateAddress(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601062, value = "User {0} is updating an address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void updateAddress(String user, Object source, Object... args);
static void deleteAddress(Object source, Object... args) {
LOGGER.deleteAddress(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601063, value = "User {0} is deleting an address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void deleteAddress(String user, Object source, Object... args);
static void deployQueue(Object source, Object... args) {
LOGGER.deployQueue(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601064, value = "User {0} is creating a queue on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void deployQueue(String user, Object source, Object... args);
// NOTE(review): createQueue is the only helper in this run that takes an explicit
// user and delegates to RESOURCE_LOGGER rather than LOGGER — presumably routing
// to a separate resource-audit category; confirm this asymmetry is intentional.
static void createQueue(Object source, String user, Object... args) {
RESOURCE_LOGGER.createQueue(getCaller(user), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601065, value = "User {0} is creating a queue on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void createQueue(String user, Object source, Object... args);
static void updateQueue(Object source, Object... args) {
LOGGER.updateQueue(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601066, value = "User {0} is updating a queue on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void updateQueue(String user, Object source, Object... args);
// --- Audit entries for cluster/uptime/replica status queries (ids 601067-601071). ---
static void getClusterConnectionNames(Object source) {
LOGGER.getClusterConnectionNames(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601067, value = "User {0} is getting cluster connection names on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getClusterConnectionNames(String user, Object source, Object... args);
static void getUptime(Object source) {
LOGGER.getUptime(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601068, value = "User {0} is getting uptime on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getUptime(String user, Object source, Object... args);
static void getUptimeMillis(Object source) {
LOGGER.getUptimeMillis(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601069, value = "User {0} is getting uptime in milliseconds on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getUptimeMillis(String user, Object source, Object... args);
static void isReplicaSync(Object source) {
LOGGER.isReplicaSync(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601070, value = "User {0} is querying is-replica-sync on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isReplicaSync(String user, Object source, Object... args);
static void getAddressNames(Object source) {
LOGGER.getAddressNames(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601071, value = "User {0} is getting address names on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAddressNames(String user, Object source, Object... args);
// NOTE(review): destroyQueue receives an explicit user like createQueue does, yet
// delegates to LOGGER rather than RESOURCE_LOGGER — verify whether queue deletion
// should also go to the resource-audit category.
static void destroyQueue(Object source, String user, Object... args) {
LOGGER.destroyQueue(getCaller(user), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601072, value = "User {0} is deleting a queue on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void destroyQueue(String user, Object source, Object... args);
// --- Audit entries for address listing and connection/message statistics (ids 601073-601081). ---
static void getAddressInfo(Object source, Object... args) {
LOGGER.getAddressInfo(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601073, value = "User {0} is getting address info on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAddressInfo(String user, Object source, Object... args);
static void listBindingsForAddress(Object source, Object... args) {
LOGGER.listBindingsForAddress(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601074, value = "User {0} is listing bindings for address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listBindingsForAddress(String user, Object source, Object... args);
static void listAddresses(Object source, Object... args) {
LOGGER.listAddresses(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601075, value = "User {0} is listing addresses on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listAddresses(String user, Object source, Object... args);
static void getConnectionCount(Object source, Object... args) {
LOGGER.getConnectionCount(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601076, value = "User {0} is getting connection count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getConnectionCount(String user, Object source, Object... args);
static void getTotalConnectionCount(Object source) {
LOGGER.getTotalConnectionCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601077, value = "User {0} is getting total connection count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTotalConnectionCount(String user, Object source, Object... args);
static void getTotalMessageCount(Object source) {
LOGGER.getTotalMessageCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601078, value = "User {0} is getting total message count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTotalMessageCount(String user, Object source, Object... args);
static void getTotalMessagesAdded(Object source) {
LOGGER.getTotalMessagesAdded(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601079, value = "User {0} is getting total messages added on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTotalMessagesAdded(String user, Object source, Object... args);
static void getTotalMessagesAcknowledged(Object source) {
LOGGER.getTotalMessagesAcknowledged(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601080, value = "User {0} is getting total messages acknowledged on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTotalMessagesAcknowledged(String user, Object source, Object... args);
static void getTotalConsumerCount(Object source) {
LOGGER.getTotalConsumerCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601081, value = "User {0} is getting total consumer count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTotalConsumerCount(String user, Object source, Object... args);
// --- Audit entries for message-counter management (ids 601082-601090). ---
static void enableMessageCounters(Object source) {
LOGGER.enableMessageCounters(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601082, value = "User {0} is enabling message counters on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void enableMessageCounters(String user, Object source, Object... args);
static void disableMessageCounters(Object source) {
LOGGER.disableMessageCounters(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601083, value = "User {0} is disabling message counters on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void disableMessageCounters(String user, Object source, Object... args);
static void resetAllMessageCounters(Object source) {
LOGGER.resetAllMessageCounters(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601084, value = "User {0} is resetting all message counters on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetAllMessageCounters(String user, Object source, Object... args);
static void resetAllMessageCounterHistories(Object source) {
LOGGER.resetAllMessageCounterHistories(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601085, value = "User {0} is resetting all message counter histories on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetAllMessageCounterHistories(String user, Object source, Object... args);
static void isMessageCounterEnabled(Object source) {
LOGGER.isMessageCounterEnabled(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601086, value = "User {0} is querying is-message-counter-enabled on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isMessageCounterEnabled(String user, Object source, Object... args);
static void getMessageCounterSamplePeriod(Object source) {
LOGGER.getMessageCounterSamplePeriod(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601087, value = "User {0} is getting message counter sample period on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessageCounterSamplePeriod(String user, Object source, Object... args);
static void setMessageCounterSamplePeriod(Object source, Object... args) {
LOGGER.setMessageCounterSamplePeriod(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601088, value = "User {0} is setting message counter sample period on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void setMessageCounterSamplePeriod(String user, Object source, Object... args);
static void getMessageCounterMaxDayCount(Object source) {
LOGGER.getMessageCounterMaxDayCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601089, value = "User {0} is getting message counter max day count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessageCounterMaxDayCount(String user, Object source, Object... args);
static void setMessageCounterMaxDayCount(Object source, Object... args) {
LOGGER.setMessageCounterMaxDayCount(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601090, value = "User {0} is setting message counter max day count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void setMessageCounterMaxDayCount(String user, Object source, Object... args);
// --- Audit entries for prepared/heuristic transaction inspection (ids 601091-601095). ---
static void listPreparedTransactions(Object source) {
LOGGER.listPreparedTransactions(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601091, value = "User {0} is listing prepared transactions on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listPreparedTransactions(String user, Object source, Object... args);
static void listPreparedTransactionDetailsAsJSON(Object source) {
LOGGER.listPreparedTransactionDetailsAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601092, value = "User {0} is listing prepared transaction details as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listPreparedTransactionDetailsAsJSON(String user, Object source, Object... args);
static void listPreparedTransactionDetailsAsHTML(Object source, Object... args) {
LOGGER.listPreparedTransactionDetailsAsHTML(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601093, value = "User {0} is listing prepared transaction details as HTML on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listPreparedTransactionDetailsAsHTML(String user, Object source, Object... args);
static void listHeuristicCommittedTransactions(Object source) {
LOGGER.listHeuristicCommittedTransactions(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601094, value = "User {0} is listing heuristic committed transactions on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listHeuristicCommittedTransactions(String user, Object source, Object... args);
static void listHeuristicRolledBackTransactions(Object source) {
LOGGER.listHeuristicRolledBackTransactions(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601095, value = "User {0} is listing heuristic rolled back transactions on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listHeuristicRolledBackTransactions(String user, Object source, Object... args);
/**
 * Audit helper: records that the current caller committed a prepared (XA)
 * transaction on {@code source}; the transaction identifier arrives via
 * {@code args} and is flattened into the record through arrayToString.
 */
static void commitPreparedTransaction(Object source, Object... args) {
LOGGER.commitPreparedTransaction(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
// Fixed spelling in the audit text: "commiting" -> "committing".
// The message id 601096 is unchanged, so tooling keyed on the code is unaffected.
@Message(id = 601096, value = "User {0} is committing prepared transaction on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void commitPreparedTransaction(String user, Object source, Object... args);
// --- Audit entries for transaction rollback and forcible connection/session/consumer
// --- teardown (ids 601097-601104). All take identifying arguments via arrayToString.
static void rollbackPreparedTransaction(Object source, Object... args) {
LOGGER.rollbackPreparedTransaction(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601097, value = "User {0} is rolling back prepared transaction on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void rollbackPreparedTransaction(String user, Object source, Object... args);
static void listRemoteAddresses(Object source, Object... args) {
LOGGER.listRemoteAddresses(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601098, value = "User {0} is listing remote addresses on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listRemoteAddresses(String user, Object source, Object... args);
static void closeConnectionsForAddress(Object source, Object... args) {
LOGGER.closeConnectionsForAddress(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601099, value = "User {0} is closing connections for address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void closeConnectionsForAddress(String user, Object source, Object... args);
static void closeConsumerConnectionsForAddress(Object source, Object... args) {
LOGGER.closeConsumerConnectionsForAddress(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601100, value = "User {0} is closing consumer connections for address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void closeConsumerConnectionsForAddress(String user, Object source, Object... args);
static void closeConnectionsForUser(Object source, Object... args) {
LOGGER.closeConnectionsForUser(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601101, value = "User {0} is closing connections for user on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void closeConnectionsForUser(String user, Object source, Object... args);
static void closeConnectionWithID(Object source, Object... args) {
LOGGER.closeConnectionWithID(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601102, value = "User {0} is closing a connection by ID on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void closeConnectionWithID(String user, Object source, Object... args);
static void closeSessionWithID(Object source, Object... args) {
LOGGER.closeSessionWithID(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601103, value = "User {0} is closing session with id on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void closeSessionWithID(String user, Object source, Object... args);
static void closeConsumerWithID(Object source, Object... args) {
LOGGER.closeConsumerWithID(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601104, value = "User {0} is closing consumer with id on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void closeConsumerWithID(String user, Object source, Object... args);
// --- Audit entries for listing connections, sessions, producers, consumers and
// --- queues (ids 601105-601112). ---
static void listConnectionIDs(Object source) {
LOGGER.listConnectionIDs(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601105, value = "User {0} is listing connection IDs on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listConnectionIDs(String user, Object source, Object... args);
static void listSessions(Object source, Object... args) {
LOGGER.listSessions(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601106, value = "User {0} is listing sessions on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listSessions(String user, Object source, Object... args);
static void listProducersInfoAsJSON(Object source) {
LOGGER.listProducersInfoAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601107, value = "User {0} is listing producers info as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listProducersInfoAsJSON(String user, Object source, Object... args);
static void listConnections(Object source, Object... args) {
LOGGER.listConnections(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601108, value = "User {0} is listing connections on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listConnections(String user, Object source, Object... args);
static void listConsumers(Object source, Object... args) {
LOGGER.listConsumers(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601109, value = "User {0} is listing consumers on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listConsumers(String user, Object source, Object... args);
static void listQueues(Object source, Object... args) {
LOGGER.listQueues(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601110, value = "User {0} is listing queues on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listQueues(String user, Object source, Object... args);
static void listProducers(Object source, Object... args) {
LOGGER.listProducers(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601111, value = "User {0} is listing producers on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listProducers(String user, Object source, Object... args);
static void listConnectionsAsJSON(Object source) {
LOGGER.listConnectionsAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601112, value = "User {0} is listing connections as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listConnectionsAsJSON(String user, Object source, Object... args);
// --- Audit entries for JSON-format listings and connector queries (ids 601113-601118). ---
static void listSessionsAsJSON(Object source, Object... args) {
LOGGER.listSessionsAsJSON(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601113, value = "User {0} is listing sessions as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listSessionsAsJSON(String user, Object source, Object... args);
static void listAllSessionsAsJSON(Object source) {
LOGGER.listAllSessionsAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601114, value = "User {0} is listing all sessions as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listAllSessionsAsJSON(String user, Object source, Object... args);
static void listConsumersAsJSON(Object source, Object... args) {
LOGGER.listConsumersAsJSON(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601115, value = "User {0} is listing consumers as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listConsumersAsJSON(String user, Object source, Object... args);
static void listAllConsumersAsJSON(Object source) {
LOGGER.listAllConsumersAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601116, value = "User {0} is listing all consumers as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listAllConsumersAsJSON(String user, Object source, Object... args);
static void getConnectors(Object source) {
LOGGER.getConnectors(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601117, value = "User {0} is getting connectors on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getConnectors(String user, Object source, Object... args);
static void getConnectorsAsJSON(Object source) {
LOGGER.getConnectorsAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601118, value = "User {0} is getting connectors as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getConnectorsAsJSON(String user, Object source, Object... args);
// --- Audit entries for security-settings mutation and address-settings queries
// --- (ids 601119-601121). ---
static void addSecuritySettings(Object source, Object... args) {
LOGGER.addSecuritySettings(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601119, value = "User {0} is adding security settings on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void addSecuritySettings(String user, Object source, Object... args);
static void removeSecuritySettings(Object source, Object... args) {
LOGGER.removeSecuritySettings(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601120, value = "User {0} is removing security settings on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void removeSecuritySettings(String user, Object source, Object... args);
static void getAddressSettingsAsJSON(Object source, Object... args) {
LOGGER.getAddressSettingsAsJSON(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601121, value = "User {0} is getting address settings as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAddressSettingsAsJSON(String user, Object source, Object... args);
static void addAddressSettings(Object source, Object... args) {
LOGGER.addAddressSettings(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601122, value = "User {0} is adding addressSettings on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void addAddressSettings(String user, Object source, Object... args);
static void removeAddressSettings(Object source, Object... args) {
LOGGER.removeAddressSettings(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601123, value = "User {0} is removing address settings on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void removeAddressSettings(String user, Object source, Object... args);
static void getDivertNames(Object source) {
LOGGER.getDivertNames(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601124, value = "User {0} is getting divert names on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDivertNames(String user, Object source, Object... args);
static void createDivert(Object source, Object... args) {
LOGGER.createDivert(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601125, value = "User {0} is creating a divert on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void createDivert(String user, Object source, Object... args);
static void destroyDivert(Object source, Object... args) {
LOGGER.destroyDivert(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601126, value = "User {0} is destroying a divert on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void destroyDivert(String user, Object source, Object... args);
static void getBridgeNames(Object source) {
LOGGER.getBridgeNames(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601127, value = "User {0} is getting bridge names on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getBridgeNames(String user, Object source, Object... args);
static void createBridge(Object source, Object... args) {
LOGGER.createBridge(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601128, value = "User {0} is creating a bridge on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void createBridge(String user, Object source, Object... args);
static void destroyBridge(Object source, Object... args) {
LOGGER.destroyBridge(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601129, value = "User {0} is destroying a bridge on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void destroyBridge(String user, Object source, Object... args);
static void createConnectorService(Object source, Object... args) {
LOGGER.createConnectorService(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601130, value = "User {0} is creating connector service on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void createConnectorService(String user, Object source, Object... args);
static void destroyConnectorService(Object source, Object... args) {
LOGGER.destroyConnectorService(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601131, value = "User {0} is destroying connector service on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void destroyConnectorService(String user, Object source, Object... args);
static void getConnectorServices(Object source, Object... args) {
LOGGER.getConnectorServices(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601132, value = "User {0} is getting connector services on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getConnectorServices(String user, Object source, Object... args);
/**
 * Audit entry: the current user forced a failover on the given resource.
 *
 * @param source the management resource the operation was invoked on
 */
static void forceFailover(Object source) {
LOGGER.forceFailover(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
// Typo fix in the audit message text: "forceing" -> "forcing" (id 601133 unchanged).
@Message(id = 601133, value = "User {0} is forcing a failover on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void forceFailover(String user, Object source, Object... args);
// ---------------------------------------------------------------------------
// Audit-log boilerplate (see pattern note above each section): static wrapper
// resolves the user via getCaller() and delegates to LOGGER; the annotated
// method binds the unique message id and MessageFormat pattern.
// This section: scale-down, topology, JMX notification listeners, and
// read-only broker attribute getters.
// ---------------------------------------------------------------------------
static void scaleDown(Object source, Object... args) {
LOGGER.scaleDown(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601134, value = "User {0} is performing scale down on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void scaleDown(String user, Object source, Object... args);
static void listNetworkTopology(Object source) {
LOGGER.listNetworkTopology(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601135, value = "User {0} is listing network topology on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listNetworkTopology(String user, Object source, Object... args);
static void removeNotificationListener(Object source, Object... args) {
LOGGER.removeNotificationListener(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601136, value = "User {0} is removing notification listener on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void removeNotificationListener(String user, Object source, Object... args);
static void addNotificationListener(Object source, Object... args) {
LOGGER.addNotificationListener(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601137, value = "User {0} is adding notification listener on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void addNotificationListener(String user, Object source, Object... args);
static void getNotificationInfo(Object source) {
LOGGER.getNotificationInfo(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601138, value = "User {0} is getting notification info on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getNotificationInfo(String user, Object source, Object... args);
static void getConnectionTTLOverride(Object source) {
LOGGER.getConnectionTTLOverride(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601139, value = "User {0} is getting connection ttl override on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getConnectionTTLOverride(String user, Object source, Object... args);
static void getIDCacheSize(Object source) {
LOGGER.getIDCacheSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601140, value = "User {0} is getting ID cache size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getIDCacheSize(String user, Object source, Object... args);
static void getLargeMessagesDirectory(Object source) {
LOGGER.getLargeMessagesDirectory(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601141, value = "User {0} is getting large message directory on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getLargeMessagesDirectory(String user, Object source, Object... args);
static void getManagementAddress(Object source) {
LOGGER.getManagementAddress(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601142, value = "User {0} is getting management address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getManagementAddress(String user, Object source, Object... args);
static void getNodeID(Object source) {
LOGGER.getNodeID(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601143, value = "User {0} is getting node ID on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getNodeID(String user, Object source, Object... args);
static void getManagementNotificationAddress(Object source) {
LOGGER.getManagementNotificationAddress(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601144, value = "User {0} is getting management notification address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getManagementNotificationAddress(String user, Object source, Object... args);
static void getMessageExpiryScanPeriod(Object source) {
LOGGER.getMessageExpiryScanPeriod(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601145, value = "User {0} is getting message expiry scan period on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessageExpiryScanPeriod(String user, Object source, Object... args);
static void getMessageExpiryThreadPriority(Object source) {
LOGGER.getMessageExpiryThreadPriority(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601146, value = "User {0} is getting message expiry thread priority on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessageExpiryThreadPriority(String user, Object source, Object... args);
static void getTransactionTimeout(Object source) {
LOGGER.getTransactionTimeout(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601147, value = "User {0} is getting transaction timeout on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTransactionTimeout(String user, Object source, Object... args);
static void getTransactionTimeoutScanPeriod(Object source) {
LOGGER.getTransactionTimeoutScanPeriod(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601148, value = "User {0} is getting transaction timeout scan period on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTransactionTimeoutScanPeriod(String user, Object source, Object... args);
/**
 * Audit entry: the current user queried the
 * {@code persist-delivery-count-before-delivery} attribute.
 *
 * @param source the management resource the operation was invoked on
 */
static void isPersistDeliveryCountBeforeDelivery(Object source) {
LOGGER.isPersistDeliveryCountBeforeDelivery(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
// Message text fix: the attribute name was missing "count"
// ("is-persist-delivery-before-delivery"), mismatching the method name.
@Message(id = 601149, value = "User {0} is querying is-persist-delivery-count-before-delivery on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isPersistDeliveryCountBeforeDelivery(String user, Object source, Object... args);
// ---------------------------------------------------------------------------
// Audit-log boilerplate: static wrapper resolves the user via getCaller()
// (flattening varargs with arrayToString where present) and delegates to
// LOGGER; the annotated method binds the unique message id and
// MessageFormat pattern ({0}=user, {1}=resource, {2}=arguments).
// This section: broker boolean attributes, user management, and queue
// counter/size/address getters.
// ---------------------------------------------------------------------------
static void isPersistIDCache(Object source) {
LOGGER.isPersistIDCache(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601150, value = "User {0} is querying is-persist-id-cache on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isPersistIDCache(String user, Object source, Object... args);
static void isWildcardRoutingEnabled(Object source) {
LOGGER.isWildcardRoutingEnabled(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601151, value = "User {0} is querying is-wildcard-routing-enabled on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isWildcardRoutingEnabled(String user, Object source, Object... args);
static void addUser(Object source, Object... args) {
LOGGER.addUser(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601152, value = "User {0} is adding a user on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void addUser(String user, Object source, Object... args);
static void listUser(Object source, Object... args) {
LOGGER.listUser(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601153, value = "User {0} is listing a user on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listUser(String user, Object source, Object... args);
static void removeUser(Object source, Object... args) {
LOGGER.removeUser(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601154, value = "User {0} is removing a user on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void removeUser(String user, Object source, Object... args);
static void resetUser(Object source, Object... args) {
LOGGER.resetUser(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601155, value = "User {0} is resetting a user on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetUser(String user, Object source, Object... args);
static void getUser(Object source) {
LOGGER.getUser(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601156, value = "User {0} is getting user property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getUser(String user, Object source, Object... args);
static void getRoutingType(Object source) {
LOGGER.getRoutingType(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601157, value = "User {0} is getting routing type property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRoutingType(String user, Object source, Object... args);
static void isTemporary(Object source) {
LOGGER.isTemporary(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601158, value = "User {0} is getting temporary property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isTemporary(String user, Object source, Object... args);
static void getPersistentSize(Object source) {
LOGGER.getPersistentSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601159, value = "User {0} is getting persistent size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getPersistentSize(String user, Object source, Object... args);
static void getDurableMessageCount(Object source) {
LOGGER.getDurableMessageCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601160, value = "User {0} is getting durable message count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDurableMessageCount(String user, Object source, Object... args);
static void getDurablePersistSize(Object source) {
LOGGER.getDurablePersistSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601161, value = "User {0} is getting durable persist size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDurablePersistSize(String user, Object source, Object... args);
static void getConsumerCount(Object source) {
LOGGER.getConsumerCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601162, value = "User {0} is getting consumer count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getConsumerCount(String user, Object source, Object... args);
static void getDeliveringCount(Object source) {
LOGGER.getDeliveringCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601163, value = "User {0} is getting delivering count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDeliveringCount(String user, Object source, Object... args);
static void getDeliveringSize(Object source) {
LOGGER.getDeliveringSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601164, value = "User {0} is getting delivering size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDeliveringSize(String user, Object source, Object... args);
static void getDurableDeliveringCount(Object source) {
LOGGER.getDurableDeliveringCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601165, value = "User {0} is getting durable delivering count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDurableDeliveringCount(String user, Object source, Object... args);
static void getDurableDeliveringSize(Object source) {
LOGGER.getDurableDeliveringSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601166, value = "User {0} is getting durable delivering size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDurableDeliveringSize(String user, Object source, Object... args);
static void getMessagesAdded(Object source) {
LOGGER.getMessagesAdded(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601167, value = "User {0} is getting messages added on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessagesAdded(String user, Object source, Object... args);
static void getMessagesAcknowledged(Object source) {
LOGGER.getMessagesAcknowledged(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601168, value = "User {0} is getting messages acknowledged on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessagesAcknowledged(String user, Object source, Object... args);
static void getMessagesExpired(Object source) {
LOGGER.getMessagesExpired(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601169, value = "User {0} is getting messages expired on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessagesExpired(String user, Object source, Object... args);
static void getMessagesKilled(Object source) {
LOGGER.getMessagesKilled(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601170, value = "User {0} is getting messages killed on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessagesKilled(String user, Object source, Object... args);
static void getID(Object source) {
LOGGER.getID(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601171, value = "User {0} is getting ID on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getID(String user, Object source, Object... args);
static void getScheduledCount(Object source) {
LOGGER.getScheduledCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601172, value = "User {0} is getting scheduled count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getScheduledCount(String user, Object source, Object... args);
static void getScheduledSize(Object source) {
LOGGER.getScheduledSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601173, value = "User {0} is getting scheduled size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getScheduledSize(String user, Object source, Object... args);
static void getDurableScheduledCount(Object source) {
LOGGER.getDurableScheduledCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601174, value = "User {0} is getting durable scheduled count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDurableScheduledCount(String user, Object source, Object... args);
static void getDurableScheduledSize(Object source) {
LOGGER.getDurableScheduledSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601175, value = "User {0} is getting durable scheduled size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDurableScheduledSize(String user, Object source, Object... args);
static void getDeadLetterAddress(Object source) {
LOGGER.getDeadLetterAddress(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601176, value = "User {0} is getting dead letter address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDeadLetterAddress(String user, Object source, Object... args);
static void getExpiryAddress(Object source) {
LOGGER.getExpiryAddress(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601177, value = "User {0} is getting expiry address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getExpiryAddress(String user, Object source, Object... args);
static void getMaxConsumers(Object source) {
LOGGER.getMaxConsumers(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601178, value = "User {0} is getting max consumers on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMaxConsumers(String user, Object source, Object... args);
/**
 * Audit entry: the current user queried the {@code purge-on-no-consumers}
 * attribute.
 *
 * @param source the management resource the operation was invoked on
 */
static void isPurgeOnNoConsumers(Object source) {
LOGGER.isPurgeOnNoConsumers(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
// Message text fix: attribute name was "purge-on-consumers", dropping "no"
// and mismatching the method name isPurgeOnNoConsumers.
@Message(id = 601179, value = "User {0} is getting purge-on-no-consumers property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isPurgeOnNoConsumers(String user, Object source, Object... args);
// ---------------------------------------------------------------------------
// Audit-log boilerplate: static wrapper resolves the user via getCaller()
// (flattening varargs with arrayToString where present) and delegates to
// LOGGER; the annotated method binds the unique message id and
// MessageFormat pattern ({0}=user, {1}=resource, {2}=arguments).
// This section: queue boolean attributes, message listing/counting, and
// message lifecycle operations (remove/expire/retry/move/dead-letter).
// ---------------------------------------------------------------------------
static void isConfigurationManaged(Object source) {
LOGGER.isConfigurationManaged(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601180, value = "User {0} is getting configuration-managed property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isConfigurationManaged(String user, Object source, Object... args);
static void isExclusive(Object source) {
LOGGER.isExclusive(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601181, value = "User {0} is getting exclusive property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isExclusive(String user, Object source, Object... args);
static void isLastValue(Object source) {
LOGGER.isLastValue(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601182, value = "User {0} is getting last-value property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isLastValue(String user, Object source, Object... args);
static void listScheduledMessages(Object source) {
LOGGER.listScheduledMessages(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601183, value = "User {0} is listing scheduled messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listScheduledMessages(String user, Object source, Object... args);
static void listScheduledMessagesAsJSON(Object source) {
LOGGER.listScheduledMessagesAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601184, value = "User {0} is listing scheduled messages as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listScheduledMessagesAsJSON(String user, Object source, Object... args);
static void listDeliveringMessages(Object source) {
LOGGER.listDeliveringMessages(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601185, value = "User {0} is listing delivering messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listDeliveringMessages(String user, Object source, Object... args);
static void listDeliveringMessagesAsJSON(Object source) {
LOGGER.listDeliveringMessagesAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601186, value = "User {0} is listing delivering messages as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listDeliveringMessagesAsJSON(String user, Object source, Object... args);
static void listMessages(Object source, Object... args) {
LOGGER.listMessages(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601187, value = "User {0} is listing messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listMessages(String user, Object source, Object... args);
static void listMessagesAsJSON(Object source) {
LOGGER.listMessagesAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601188, value = "User {0} is listing messages as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listMessagesAsJSON(String user, Object source, Object... args);
static void getFirstMessage(Object source) {
LOGGER.getFirstMessage(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601189, value = "User {0} is getting first message on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getFirstMessage(String user, Object source, Object... args);
static void getFirstMessageAsJSON(Object source) {
LOGGER.getFirstMessageAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601190, value = "User {0} is getting first message as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getFirstMessageAsJSON(String user, Object source, Object... args);
static void getFirstMessageTimestamp(Object source) {
LOGGER.getFirstMessageTimestamp(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601191, value = "User {0} is getting first message's timestamp on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getFirstMessageTimestamp(String user, Object source, Object... args);
static void getFirstMessageAge(Object source) {
LOGGER.getFirstMessageAge(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601192, value = "User {0} is getting first message's age on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getFirstMessageAge(String user, Object source, Object... args);
static void countMessages(Object source, Object... args) {
LOGGER.countMessages(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601193, value = "User {0} is counting messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void countMessages(String user, Object source, Object... args);
static void countDeliveringMessages(Object source, Object... args) {
LOGGER.countDeliveringMessages(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601194, value = "User {0} is counting delivery messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void countDeliveringMessages(String user, Object source, Object... args);
static void removeMessage(Object source, Object... args) {
LOGGER.removeMessage(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601195, value = "User {0} is removing a message on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void removeMessage(String user, Object source, Object... args);
static void removeMessages(Object source, Object... args) {
LOGGER.removeMessages(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601196, value = "User {0} is removing messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void removeMessages(String user, Object source, Object... args);
static void expireMessage(Object source, Object... args) {
LOGGER.expireMessage(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601197, value = "User {0} is expiring messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void expireMessage(String user, Object source, Object... args);
static void expireMessages(Object source, Object... args) {
LOGGER.expireMessages(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601198, value = "User {0} is expiring messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void expireMessages(String user, Object source, Object... args);
static void retryMessage(Object source, Object... args) {
LOGGER.retryMessage(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601199, value = "User {0} is retry sending message on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void retryMessage(String user, Object source, Object... args);
static void retryMessages(Object source) {
LOGGER.retryMessages(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601200, value = "User {0} is retry sending messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void retryMessages(String user, Object source, Object... args);
static void moveMessage(Object source, Object... args) {
LOGGER.moveMessage(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601201, value = "User {0} is moving a message to another queue on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void moveMessage(String user, Object source, Object... args);
static void moveMessages(Object source, Object... args) {
LOGGER.moveMessages(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601202, value = "User {0} is moving messages to another queue on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void moveMessages(String user, Object source, Object... args);
static void sendMessagesToDeadLetterAddress(Object source, Object... args) {
LOGGER.sendMessagesToDeadLetterAddress(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601203, value = "User {0} is sending messages to dead letter address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void sendMessagesToDeadLetterAddress(String user, Object source, Object... args);
/**
 * Audit entry: the current user sent a single message to the dead letter
 * address (singular counterpart of id 601203).
 *
 * @param source the management resource the operation was invoked on
 * @param args   operation arguments (e.g. the message id), flattened for the log
 */
static void sendMessageToDeadLetterAddress(Object source, Object... args) {
LOGGER.sendMessageToDeadLetterAddress(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
// Message text fix: "messages" -> "a message" so the singular operation is
// distinguishable from the bulk variant (601203) in audit output.
@Message(id = 601204, value = "User {0} is sending a message to dead letter address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void sendMessageToDeadLetterAddress(String user, Object source, Object... args);
// ---------------------------------------------------------------------------
// Audit-log boilerplate: static wrapper resolves the user via getCaller()
// (flattening varargs with arrayToString where present) and delegates to
// LOGGER; the annotated method binds the unique message id and
// MessageFormat pattern ({0}=user, {1}=resource, {2}=arguments).
// This section: message priority changes, message counters, and
// pause/resume control.
// ---------------------------------------------------------------------------
static void changeMessagesPriority(Object source, Object... args) {
LOGGER.changeMessagesPriority(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601205, value = "User {0} is changing message's priority on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void changeMessagesPriority(String user, Object source, Object... args);
static void changeMessagePriority(Object source, Object... args) {
LOGGER.changeMessagePriority(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601206, value = "User {0} is changing a message's priority on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void changeMessagePriority(String user, Object source, Object... args);
static void listMessageCounter(Object source) {
LOGGER.listMessageCounter(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601207, value = "User {0} is listing message counter on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listMessageCounter(String user, Object source, Object... args);
static void resetMessageCounter(Object source) {
LOGGER.resetMessageCounter(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601208, value = "User {0} is resetting message counter on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetMessageCounter(String user, Object source, Object... args);
static void listMessageCounterAsHTML(Object source) {
LOGGER.listMessageCounterAsHTML(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601209, value = "User {0} is listing message counter as HTML on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listMessageCounterAsHTML(String user, Object source, Object... args);
static void listMessageCounterHistory(Object source) {
LOGGER.listMessageCounterHistory(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601210, value = "User {0} is listing message counter history on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listMessageCounterHistory(String user, Object source, Object... args);
static void listMessageCounterHistoryAsHTML(Object source) {
LOGGER.listMessageCounterHistoryAsHTML(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601211, value = "User {0} is listing message counter history as HTML on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listMessageCounterHistoryAsHTML(String user, Object source, Object... args);
static void pause(Object source, Object... args) {
LOGGER.pause(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601212, value = "User {0} is pausing on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void pause(String user, Object source, Object... args);
static void resume(Object source) {
LOGGER.resume(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601213, value = "User {0} is resuming on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resume(String user, Object source, Object... args);
static void isPaused(Object source) {
LOGGER.isPaused(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601214, value = "User {0} is getting paused property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isPaused(String user, Object source, Object... args);
static void browse(Object source, Object... args) {
LOGGER.browse(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601215, value = "User {0} is browsing a queue on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void browse(String user, Object source, Object... args);
static void flushExecutor(Object source) {
LOGGER.flushExecutor(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601216, value = "User {0} is flushing executor on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void flushExecutor(String user, Object source, Object... args);
static void resetAllGroups(Object source) {
LOGGER.resetAllGroups(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601217, value = "User {0} is resetting all groups on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetAllGroups(String user, Object source, Object... args);
static void resetGroup(Object source, Object... args) {
LOGGER.resetGroup(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601218, value = "User {0} is resetting group on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetGroup(String user, Object source, Object... args);
static void getGroupCount(Object source, Object... args) {
LOGGER.getGroupCount(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601219, value = "User {0} is getting group count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getGroupCount(String user, Object source, Object... args);
static void listGroupsAsJSON(Object source) {
LOGGER.listGroupsAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601220, value = "User {0} is listing groups as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void listGroupsAsJSON(String user, Object source, Object... args);
static void resetMessagesAdded(Object source) {
LOGGER.resetMessagesAdded(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601221, value = "User {0} is resetting added messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetMessagesAdded(String user, Object source, Object... args);
static void resetMessagesAcknowledged(Object source) {
LOGGER.resetMessagesAcknowledged(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601222, value = "User {0} is resetting acknowledged messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetMessagesAcknowledged(String user, Object source, Object... args);
static void resetMessagesExpired(Object source) {
LOGGER.resetMessagesExpired(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601223, value = "User {0} is resetting expired messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetMessagesExpired(String user, Object source, Object... args);
static void resetMessagesKilled(Object source) {
LOGGER.resetMessagesKilled(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601224, value = "User {0} is resetting killed messages on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void resetMessagesKilled(String user, Object source, Object... args);
static void getStaticConnectors(Object source) {
LOGGER.getStaticConnectors(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601225, value = "User {0} is getting static connectors on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getStaticConnectors(String user, Object source, Object... args);
static void getForwardingAddress(Object source) {
LOGGER.getForwardingAddress(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601226, value = "User {0} is getting forwarding address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getForwardingAddress(String user, Object source, Object... args);
static void getQueueName(Object source) {
LOGGER.getQueueName(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601227, value = "User {0} is getting the queue name on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getQueueName(String user, Object source, Object... args);
static void getDiscoveryGroupName(Object source) {
LOGGER.getDiscoveryGroupName(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601228, value = "User {0} is getting discovery group name on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDiscoveryGroupName(String user, Object source, Object... args);
static void getFilterString(Object source) {
LOGGER.getFilterString(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601229, value = "User {0} is getting filter string on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getFilterString(String user, Object source, Object... args);
static void getReconnectAttempts(Object source) {
LOGGER.getReconnectAttempts(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601230, value = "User {0} is getting reconnect attempts on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getReconnectAttempts(String user, Object source, Object... args);
static void getRetryInterval(Object source) {
LOGGER.getRetryInterval(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601231, value = "User {0} is getting retry interval on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRetryInterval(String user, Object source, Object... args);
static void getRetryIntervalMultiplier(Object source) {
LOGGER.getRetryIntervalMultiplier(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601232, value = "User {0} is getting retry interval multiplier on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRetryIntervalMultiplier(String user, Object source, Object... args);
static void getTransformerClassName(Object source) {
LOGGER.getTransformerClassName(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601233, value = "User {0} is getting transformer class name on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTransformerClassName(String user, Object source, Object... args);
static void getTransformerPropertiesAsJSON(Object source) {
LOGGER.getTransformerPropertiesAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601234, value = "User {0} is getting transformer properties as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTransformerPropertiesAsJSON(String user, Object source, Object... args);
static void getTransformerProperties(Object source) {
LOGGER.getTransformerProperties(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601235, value = "User {0} is getting transformer properties on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTransformerProperties(String user, Object source, Object... args);
static void isStartedBridge(Object source) {
LOGGER.isStartedBridge(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601236, value = "User {0} is checking if bridge started on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isStartedBridge(String user, Object source, Object... args);
static void isUseDuplicateDetection(Object source) {
LOGGER.isUseDuplicateDetection(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601237, value = "User {0} is querying use duplicate detection on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isUseDuplicateDetection(String user, Object source, Object... args);
static void isHA(Object source) {
LOGGER.isHA(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601238, value = "User {0} is querying isHA on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isHA(String user, Object source, Object... args);
static void startBridge(Object source) {
LOGGER.startBridge(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601239, value = "User {0} is starting a bridge on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void startBridge(String user, Object source, Object... args);
static void stopBridge(Object source) {
LOGGER.stopBridge(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601240, value = "User {0} is stopping a bridge on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void stopBridge(String user, Object source, Object... args);
static void getMessagesPendingAcknowledgement(Object source) {
LOGGER.getMessagesPendingAcknowledgement(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601241, value = "User {0} is getting messages pending acknowledgement on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessagesPendingAcknowledgement(String user, Object source, Object... args);
static void getMetrics(Object source) {
LOGGER.getMetrics(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601242, value = "User {0} is getting metrics on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMetrics(String user, Object source, Object... args);
static void getBroadcastPeriod(Object source) {
LOGGER.getBroadcastPeriod(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601243, value = "User {0} is getting broadcast period on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getBroadcastPeriod(String user, Object source, Object... args);
static void getConnectorPairs(Object source) {
LOGGER.getConnectorPairs(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601244, value = "User {0} is getting connector pairs on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getConnectorPairs(String user, Object source, Object... args);
static void getConnectorPairsAsJSON(Object source) {
LOGGER.getConnectorPairsAsJSON(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601245, value = "User {0} is getting connector pairs as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getConnectorPairsAsJSON(String user, Object source, Object... args);
static void getGroupAddress(Object source) {
LOGGER.getGroupAddress(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601246, value = "User {0} is getting group address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getGroupAddress(String user, Object source, Object... args);
static void getGroupPort(Object source) {
LOGGER.getGroupPort(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601247, value = "User {0} is getting group port on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getGroupPort(String user, Object source, Object... args);
static void getLocalBindPort(Object source) {
LOGGER.getLocalBindPort(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601248, value = "User {0} is getting local binding port on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getLocalBindPort(String user, Object source, Object... args);
static void startBroadcastGroup(Object source) {
LOGGER.startBroadcastGroup(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601249, value = "User {0} is starting broadcasting group on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void startBroadcastGroup(String user, Object source, Object... args);
static void stopBroadcastGroup(Object source) {
LOGGER.stopBroadcastGroup(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601250, value = "User {0} is stopping broadcasting group on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void stopBroadcastGroup(String user, Object source, Object... args);
static void getMaxHops(Object source) {
LOGGER.getMaxHops(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601251, value = "User {0} is getting max hops on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMaxHops(String user, Object source, Object... args);
/**
 * Audit entry: a user is fetching a cluster connection's static connectors as JSON.
 *
 * @param source the management resource being accessed
 */
static void getStaticConnectorsAsJSON(Object source) {
   LOGGER.getStaticConnectorsAsJSON(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
// Fixed typo in the emitted audit message: "geting" -> "getting".
@Message(id = 601252, value = "User {0} is getting static connectors as json on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getStaticConnectorsAsJSON(String user, Object source, Object... args);
static void isDuplicateDetection(Object source) {
LOGGER.isDuplicateDetection(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601253, value = "User {0} is querying use duplicate detection on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isDuplicateDetection(String user, Object source, Object... args);
static void getMessageLoadBalancingType(Object source) {
LOGGER.getMessageLoadBalancingType(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601254, value = "User {0} is getting message loadbalancing type on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getMessageLoadBalancingType(String user, Object source, Object... args);
static void getTopology(Object source) {
LOGGER.getTopology(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601255, value = "User {0} is getting topology on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getTopology(String user, Object source, Object... args);
static void getNodes(Object source) {
LOGGER.getNodes(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601256, value = "User {0} is getting nodes on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getNodes(String user, Object source, Object... args);
/**
 * Audit entry: a user is starting a cluster connection.
 *
 * @param source the cluster-connection management resource being started
 */
static void startClusterConnection(Object source) {
   LOGGER.startClusterConnection(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
// Fixed grammar in the emitted audit message: "is start" -> "is starting"
// (matches the phrasing used by every sibling message, e.g. 601249).
@Message(id = 601257, value = "User {0} is starting cluster connection on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void startClusterConnection(String user, Object source, Object... args);
/**
 * Audit entry: a user is stopping a cluster connection.
 *
 * @param source the cluster-connection management resource being stopped
 */
static void stopClusterConnection(Object source) {
   LOGGER.stopClusterConnection(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
// Fixed grammar in the emitted audit message: "is stop" -> "is stopping"
// (matches the phrasing used by every sibling message, e.g. 601250).
@Message(id = 601258, value = "User {0} is stopping cluster connection on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void stopClusterConnection(String user, Object source, Object... args);
static void getBridgeMetrics(Object source, Object... args) {
LOGGER.getBridgeMetrics(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601259, value = "User {0} is getting bridge metrics on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getBridgeMetrics(String user, Object source, Object... args);
static void getRoutingName(Object source) {
LOGGER.getRoutingName(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601260, value = "User {0} is getting routing name on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRoutingName(String user, Object source, Object... args);
static void getUniqueName(Object source) {
LOGGER.getUniqueName(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601261, value = "User {0} is getting unique name on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getUniqueName(String user, Object source, Object... args);
static void serverSessionCreateAddress(Object source, String user, Object... args) {
LOGGER.serverSessionCreateAddress2(getCaller(user), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601262, value = "User {0} is creating address on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void serverSessionCreateAddress2(String user, Object source, Object... args);
static void handleManagementMessage(Object source, String user, Object... args) {
LOGGER.handleManagementMessage2(getCaller(user), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601263, value = "User {0} is handling a management message on target resource {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void handleManagementMessage2(String user, Object source, Object... args);
static void securityFailure(Exception cause) {
LOGGER.securityFailure(getCaller(), cause);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601264, value = "User {0} gets security check failure", format = Message.Format.MESSAGE_FORMAT)
void securityFailure(String user, @Cause Throwable cause);
static void createCoreConsumer(Object source, String user, Object... args) {
LOGGER.createCoreConsumer(getCaller(user), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601265, value = "User {0} is creating a core consumer on target resource {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void createCoreConsumer(String user, Object source, Object... args);
static void createSharedQueue(Object source, String user, Object... args) {
LOGGER.createSharedQueue(getCaller(user), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601266, value = "User {0} is creating a shared queue on target resource {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void createSharedQueue(String user, Object source, Object... args);
static void createCoreSession(Object source, Object... args) {
LOGGER.createCoreSession(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601267, value = "User {0} is creating a core session on target resource {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void createCoreSession(String user, Object source, Object... args);
// Audit entry: a user is reading the acknowledge-attempts metric.
// NOTE(review): this wrapper delegates to LOGGER.getMessagesAcknowledged(...),
// not to the getAcknowledgeAttempts(...) declaration just below — presumably a
// deliberate reuse of an existing message, but confirm the declaration below is
// referenced anywhere (it may be dead) and that the wrapper targets the right id.
static void getAcknowledgeAttempts(Object source) {
LOGGER.getMessagesAcknowledged(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
// NOTE(review): the id sequence skips from 601267 to 601269; 601268 appears
// unused in this region — verify it is not claimed elsewhere before reusing.
@Message(id = 601269, value = "User {0} is getting messages acknowledged attempts on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAcknowledgeAttempts(String user, Object source, Object... args);
static void getRingSize(Object source, Object... args) {
LOGGER.getRingSize(getCaller(), source, arrayToString(args));
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601270, value = "User {0} is getting ring size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getRingSize(String user, Object source, Object... args);
static void isRetroactiveResource(Object source) {
LOGGER.isRetroactiveResource(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601271, value = "User {0} is getting retroactiveResource property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isRetroactiveResource(String user, Object source, Object... args);
static void getDiskStoreUsage(Object source) {
LOGGER.getDiskStoreUsage(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601272, value = "User {0} is getting disk store usage on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDiskStoreUsage(String user, Object source, Object... args);
static void getDiskStoreUsagePercentage(Object source) {
LOGGER.getDiskStoreUsagePercentage(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601273, value = "User {0} is getting disk store usage percentage on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getDiskStoreUsagePercentage(String user, Object source, Object... args);
static void isGroupRebalance(Object source) {
LOGGER.isGroupRebalance(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601274, value = "User {0} is getting group rebalance property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isGroupRebalance(String user, Object source, Object... args);
static void getGroupBuckets(Object source) {
LOGGER.getGroupBuckets(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601275, value = "User {0} is getting group buckets on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getGroupBuckets(String user, Object source, Object... args);
static void getGroupFirstKey(Object source) {
LOGGER.getGroupFirstKey(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601276, value = "User {0} is getting group first key on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getGroupFirstKey(String user, Object source, Object... args);
static void getCurrentDuplicateIdCacheSize(Object source) {
LOGGER.getCurrentDuplicateIdCacheSize(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601509, value = "User {0} is getting currentDuplicateIdCacheSize property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getCurrentDuplicateIdCacheSize(String user, Object source, Object... args);
static void clearDuplicateIdCache(Object source) {
LOGGER.clearDuplicateIdCache(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601510, value = "User {0} is clearing duplicate ID cache on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void clearDuplicateIdCache(String user, Object source, Object... args);
static void getChannelName(Object source) {
LOGGER.getChannelName(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601511, value = "User {0} is getting channelName property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getChannelName(String user, Object source, Object... args);
static void getFileContents(Object source) {
LOGGER.getFileContents(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601512, value = "User {0} is getting fileContents property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getFileContents(String user, Object source, Object... args);
static void getFile(Object source) {
LOGGER.getFile(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601513, value = "User {0} is getting file property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getFile(String user, Object source, Object... args);
static void getPreparedTransactionMessageCount(Object source) {
LOGGER.getPreparedTransactionMessageCount(getCaller(), source);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601514, value = "User {0} is getting preparedTransactionMessageCount property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getPreparedTransactionMessageCount(String user, Object source, Object... args);
/*
 * Hot-path audit logging for message production and consumption. These entries
 * fire once per message, so they are routed to a dedicated MESSAGE_LOGGER that
 * can be enabled independently of the other (management-oriented) audit loggers.
 */
//hot path log using a different logger
// Audit entry: a user produced one message. 'context' is logged verbatim
// alongside the message's string form.
static void coreSendMessage(String user, String messageToString, Object context) {
MESSAGE_LOGGER.logCoreSendMessage(getCaller(user), messageToString, context);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601500, value = "User {0} is sending a message {1}, with Context: {2}", format = Message.Format.MESSAGE_FORMAT)
void logCoreSendMessage(String user, String messageToString, Object context);
//hot path log using a different logger
// Audit entry: a user consumed one message from the named queue. Note this
// overload resolves the user from a Subject rather than a String.
static void coreConsumeMessage(Subject user, String queue) {
MESSAGE_LOGGER.consumeMessage(getCaller(user), queue);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601501, value = "User {0} is consuming a message from {1}", format = Message.Format.MESSAGE_FORMAT)
void consumeMessage(String user, String address);
/*
* This logger is focused on user interaction from the console or thru resource specific functions in the management layer/JMX
* */
static void createAddressSuccess(String name, String routingTypes) {
RESOURCE_LOGGER.createAddressSuccess(getCaller(), name, routingTypes);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601701, value = "User {0} successfully created Address: {1} with routing types {2}", format = Message.Format.MESSAGE_FORMAT)
void createAddressSuccess(String user, String name, String routingTypes);
/**
 * Audit entry: a user attempted to create an address and the operation failed.
 *
 * @param name         the address name that could not be created
 * @param routingTypes the routing types that were requested
 */
static void createAddressFailure(String name, String routingTypes) {
   RESOURCE_LOGGER.createAddressFailure(getCaller(), name, routingTypes);
}

@LogMessage(level = Logger.Level.INFO)
// Fixed grammar in the emitted audit message: "failed to created" -> "failed to create".
@Message(id = 601702, value = "User {0} failed to create Address: {1} with routing types {2}", format = Message.Format.MESSAGE_FORMAT)
void createAddressFailure(String user, String name, String routingTypes);
static void updateAddressSuccess(String name, String routingTypes) {
RESOURCE_LOGGER.updateAddressSuccess(getCaller(), name, routingTypes);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601703, value = "User {0} successfully updated Address: {1} with routing types {2}", format = Message.Format.MESSAGE_FORMAT)
void updateAddressSuccess(String user, String name, String routingTypes);
/**
 * Audit entry: a user attempted to update an address and the operation failed.
 *
 * @param name         the address name that could not be updated
 * @param routingTypes the routing types that were requested
 */
static void updateAddressFailure(String name, String routingTypes) {
   RESOURCE_LOGGER.updateAddressFailure(getCaller(), name, routingTypes);
}

@LogMessage(level = Logger.Level.INFO)
// Fixed copy-paste defect: this failure message previously read
// "successfully updated" — identical to the success message (601703) —
// making success and failure indistinguishable in the audit log.
@Message(id = 601704, value = "User {0} failed to update Address: {1} with routing types {2}", format = Message.Format.MESSAGE_FORMAT)
void updateAddressFailure(String user, String name, String routingTypes);
static void deleteAddressSuccess(String name) {
RESOURCE_LOGGER.deleteAddressSuccess(getCaller(), name);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601705, value = "User {0} successfully deleted Address: {1}", format = Message.Format.MESSAGE_FORMAT)
void deleteAddressSuccess(String user, String name);
/**
 * Audit entry: a user attempted to delete an address and the operation failed.
 *
 * @param name the address name that could not be deleted
 */
static void deleteAddressFailure(String name) {
   RESOURCE_LOGGER.deleteAddressFailure(getCaller(), name);
}

@LogMessage(level = Logger.Level.INFO)
// Fixed grammar in the emitted audit message: "failed to deleted" -> "failed to delete".
@Message(id = 601706, value = "User {0} failed to delete Address: {1}", format = Message.Format.MESSAGE_FORMAT)
void deleteAddressFailure(String user, String name);
static void createQueueSuccess(String name, String address, String routingType) {
RESOURCE_LOGGER.createQueueSuccess(getCaller(), name, address, routingType);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601707, value = "User {0} successfully created Queue: {1} on Address: {2} with routing type {3}", format = Message.Format.MESSAGE_FORMAT)
void createQueueSuccess(String user, String name, String address, String routingType);
static void createQueueFailure(String name, String address, String routingType) {
RESOURCE_LOGGER.createQueueFailure(getCaller(), name, address, routingType);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601708, value = "User {0} failed to create Queue: {1} on Address: {2} with routing type {3}", format = Message.Format.MESSAGE_FORMAT)
void createQueueFailure(String user, String name, String address, String routingType);
static void updateQueueSuccess(String name, String routingType) {
RESOURCE_LOGGER.updateQueueSuccess(getCaller(), name, routingType);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601709, value = "User {0} successfully updated Queue: {1} with routing type {2}", format = Message.Format.MESSAGE_FORMAT)
void updateQueueSuccess(String user, String name, String routingType);
static void updateQueueFailure(String name, String routingType) {
RESOURCE_LOGGER.updateQueueFailure(getCaller(), name, routingType);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601710, value = "User {0} failed to update Queue: {1} with routing type {2}", format = Message.Format.MESSAGE_FORMAT)
void updateQueueFailure(String user, String name, String routingType);
static void destroyQueueSuccess(String name) {
RESOURCE_LOGGER.destroyQueueSuccess(getCaller(), name);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601711, value = "User {0} successfully deleted Queue: {1}", format = Message.Format.MESSAGE_FORMAT)
void destroyQueueSuccess(String user, String name);
static void destroyQueueFailure(String name) {
RESOURCE_LOGGER.destroyQueueFailure(getCaller(), name);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601712, value = "User {0} failed to delete Queue: {1}", format = Message.Format.MESSAGE_FORMAT)
void destroyQueueFailure(String user, String name);
static void removeMessagesSuccess(int removed, String queue) {
RESOURCE_LOGGER.removeMessagesSuccess(getCaller(), removed, queue);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601713, value = "User {0} has removed {1} messages from Queue: {2}", format = Message.Format.MESSAGE_FORMAT)
void removeMessagesSuccess(String user, int removed, String queue);
static void removeMessagesFailure(String queue) {
RESOURCE_LOGGER.removeMessagesFailure(getCaller(), queue);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601714, value = "User {0} failed to remove messages from Queue: {1}", format = Message.Format.MESSAGE_FORMAT)
void removeMessagesFailure(String user, String queue);
// Audit entry: authentication/authorization succeeded for the given Subject.
static void userSuccesfullyLoggedInAudit(Subject subject) {
RESOURCE_LOGGER.userSuccesfullyLoggedIn(getCaller(subject));
}
// Audit entry: authentication/authorization succeeded; the user is resolved
// from the current calling context instead of an explicit Subject.
static void userSuccesfullyLoggedInAudit() {
RESOURCE_LOGGER.userSuccesfullyLoggedIn(getCaller());
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601715, value = "User {0} successfully authorized", format = Message.Format.MESSAGE_FORMAT)
void userSuccesfullyLoggedIn(String caller);
// Audit entry: authentication/authorization failed, with a human-readable reason.
static void userFailedLoggedInAudit(String reason) {
RESOURCE_LOGGER.userFailedLoggedIn(getCaller(), reason);
}
// Same failure entry, but the user is resolved from an explicit Subject.
static void userFailedLoggedInAudit(Subject subject, String reason) {
RESOURCE_LOGGER.userFailedLoggedIn(getCaller(subject), reason);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601716, value = "User {0} failed authorization, reason: {1}", format = Message.Format.MESSAGE_FORMAT)
void userFailedLoggedIn(String user, String reason);
static void objectInvokedSuccessfully(ObjectName objectName, String operationName) {
RESOURCE_LOGGER.objectInvokedSuccessfully(getCaller(), objectName, operationName);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601717, value = "User {0} accessed {2} on management object {1}", format = Message.Format.MESSAGE_FORMAT)
void objectInvokedSuccessfully(String caller, ObjectName objectName, String operationName);
static void objectInvokedFailure(ObjectName objectName, String operationName) {
RESOURCE_LOGGER.objectInvokedFailure(getCaller(), objectName, operationName);
}
@LogMessage(level = Logger.Level.INFO)
@Message(id = 601718, value = "User {0} does not have correct role to access {2} on management object {1}", format = Message.Format.MESSAGE_FORMAT)
void objectInvokedFailure(String caller, ObjectName objectName, String operationName);
// ---- Queue pause audit events (ids 601719-601720). ----

static void pauseQueueSuccess(String queueName) {
   RESOURCE_LOGGER.pauseQueueSuccess(getCaller(), queueName);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601719, value = "User {0} has paused queue {1}", format = Message.Format.MESSAGE_FORMAT)
void pauseQueueSuccess(String user, String queueName);

static void pauseQueueFailure(String queueName) {
   RESOURCE_LOGGER.pauseQueueFailure(getCaller(), queueName);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601720, value = "User {0} failed to pause queue {1}", format = Message.Format.MESSAGE_FORMAT)
void pauseQueueFailure(String user, String queueName);
// ---- Queue resume audit events (ids 601721-601722). ----

static void resumeQueueSuccess(String queueName) {
   RESOURCE_LOGGER.resumeQueueSuccess(getCaller(), queueName);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601721, value = "User {0} has resumed queue {1}", format = Message.Format.MESSAGE_FORMAT)
void resumeQueueSuccess(String user, String queueName);

static void resumeQueueFailure(String queueName) {
   // Bug fix: this previously delegated to pauseQueueFailure(), which emitted message
   // id 601720 ("failed to pause queue") for a failed *resume* operation.
   RESOURCE_LOGGER.resumeQueueFailure(getCaller(), queueName);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601722, value = "User {0} failed to resume queue {1}", format = Message.Format.MESSAGE_FORMAT)
void resumeQueueFailure(String user, String queueName);
// ---- Message send / browse and divert audit events (ids 601723-601727). ----

static void sendMessageSuccess(String queueName, String user) {
   RESOURCE_LOGGER.sendMessageSuccess(getCaller(), queueName, user);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601723, value = "User {0} sent message to {1} as user {2}", format = Message.Format.MESSAGE_FORMAT)
void sendMessageSuccess(String user, String queueName, String sendUser);

static void sendMessageFailure(String queueName, String user) {
   RESOURCE_LOGGER.sendMessageFailure(getCaller(), queueName, user);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601724, value = "User {0} failed to send message to {1} as user {2}", format = Message.Format.MESSAGE_FORMAT)
void sendMessageFailure(String user, String queueName, String sendUser);

static void browseMessagesSuccess(String queueName, int numMessages) {
   RESOURCE_LOGGER.browseMessagesSuccess(getCaller(), queueName, numMessages);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601725, value = "User {0} browsed {2} messages from queue {1}", format = Message.Format.MESSAGE_FORMAT)
void browseMessagesSuccess(String user, String queueName, int numMessages);

static void browseMessagesFailure(String queueName) {
   RESOURCE_LOGGER.browseMessagesFailure(getCaller(), queueName);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601726, value = "User {0} failed to browse messages from queue {1}", format = Message.Format.MESSAGE_FORMAT)
void browseMessagesFailure(String user, String queueName);

// Note: unlike the RESOURCE_LOGGER events above, this delegates to LOGGER and flattens
// the varargs via arrayToString() (both defined outside this view -- TODO confirm intent).
static void updateDivert(Object source, Object... args) {
   LOGGER.updateDivert(getCaller(), source, arrayToString(args));
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601727, value = "User {0} is updating a divert on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void updateDivert(String user, Object source, Object... args);
// ---- Enabled-state audit events (ids 601728-601729). ----

static void isEnabled(Object source) {
   // NOTE(review): the message for 601728 has a {2} placeholder but no varargs are
   // passed here, so the formatted output likely renders "{2}" literally -- confirm.
   LOGGER.isEnabled(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601728, value = "User {0} is getting enabled property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isEnabled(String user, Object source, Object... args);

static void disable(Object source, Object... args) {
   LOGGER.disable(getCaller(), source, arrayToString(args));
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601729, value = "User {0} is disabling on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void disable(String user, Object source, Object... args);
// Audits an "enable" operation on the given resource (id 601730).
static void enable(Object source) {
   // Bug fix: this previously delegated to LOGGER.resume(...), emitting the wrong
   // audit message for an enable operation.
   LOGGER.enable(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601730, value = "User {0} is enabling on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void enable(String user, Object source, Object... args);
// ---- Address pause / resume audit events (ids 601731-601734). ----

static void pauseAddressSuccess(String queueName) {
   RESOURCE_LOGGER.pauseAddressSuccess(getCaller(), queueName);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601731, value = "User {0} has paused address {1}", format = Message.Format.MESSAGE_FORMAT)
void pauseAddressSuccess(String user, String queueName);

static void pauseAddressFailure(String queueName) {
   RESOURCE_LOGGER.pauseAddressFailure(getCaller(), queueName);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601732, value = "User {0} failed to pause address {1}", format = Message.Format.MESSAGE_FORMAT)
void pauseAddressFailure(String user, String queueName);

static void resumeAddressSuccess(String queueName) {
   RESOURCE_LOGGER.resumeAddressSuccess(getCaller(), queueName);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601733, value = "User {0} has resumed address {1}", format = Message.Format.MESSAGE_FORMAT)
void resumeAddressSuccess(String user, String queueName);

static void resumeAddressFailure(String queueName) {
   RESOURCE_LOGGER.resumeAddressFailure(getCaller(), queueName);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601734, value = "User {0} failed to resume address {1}", format = Message.Format.MESSAGE_FORMAT)
void resumeAddressFailure(String user, String queueName);
// ---- Miscellaneous attribute / broker-connection audit events (ids 601735-601745). ----
// All follow the same delegate-to-LOGGER-with-getCaller() pattern described above.

static void isGroupRebalancePauseDispatch(Object source) {
   LOGGER.isGroupRebalancePauseDispatch(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601735, value = "User {0} is getting group rebalance pause dispatch property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void isGroupRebalancePauseDispatch(String user, Object source, Object... args);

static void getAuthenticationCacheSize(Object source) {
   LOGGER.getAuthenticationCacheSize(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601736, value = "User {0} is getting authentication cache size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAuthenticationCacheSize(String user, Object source, Object... args);

static void getAuthorizationCacheSize(Object source) {
   LOGGER.getAuthorizationCacheSize(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601737, value = "User {0} is getting authorization cache size on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAuthorizationCacheSize(String user, Object source, Object... args);

static void listBrokerConnections() {
   LOGGER.listBrokerConnections(getCaller());
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601738, value = "User {0} is requesting a list of broker connections", format = Message.Format.MESSAGE_FORMAT)
void listBrokerConnections(String user);

static void stopBrokerConnection(String name) {
   LOGGER.stopBrokerConnection(getCaller(), name);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601739, value = "User {0} is requesting to stop broker connection {1}", format = Message.Format.MESSAGE_FORMAT)
void stopBrokerConnection(String user, String name);

static void startBrokerConnection(String name) {
   LOGGER.startBrokerConnection(getCaller(), name);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601740, value = "User {0} is requesting to start broker connection {1}", format = Message.Format.MESSAGE_FORMAT)
void startBrokerConnection(String user, String name);

static void getAddressCount(Object source) {
   LOGGER.getAddressCount(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601741, value = "User {0} is getting address count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getAddressCount(String user, Object source, Object... args);

static void getQueueCount(Object source) {
   LOGGER.getQueueCount(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601742, value = "User {0} is getting the queue count on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void getQueueCount(String user, Object source, Object... args);

static void lastValueKey(Object source) {
   LOGGER.lastValueKey(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601743, value = "User {0} is getting last-value-key property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void lastValueKey(String user, Object source, Object... args);

static void consumersBeforeDispatch(Object source) {
   LOGGER.consumersBeforeDispatch(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601744, value = "User {0} is getting consumers-before-dispatch property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void consumersBeforeDispatch(String user, Object source, Object... args);

static void delayBeforeDispatch(Object source) {
   LOGGER.delayBeforeDispatch(getCaller(), source);
}

@LogMessage(level = Logger.Level.INFO)
@Message(id = 601745, value = "User {0} is getting delay-before-dispatch property on target resource: {1} {2}", format = Message.Format.MESSAGE_FORMAT)
void delayBeforeDispatch(String user, Object source, Object... args);
}
| apache-2.0 |
GreatfeatServices/gf-mobile-app | edward-bryan-abergas/android-exam/app/src/main/java/com/greatfeat/greatfeat/api/OnBindViewListener.java | 167 | package com.greatfeat.greatfeat.api;
/**
* Created by edwardbryanabergas on 16/06/2017.
*/
/**
 * Callback notified when an item view is bound at a given adapter position
 * (presumably by a RecyclerView/ListView-style adapter -- confirm against callers).
 *
 * <p>Single-abstract-method interface; annotated {@code @FunctionalInterface} so it can
 * be implemented with a lambda and the compiler enforces the single-method contract.
 */
@FunctionalInterface
public interface OnBindViewListener {

    /**
     * Called when the view at {@code position} is bound.
     *
     * @param position zero-based adapter position of the bound item
     */
    void onBind(int position);
}
| apache-2.0 |
ahartley39/DataflowJavaSDK | sdk/src/main/java/com/google/cloud/dataflow/sdk/io/AvroSource.java | 25652 | /*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.io;
import com.google.cloud.dataflow.sdk.annotations.Experimental;
import com.google.cloud.dataflow.sdk.coders.AvroCoder;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.util.IOChannelUtils;
import com.google.cloud.dataflow.sdk.values.PCollection;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileConstants;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.reflect.ReflectData;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.apache.commons.compress.compressors.snappy.SnappyCompressorInputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PushbackInputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.zip.Inflater;
import java.util.zip.InflaterInputStream;
// JAVADOCSTYLE OFF
/**
* A {@code FileBasedSource} for reading Avro-format files.
*
* <p>To read a {@link PCollection} of objects from one or more Avro files, use
* {@link AvroSource#from} to specify the path(s) of the files to read. The {@code AvroSource} that
* is returned will read objects of type {@code GenericRecord} with the schema(s) that were written
* at file creation. To further configure the {@code AvroSource} to read with a user-defined schema,
* or to return records of a type other than {@code GenericRecord}, use
* {@link AvroSource#withSchema(Schema)} (using a {@code Schema} object),
* {@link AvroSource#withSchema(String)} (using a JSON schema), or
* {@link AvroSource#withSchema(Class)} (to return objects of the class specified).
*
* <p>An {@code AvroSource} can be read from using the {@link Read} transform. For example:
*
* <pre>
* {@code
 * AvroSource<MyType> source = AvroSource.from(file.toPath()).withSchema(MyType.class);
 * PCollection<MyType> records = Read.from(source);
* }
* </pre>
*
* <p>The {@link AvroSource#readFromFileWithClass(String, Class)} method is a convenience method
* that returns a read transform. For example:
*
* <pre>
* {@code
 * PCollection<MyType> records = AvroSource.readFromFileWithClass(file.toPath(), MyType.class);
* }
* </pre>
*
* <p>This class's implementation is based on the <a
* href="https://avro.apache.org/docs/1.7.7/spec.html">Avro 1.7.7</a> specification and implements
* parsing of some parts of Avro Object Container Files. The rationale for doing so is that the Avro
* API does not provide efficient ways of computing the precise offsets of blocks within a file,
* which is necessary to support dynamic work rebalancing. However, whenever it is possible to use
* the Avro API in a way that supports maintaining precise offsets, this class uses the Avro API.
*
* <p>Avro Object Container files store records in blocks. Each block contains a collection of
* records. Blocks may be encoded (e.g., with bzip2, deflate, snappy, etc.). Blocks are delineated
* from one another by a 16-byte sync marker.
*
* <p>An {@code AvroSource} for a subrange of a single file contains records in the blocks such that
* the start offset of the block is greater than or equal to the start offset of the source and less
* than the end offset of the source.
*
* <p>To use XZ-encoded Avro files, please include an explicit dependency on {@code xz-1.5.jar},
* which has been marked as optional in the Maven {@code sdk/pom.xml} for Google Cloud Dataflow:
* <pre>
* {@code
* <dependency>
* <groupId>org.tukaani</groupId>
* <artifactId>xz</artifactId>
* <version>1.5</version>
* </dependency>
* }
* </pre>
*
* @param <T> The type of records to be read from the source.
*
* <p><h3>Permissions</h3>
* Permission requirements depend on the
* {@link com.google.cloud.dataflow.sdk.runners.PipelineRunner PipelineRunner} that is
* used to execute the Dataflow job. Please refer to the documentation of corresponding
* {@code PipelineRunner}s for more details.
*/
// JAVADOCSTYLE ON
@Experimental(Experimental.Kind.SOURCE_SINK)
public class AvroSource<T> extends BlockBasedSource<T> {
// Default minimum bundle size (chosen as two default-size Avro blocks to attempt to
// ensure that every source has at least one block of records).
// The default sync interval is 64k.
static final long DEFAULT_MIN_BUNDLE_SIZE = 2 * DataFileConstants.DEFAULT_SYNC_INTERVAL;

// The JSON schema used to encode records.
private final String schema;

// The type of the records contained in the file.
private final Class<T> type;

// The following metadata fields are not user-configurable. They are extracted from the object
// container file header upon subsource creation.

// The codec used to encode the blocks in the Avro file. String value drawn from those in
// https://avro.apache.org/docs/1.7.7/api/java/org/apache/avro/file/CodecFactory.html
private final String codec;

// The object container file's 16-byte sync marker.
private final byte[] syncMarker;

// Default output coder, lazily initialized (see getDefaultOutputCoder). Transient:
// not serialized with the source, rebuilt on demand after deserialization.
private transient AvroCoder<T> coder = null;
/**
 * Creates a {@code Read} transform that will read from an {@code AvroSource} that is configured
 * to read records of the given type from a file pattern.
 *
 * @param filePattern the file name or pattern ("glob") to read from
 * @param clazz the class of the records; the Avro schema is derived from it via reflection
 * @return a bounded read transform producing records of type {@code T}
 */
public static <T> Read.Bounded<T> readFromFileWithClass(String filePattern, Class<T> clazz) {
  // Diamond operator for consistency with the rest of the file (e.g. from()).
  return Read.from(new AvroSource<>(filePattern, DEFAULT_MIN_BUNDLE_SIZE,
      ReflectData.get().getSchema(clazz).toString(), clazz, null, null));
}
/**
 * Creates an {@code AvroSource} that reads from the given file name or pattern ("glob"). The
 * returned source can be further configured by calling {@code withSchema} to return a type other
 * than {@code GenericRecord}.
 *
 * @param fileNameOrPattern the file name or glob pattern to read
 */
public static AvroSource<GenericRecord> from(String fileNameOrPattern) {
  return new AvroSource<>(
      fileNameOrPattern, DEFAULT_MIN_BUNDLE_SIZE, null, GenericRecord.class, null, null);
}

/**
 * Returns an {@code AvroSource} that's like this one but reads files containing records that
 * conform to the given schema.
 *
 * @param schema the reader schema, as a JSON string
 */
public AvroSource<GenericRecord> withSchema(String schema) {
  return new AvroSource<>(
      getFileOrPatternSpec(), getMinBundleSize(), schema, GenericRecord.class, codec, syncMarker);
}

/**
 * Returns an {@code AvroSource} that's like this one but reads files containing records that
 * conform to the given schema.
 *
 * @param schema the reader schema, as a parsed {@link Schema} object
 */
public AvroSource<GenericRecord> withSchema(Schema schema) {
  return new AvroSource<>(getFileOrPatternSpec(), getMinBundleSize(), schema.toString(),
      GenericRecord.class, codec, syncMarker);
}

/**
 * Returns an {@code AvroSource} that's like this one but reads files containing records of the
 * type of the given class. The schema is derived from the class via reflection.
 *
 * @param clazz the class of the records to produce
 */
public <X> AvroSource<X> withSchema(Class<X> clazz) {
  return new AvroSource<X>(getFileOrPatternSpec(), getMinBundleSize(),
      ReflectData.get().getSchema(clazz).toString(), clazz, codec, syncMarker);
}

/**
 * Returns an {@code AvroSource} that's like this one but uses the supplied minimum bundle size.
 * Refer to {@link OffsetBasedSource} for a description of {@code minBundleSize} and its use.
 *
 * @param minBundleSize the minimum bundle size, in bytes
 */
public AvroSource<T> withMinBundleSize(long minBundleSize) {
  return new AvroSource<T>(
      getFileOrPatternSpec(), minBundleSize, schema, type, codec, syncMarker);
}
// Constructor for a source covering a whole file pattern. codec/syncMarker may be null;
// they are filled in lazily when a per-file subrange source is created.
private AvroSource(String fileNameOrPattern, long minBundleSize, String schema, Class<T> type,
    String codec, byte[] syncMarker) {
  super(fileNameOrPattern, minBundleSize);
  this.schema = schema;
  this.codec = codec;
  this.syncMarker = syncMarker;
  this.type = type;
}

// Constructor for a source covering the byte range [startOffset, endOffset) of a single file.
private AvroSource(String fileName, long minBundleSize, long startOffset, long endOffset,
    String schema, Class<T> type, String codec, byte[] syncMarker) {
  super(fileName, minBundleSize, startOffset, endOffset);
  this.schema = schema;
  this.codec = codec;
  this.syncMarker = syncMarker;
  this.type = type;
}

@Override
public void validate() {
  // AvroSource objects do not need to be configured with more than a file pattern. Overridden to
  // make this explicit.
  super.validate();
}
/**
 * Avro file metadata. Visible for testing.
 *
 * <p>Immutable snapshot of the Object Container File header: the 16-byte sync marker,
 * the codec name, and the JSON reader schema. Fields are {@code final} since they are
 * only ever assigned in the constructor.
 */
static class Metadata {
  final byte[] syncMarker;
  final String codec;
  final String schema;

  public Metadata(byte[] syncMarker, String codec, String schema) {
    this.syncMarker = syncMarker;
    this.codec = codec;
    this.schema = schema;
  }
}
/**
 * Reads the {@code Metadata} from the header of an Avro file. Throws an IOException if the file
 * is an invalid format.
 *
 * <p>This method parses the header of an Avro
 * <a href="https://avro.apache.org/docs/1.7.7/spec.html#Object+Container+Files">
 * Object Container File</a>: the 4-byte magic number, the file metadata map (which carries
 * the codec name and JSON schema), and the trailing 16-byte sync marker.
 *
 * @param fileName the file to read, resolved via {@code IOChannelUtils}
 * @return the parsed metadata (sync marker, codec, schema)
 * @throws IOException if the file cannot be read or lacks the Avro file signature
 */
static Metadata readMetadataFromFile(String fileName) throws IOException {
  String codec = null;
  String schema = null;
  byte[] syncMarker;
  try (InputStream stream =
      Channels.newInputStream(IOChannelUtils.getFactory(fileName).open(fileName))) {
    BinaryDecoder decoder = DecoderFactory.get().binaryDecoder(stream, null);

    // Read and verify the magic number.
    byte[] magic = new byte[DataFileConstants.MAGIC.length];
    decoder.readFixed(magic);
    if (!Arrays.equals(magic, DataFileConstants.MAGIC)) {
      throw new IOException("Missing Avro file signature: " + fileName);
    }

    // Read the metadata map to find the codec and schema. Map entries arrive in (possibly
    // multiple) batches; readMapStart()/mapNext() return the size of each batch.
    ByteBuffer valueBuffer = ByteBuffer.allocate(512);
    long numRecords = decoder.readMapStart();
    while (numRecords > 0) {
      for (long recordIndex = 0; recordIndex < numRecords; recordIndex++) {
        String key = decoder.readString();
        // readBytes() clears the buffer and returns a buffer where:
        // - position is the start of the bytes read
        // - limit is the end of the bytes read
        valueBuffer = decoder.readBytes(valueBuffer);
        byte[] bytes = new byte[valueBuffer.remaining()];
        valueBuffer.get(bytes);
        if (key.equals(DataFileConstants.CODEC)) {
          // StandardCharsets.UTF_8 instead of the "UTF-8" name: no checked
          // UnsupportedEncodingException path and no charset lookup per call.
          codec = new String(bytes, StandardCharsets.UTF_8);
        } else if (key.equals(DataFileConstants.SCHEMA)) {
          schema = new String(bytes, StandardCharsets.UTF_8);
        }
      }
      numRecords = decoder.mapNext();
    }
    if (codec == null) {
      // Per the Avro spec, an absent codec entry means the "null" (uncompressed) codec.
      codec = DataFileConstants.NULL_CODEC;
    }

    // Finally, read the sync marker.
    syncMarker = new byte[DataFileConstants.SYNC_SIZE];
    decoder.readFixed(syncMarker);
  }
  return new Metadata(syncMarker, codec, schema);
}
// Creates a source for the byte range [start, end) of a single file, lazily reading the
// file's codec/sync-marker (and schema, if none was configured) from its header.
@Override
public AvroSource<T> createForSubrangeOfFile(String fileName, long start, long end) {
  byte[] syncMarker = this.syncMarker;
  String codec = this.codec;
  String schema = this.schema;
  // codec and syncMarker are initially null when the source is created, as they differ
  // across input files and must be read from the file. Here, when we are creating a source
  // for a subrange of a file, we can initialize these values. When the resulting AvroSource
  // is further split, they do not need to be read again.
  if (codec == null || syncMarker == null) {
    Metadata metadata;
    try {
      metadata = readMetadataFromFile(fileName);
    } catch (IOException e) {
      throw new RuntimeException("Error reading metadata from file " + fileName, e);
    }
    codec = metadata.codec;
    syncMarker = metadata.syncMarker;
    // If the source was created with a null schema, use the schema that we read from the file's
    // metadata.
    if (schema == null) {
      schema = metadata.schema;
    }
  }
  return new AvroSource<T>(
      fileName, getMinBundleSize(), start, end, schema, type, codec, syncMarker);
}
// Creates a reader for a single file (or file subrange) represented by this source.
@Override
public AvroReader<T> createSingleFileReader(PipelineOptions options) {
  return new AvroReader<T>(this);
}

// Avro sources do not produce key-sorted output.
@Override
public boolean producesSortedKeys(PipelineOptions options) throws Exception {
  return false;
}

// Lazily builds (and caches) the AvroCoder for this source's type and schema.
// NOTE(review): the lazy initialization is not synchronized; this is presumably safe
// because each reader uses its own deserialized source instance -- confirm before
// sharing a source across threads.
@Override
public AvroCoder<T> getDefaultOutputCoder() {
  if (coder == null) {
    Schema.Parser parser = new Schema.Parser();
    coder = AvroCoder.of(type, parser.parse(schema));
  }
  return coder;
}

// Returns the JSON reader schema, or null if not yet resolved from a file header.
public String getSchema() {
  return schema;
}

// 16-byte sync marker read from the file header; null until a subrange source is created.
private byte[] getSyncMarker() {
  return syncMarker;
}

// Codec name from the file header; null until a subrange source is created.
private String getCodec() {
  return codec;
}
/**
 * A {@link BlockBasedSource.Block} of Avro records. Visible for testing.
 *
 * <p>Wraps one (possibly compressed) block payload and iterates its records with a
 * {@code DatumReader} over a {@code BinaryDecoder}.
 *
 * @param <T> The type of records stored in the block.
 */
@Experimental(Experimental.Kind.SOURCE_SINK)
static class AvroBlock<T> extends Block<T> {
  // The number of records in the block, as declared by the block header.
  private final long numRecords;

  // The current record in the block.
  private T currentRecord;

  // The index of the current record in the block.
  private long currentRecordIndex = 0;

  // A DatumReader to read records from the block.
  private final DatumReader<T> reader;

  // A BinaryDecoder used by the reader to decode records.
  private final BinaryDecoder decoder;

  /**
   * Decodes a byte array as an InputStream. The byte array may be compressed using some
   * codec. Reads from the returned stream will result in decompressed bytes.
   *
   * <p>This supports the same codecs as Avro's {@code CodecFactory}, namely those defined in
   * <a
   * href="https://avro.apache.org/docs/1.7.7/api/java/org/apache/avro/file/DataFileConstants.html">
   * {@code DataFileConstants}</a>.
   * <ul>
   * <li>"snappy" : Google's Snappy compression
   * <li>"deflate" : deflate compression
   * <li>"bzip2" : Bzip2 compression
   * <li>"xz" : xz compression
   * <li>"null" (the string, not the value): Uncompressed data
   * </ul>
   */
  private static InputStream decodeAsInputStream(byte[] data, String codec) throws IOException {
    ByteArrayInputStream byteStream = new ByteArrayInputStream(data);
    switch (codec) {
      case DataFileConstants.SNAPPY_CODEC:
        return new SnappyCompressorInputStream(byteStream);
      case DataFileConstants.DEFLATE_CODEC:
        // nowrap == true: Do not expect ZLIB header or checksum, as Avro does not write them.
        Inflater inflater = new Inflater(true);
        return new InflaterInputStream(byteStream, inflater);
      case DataFileConstants.XZ_CODEC:
        return new XZCompressorInputStream(byteStream);
      case DataFileConstants.BZIP2_CODEC:
        return new BZip2CompressorInputStream(byteStream);
      case DataFileConstants.NULL_CODEC:
        return byteStream;
      default:
        throw new IllegalArgumentException("Unsupported codec: " + codec);
    }
  }

  // Builds a block over the given (possibly compressed) payload bytes.
  AvroBlock(byte[] data, long numRecords, AvroSource<T> source) throws IOException {
    this.numRecords = numRecords;
    this.reader = source.getDefaultOutputCoder().createDatumReader();
    this.decoder =
        DecoderFactory.get().binaryDecoder(decodeAsInputStream(data, source.getCodec()), null);
  }

  @Override
  public T getCurrentRecord() {
    return currentRecord;
  }

  // Advances to the next record; returns false once numRecords records have been read.
  @Override
  public boolean readNextRecord() throws IOException {
    if (currentRecordIndex >= numRecords) {
      return false;
    }
    currentRecord = reader.read(null, decoder);
    currentRecordIndex++;
    return true;
  }

  // Fraction of the block's records already consumed.
  // NOTE(review): yields NaN if numRecords == 0 -- presumably Avro blocks always contain
  // at least one record; confirm against the spec before relying on this.
  @Override
  public double getFractionOfBlockConsumed() {
    return ((double) currentRecordIndex) / numRecords;
  }
}
/**
* A {@link BlockBasedSource.BlockBasedReader} for reading blocks from Avro files.
*
* <p>An Avro Object Container File consists of a header followed by a 16-bit sync marker
* and then a sequence of blocks, where each block begins with two encoded longs representing
* the total number of records in the block and the block's size in bytes, followed by the
* block's (optionally-encoded) records. Each block is terminated by a 16-bit sync marker.
*
* <p>Here, we consider the sync marker that precedes a block to be its offset, as this allows
* a reader that begins reading at that offset to detect the sync marker and the beginning of
* the block.
*
* @param <T> The type of records contained in the block.
*/
@Experimental(Experimental.Kind.SOURCE_SINK)
public static class AvroReader<T> extends BlockBasedReader<T> {
// The current block.
private AvroBlock<T> currentBlock;

// Offset of the block (includes its preceding sync marker).
private long currentBlockOffset = 0;

// Size of the current block, in bytes (payload only; excludes header and sync marker).
private long currentBlockSizeBytes = 0;

// Current offset within the stream.
private long currentOffset = 0;

// Stream used to read from the underlying file.
// A pushback stream is used to restore bytes buffered during seeking/decoding.
private PushbackInputStream stream;

// Small buffer for reading encoded values from the stream.
// The maximum size of an encoded long is 10 bytes, and this buffer will be used to read two.
private final byte[] readBuffer = new byte[20];

// Decoder to decode binary-encoded values from the buffer.
private BinaryDecoder decoder;

// Creates a reader over the given source (a single file or file subrange).
public AvroReader(AvroSource<T> source) {
  super(source);
}

@Override
public AvroSource<T> getCurrentSource() {
  return (AvroSource<T>) super.getCurrentSource();
}
/**
 * Advances past the next sync marker and reads the following block: its header (record
 * count and byte size, two varint-encoded longs) and its payload.
 *
 * <p>Bug fix: {@code InputStream.read(byte[])} may return fewer bytes than requested,
 * so both the header buffer and the block payload are now filled in a loop. The previous
 * implementation issued single {@code read()} calls and could hand a partially-filled
 * payload buffer to the decoder.
 *
 * @return false when no further block exists (EOF after the last sync marker)
 */
@Override
public boolean readNextBlock() throws IOException {
  // Seek to the next sync marker, if one exists.
  currentOffset += advancePastNextSyncMarker(stream, getCurrentSource().getSyncMarker());

  // The offset of the current block includes its preceding sync marker.
  currentBlockOffset = currentOffset - getCurrentSource().getSyncMarker().length;

  // Fill the header buffer, looping because read() may return fewer bytes than requested.
  int read = 0;
  while (read < readBuffer.length) {
    int n = stream.read(readBuffer, read, readBuffer.length - read);
    if (n < 0) {
      break;
    }
    read += n;
  }
  // We reached the last sync marker in the file.
  if (read <= 0) {
    return false;
  }

  // Decode the block header. Bound the decoder to the bytes actually read so that the
  // header-size computation below stays correct on a short (end-of-file) read.
  decoder = DecoderFactory.get().binaryDecoder(readBuffer, 0, read, decoder);
  long numRecords = decoder.readLong();
  long blockSize = decoder.readLong();

  // The decoder buffers data internally; the unconsumed bytes it reports are payload bytes
  // that were pulled into the header buffer and must be pushed back onto the stream.
  int headerSize = read - decoder.inputStream().available();
  stream.unread(readBuffer, headerSize, read - headerSize);

  // Read exactly blockSize bytes of payload. Block sizes permitted by the Avro
  // specification are [32, 2^30], so this narrowing is ok.
  byte[] data = new byte[(int) blockSize];
  int filled = 0;
  while (filled < data.length) {
    int n = stream.read(data, filled, data.length - filled);
    if (n < 0) {
      throw new IOException(
          "Unexpected end of stream: expected a block of " + blockSize + " bytes but read only "
              + filled);
    }
    filled += n;
  }
  currentBlock = new AvroBlock<>(data, numRecords, getCurrentSource());
  currentBlockSizeBytes = blockSize;

  // Update current offset with the number of bytes we read to get the next block.
  currentOffset += headerSize + blockSize;
  return true;
}
@Override
public AvroBlock<T> getCurrentBlock() {
  return currentBlock;
}

// Offset of the sync marker preceding the current block.
@Override
public long getCurrentBlockOffset() {
  return currentBlockOffset;
}

// Size in bytes of the current block's payload.
@Override
public long getCurrentBlockSize() {
  return currentBlockSizeBytes;
}

/**
 * Creates a {@code PushbackInputStream} that has a large enough pushback buffer to be able
 * to push back the syncBuffer and the readBuffer.
 */
private PushbackInputStream createStream(ReadableByteChannel channel) {
  return new PushbackInputStream(
      Channels.newInputStream(channel),
      getCurrentSource().getSyncMarker().length + readBuffer.length);
}

/**
 * Starts reading from the provided channel. Assumes that the channel is already seeked to
 * the source's start offset.
 */
@Override
protected void startReading(ReadableByteChannel channel) throws IOException {
  stream = createStream(channel);
  currentOffset = getCurrentSource().getStartOffset();
}
/**
 * Advances to the first byte after the next occurrence of the sync marker in the
 * stream when reading from the current offset. Returns the number of bytes consumed
 * from the stream. Note that this method requires a PushbackInputStream with a buffer
 * at least as big as the marker it is seeking for.
 *
 * <p>If no marker is found before EOF, all bytes read are counted as consumed and the
 * caller observes EOF on the next read.
 */
static long advancePastNextSyncMarker(PushbackInputStream stream, byte[] syncMarker)
    throws IOException {
  Seeker seeker = new Seeker(syncMarker);
  byte[] syncBuffer = new byte[syncMarker.length];
  long totalBytesConsumed = 0;
  // Seek until either a sync marker is found or we reach the end of the file.
  int mark = -1; // Position of the last byte in the sync marker.
  int read; // Number of bytes read.
  do {
    read = stream.read(syncBuffer);
    if (read >= 0) {
      mark = seeker.find(syncBuffer, read);
      // Update the currentOffset with the number of bytes read.
      totalBytesConsumed += read;
    }
  } while (mark < 0 && read > 0);

  // If the sync marker was found, unread block data and update the current offsets.
  if (mark >= 0) {
    // The current offset after this call should be just past the sync marker, so we should
    // unread the remaining buffer contents and update the currentOffset accordingly.
    stream.unread(syncBuffer, mark + 1, read - (mark + 1));
    totalBytesConsumed = totalBytesConsumed - (read - (mark + 1));
  }
  return totalBytesConsumed;
}
/**
* A {@code Seeker} looks for a given marker within a byte buffer. Uses naive string matching
* with a sliding window, as sync markers are small and random.
*/
static class Seeker {
// The marker to search for.
private byte[] marker;
// Buffer used for the sliding window.
private byte[] searchBuffer;
// Number of bytes available to be matched in the buffer.
private int available = 0;
/**
* Create a {@code Seeker} that looks for the given marker.
*/
public Seeker(byte[] marker) {
this.marker = marker;
this.searchBuffer = new byte[marker.length];
}
/**
* Find the marker in the byte buffer. Returns the index of the end of the marker in the
* buffer. If the marker is not found, returns -1.
*
* <p>State is maintained between calls. If the marker was partially matched, a subsequent
* call to find will resume matching the marker.
*
* @param buffer
* @return the index of the end of the marker within the buffer, or -1 if the buffer was not
* found.
*/
public int find(byte[] buffer, int length) {
for (int i = 0; i < length; i++) {
System.arraycopy(searchBuffer, 1, searchBuffer, 0, searchBuffer.length - 1);
searchBuffer[searchBuffer.length - 1] = buffer[i];
available = Math.min(available + 1, searchBuffer.length);
if (ByteBuffer.wrap(searchBuffer, searchBuffer.length - available, available)
.equals(ByteBuffer.wrap(marker))) {
available = 0;
return i;
}
}
return -1;
}
}
}
}
| apache-2.0 |
jamesnetherton/wildfly-camel | itests/standalone/basic/src/test/java/org/wildfly/camel/test/rmi/ISay.java | 981 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.camel.test.rmi;
import java.rmi.Remote;
import java.rmi.RemoteException;
/**
 * Minimal RMI remote interface exposing a single {@code say()} operation.
 *
 * <p>NOTE(review): presumably bound and invoked by the camel-rmi tests in this package —
 * confirm against the test classes that register/consume it.
 */
public interface ISay extends Remote {

    /**
     * @return the implementation's message text
     * @throws RemoteException if the remote invocation fails
     */
    String say() throws RemoteException;
}
| apache-2.0 |
davidkarlsen/camel | components/camel-ftp/src/test/java/org/apache/camel/component/file/remote/FtpsServerTestSupport.java | 4823 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.file.remote;
import java.io.File;
import java.security.NoSuchAlgorithmException;
import org.apache.camel.util.ObjectHelper;
import org.apache.ftpserver.FtpServerFactory;
import org.apache.ftpserver.listener.ListenerFactory;
import org.apache.ftpserver.ssl.SslConfigurationFactory;
/**
* Abstract base class for unit testing using a secure FTP Server (over SSL/TLS)
*/
public abstract class FtpsServerTestSupport extends FtpServerTestSupport {
protected static final String AUTH_VALUE_SSL = "SSLv3";
protected static final String AUTH_VALUE_TLS = "TLSv1.2";
protected static final File FTPSERVER_KEYSTORE = new File("./src/test/resources/server.jks");
protected static final String FTPSERVER_KEYSTORE_PASSWORD = "password";
@Override
protected FtpServerFactory createFtpServerFactory() throws Exception {
try {
return doCreateFtpServerFactory();
} catch (Exception e) {
// ignore if algorithm is not on the OS
NoSuchAlgorithmException nsae = ObjectHelper.getException(NoSuchAlgorithmException.class, e);
if (nsae != null) {
String name = System.getProperty("os.name");
String message = nsae.getMessage();
log.warn("SunX509 is not avail on this platform [{}] Testing is skipped! Real cause: {}", name, message);
return null;
} else {
// some other error then throw it so the test can fail
throw e;
}
}
}
protected FtpServerFactory doCreateFtpServerFactory() throws Exception {
assertTrue(FTPSERVER_KEYSTORE.exists());
FtpServerFactory serverFactory = super.createFtpServerFactory();
ListenerFactory listenerFactory = new ListenerFactory(serverFactory.getListener(DEFAULT_LISTENER));
listenerFactory.setImplicitSsl(useImplicit());
listenerFactory.setSslConfiguration(createSslConfiguration().createSslConfiguration());
serverFactory.addListener(DEFAULT_LISTENER, listenerFactory.createListener());
return serverFactory;
}
protected SslConfigurationFactory createSslConfiguration() {
// comment in, if you have trouble with SSL
// System.setProperty("javax.net.debug", "all");
SslConfigurationFactory sslConfigFactory = new SslConfigurationFactory();
sslConfigFactory.setSslProtocol(getAuthValue());
sslConfigFactory.setKeystoreFile(FTPSERVER_KEYSTORE);
sslConfigFactory.setKeystoreType("JKS");
sslConfigFactory.setKeystoreAlgorithm("SunX509");
sslConfigFactory.setKeystorePassword(FTPSERVER_KEYSTORE_PASSWORD);
sslConfigFactory.setKeyPassword(FTPSERVER_KEYSTORE_PASSWORD);
sslConfigFactory.setClientAuthentication(getClientAuth());
if (Boolean.valueOf(getClientAuth())) {
sslConfigFactory.setTruststoreFile(FTPSERVER_KEYSTORE);
sslConfigFactory.setTruststoreType("JKS");
sslConfigFactory.setTruststoreAlgorithm("SunX509");
sslConfigFactory.setTruststorePassword(FTPSERVER_KEYSTORE_PASSWORD);
}
return sslConfigFactory;
}
/**
* Set what client authentication level to use, supported values are "yes"
* or "true" for required authentication, "want" for wanted authentication
* and "false" or "none" for no authentication. Defaults to "none".
*
* @return clientAuthReqd
*/
protected abstract String getClientAuth();
/**
* Should listeners created by this factory automatically be in SSL mode
* automatically or must the client explicitly request to use SSL
*/
protected abstract boolean useImplicit();
/**
* Set the SSL protocol used for this channel. Supported values are "SSL"
* and "TLS".
*/
protected abstract String getAuthValue();
} | apache-2.0 |
apache/incubator-shardingsphere | shardingsphere-jdbc/shardingsphere-jdbc-core/src/test/java/org/apache/shardingsphere/driver/executor/batch/BatchExecutionUnitTest.java | 3003 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.driver.executor.batch;
import org.apache.shardingsphere.infra.executor.sql.context.ExecutionUnit;
import org.apache.shardingsphere.infra.executor.sql.context.SQLUnit;
import org.junit.Test;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for {@code BatchExecutionUnit}: parameter-set accumulation across
 * addBatch calls, equality semantics, and the {@code toString} representation.
 */
public final class BatchExecutionUnitTest {

    private static final String DATA_SOURCE_NAME = "ds";

    private static final String SQL = "SELECT * FROM table WHERE id = ?";

    @Test
    public void assertGetParameterSets() {
        BatchExecutionUnit batchExecutionUnit = new BatchExecutionUnit(new ExecutionUnit(DATA_SOURCE_NAME, new SQLUnit(SQL, Collections.singletonList(1))));
        // Before any addBatch call the single parameter set is empty.
        List<List<Object>> actual = batchExecutionUnit.getParameterSets();
        assertThat(actual.size(), is(1));
        assertTrue(actual.get(0).isEmpty());
        // After mapAddBatchCount the SQL unit's parameters become visible.
        batchExecutionUnit.mapAddBatchCount(0);
        actual = batchExecutionUnit.getParameterSets();
        assertThat(actual.size(), is(1));
        assertThat(actual.get(0).size(), is(1));
        assertThat(actual.get(0).get(0), is(1));
    }

    @Test
    public void assertEquals() {
        // Equality ignores SQL parameters; only data source and SQL text matter.
        BatchExecutionUnit actual = new BatchExecutionUnit(new ExecutionUnit(DATA_SOURCE_NAME, new SQLUnit(SQL, Collections.singletonList(1))));
        BatchExecutionUnit expected = new BatchExecutionUnit(new ExecutionUnit(DATA_SOURCE_NAME, new SQLUnit(SQL, Collections.singletonList(2))));
        assertThat(actual, is(expected));
    }

    @Test
    public void assertToString() {
        ExecutionUnit executionUnit = new ExecutionUnit(DATA_SOURCE_NAME, new SQLUnit(SQL, Collections.singletonList(1)));
        BatchExecutionUnit actual = new BatchExecutionUnit(executionUnit);
        // Fix: the format string has three specifiers (%s, %s, %d); the previous code passed a
        // stray fourth argument ("null") that was silently ignored by String.format.
        assertThat(actual.toString(), is(String.format("BatchExecutionUnit(executionUnit=ExecutionUnit"
                + "(dataSourceName=%s, sqlUnit=SQLUnit(sql=%s, parameters=[%d], tableRouteMappers=[])), "
                + "jdbcAndActualAddBatchCallTimesMap={}, actualCallAddBatchTimes=0)", DATA_SOURCE_NAME, SQL, 1)));
    }
}
| apache-2.0 |
shabtaisharon/ds3_java_browser | dsb-gui/src/main/java/com/spectralogic/dsbrowser/gui/services/tasks/Ds3DeleteFilesTask.java | 2959 | /*
* ******************************************************************************
* Copyright 2016-2018 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ******************************************************************************
*/
package com.spectralogic.dsbrowser.gui.services.tasks;
import com.google.common.collect.ImmutableList;
import com.spectralogic.ds3client.Ds3Client;
import com.spectralogic.ds3client.commands.DeleteObjectsRequest;
import com.spectralogic.dsbrowser.gui.components.ds3panel.ds3treetable.Ds3TreeTableValue;
import com.spectralogic.dsbrowser.gui.util.Ds3Task;
import com.spectralogic.dsbrowser.gui.util.StringConstants;
import javafx.concurrent.WorkerStateEvent;
import javafx.event.Event;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * Task that bulk-deletes the objects recorded in {@code bucketObjectsMap} for each
 * bucket in {@code buckets}.
 */
public class Ds3DeleteFilesTask extends Ds3Task {

    private final ImmutableList<String> buckets;
    private final Ds3Client ds3Client;
    private String errorMsg;
    private final Map<String, List<Ds3TreeTableValue>> bucketObjectsMap;

    public Ds3DeleteFilesTask(final Ds3Client ds3Client,
                              final ImmutableList<String> buckets,
                              final Map<String, List<Ds3TreeTableValue>> bucketObjectsMap) {
        this.ds3Client = ds3Client;
        this.buckets = buckets;
        this.bucketObjectsMap = bucketObjectsMap;
    }

    /**
     * Deletes the mapped objects of every bucket in {@code buckets}.
     *
     * @return {@code SUCCESS} when all buckets were processed, empty on failure or when
     *     there was nothing to delete
     */
    @Override
    protected Optional<String> call() {
        try {
            for (final String bucket : buckets) {
                final List<String> objectNames = bucketObjectsMap.get(bucket).stream()
                        .map(Ds3TreeTableValue::getFullName)
                        .collect(Collectors.toList());
                ds3Client.deleteObjects(new DeleteObjectsRequest(bucket, objectNames));
            }
            // Fix: report success after every requested bucket has been deleted. The previous
            // implementation compared a running counter against bucketObjectsMap.keySet().size(),
            // which returned early (skipping remaining deletions) when the map had fewer keys
            // than buckets, and never reported success when it had more.
            return buckets.isEmpty() ? Optional.empty() : Optional.of(StringConstants.SUCCESS);
        } catch (final IOException e) {
            errorMsg = e.getMessage();
            this.fireEvent(new Event(WorkerStateEvent.WORKER_STATE_FAILED));
            return Optional.empty();
        }
    }

    @Override
    public String getErrorMsg() {
        return errorMsg;
    }
}
| apache-2.0 |
raja15792/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201508/Dimension.java | 54296 |
package com.google.api.ads.dfp.jaxws.v201508;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for Dimension.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="Dimension">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="MONTH_AND_YEAR"/>
* <enumeration value="WEEK"/>
* <enumeration value="DATE"/>
* <enumeration value="DAY"/>
* <enumeration value="HOUR"/>
* <enumeration value="LINE_ITEM_ID"/>
* <enumeration value="LINE_ITEM_NAME"/>
* <enumeration value="LINE_ITEM_TYPE"/>
* <enumeration value="AGGREGATED_DEMAND_CHANNEL"/>
* <enumeration value="ORDER_ID"/>
* <enumeration value="ORDER_NAME"/>
* <enumeration value="ORDER_DELIVERY_STATUS"/>
* <enumeration value="ADVERTISER_ID"/>
* <enumeration value="ADVERTISER_NAME"/>
* <enumeration value="AD_NETWORK_ID"/>
* <enumeration value="AD_NETWORK_NAME"/>
* <enumeration value="SALESPERSON_ID"/>
* <enumeration value="SALESPERSON_NAME"/>
* <enumeration value="CREATIVE_ID"/>
* <enumeration value="CREATIVE_NAME"/>
* <enumeration value="CREATIVE_TYPE"/>
* <enumeration value="CREATIVE_BILLING_TYPE"/>
* <enumeration value="CUSTOM_EVENT_ID"/>
* <enumeration value="CUSTOM_EVENT_NAME"/>
* <enumeration value="CUSTOM_EVENT_TYPE"/>
* <enumeration value="CREATIVE_SIZE"/>
* <enumeration value="AD_UNIT_ID"/>
* <enumeration value="AD_UNIT_NAME"/>
* <enumeration value="PARENT_AD_UNIT_ID"/>
* <enumeration value="PARENT_AD_UNIT_NAME"/>
* <enumeration value="PLACEMENT_ID"/>
* <enumeration value="PLACEMENT_NAME"/>
* <enumeration value="PLACEMENT_STATUS"/>
* <enumeration value="TARGETING"/>
* <enumeration value="DEVICE_CATEGORY_ID"/>
* <enumeration value="DEVICE_CATEGORY_NAME"/>
* <enumeration value="COUNTRY_CRITERIA_ID"/>
* <enumeration value="COUNTRY_NAME"/>
* <enumeration value="REGION_CRITERIA_ID"/>
* <enumeration value="REGION_NAME"/>
* <enumeration value="CITY_CRITERIA_ID"/>
* <enumeration value="CITY_NAME"/>
* <enumeration value="METRO_CRITERIA_ID"/>
* <enumeration value="METRO_NAME"/>
* <enumeration value="POSTAL_CODE_CRITERIA_ID"/>
* <enumeration value="POSTAL_CODE"/>
* <enumeration value="CUSTOM_TARGETING_VALUE_ID"/>
* <enumeration value="CUSTOM_CRITERIA"/>
* <enumeration value="ACTIVITY_ID"/>
* <enumeration value="ACTIVITY_NAME"/>
* <enumeration value="ACTIVITY_GROUP_ID"/>
* <enumeration value="ACTIVITY_GROUP_NAME"/>
* <enumeration value="CONTENT_ID"/>
* <enumeration value="CONTENT_NAME"/>
* <enumeration value="CONTENT_BUNDLE_ID"/>
* <enumeration value="CONTENT_BUNDLE_NAME"/>
* <enumeration value="CONTENT_HIERARCHY"/>
* <enumeration value="VIDEO_FALLBACK_POSITION"/>
* <enumeration value="POSITION_OF_POD"/>
* <enumeration value="POSITION_IN_POD"/>
* <enumeration value="PARTNER_MANAGEMENT_PARTNER_ID"/>
* <enumeration value="PARTNER_MANAGEMENT_PARTNER_NAME"/>
* <enumeration value="PARTNER_MANAGEMENT_PARTNER_LABEL_ID"/>
* <enumeration value="PARTNER_MANAGEMENT_PARTNER_LABEL_NAME"/>
* <enumeration value="GRP_DEMOGRAPHICS"/>
* <enumeration value="AD_REQUEST_SIZE"/>
* <enumeration value="AD_REQUEST_AD_UNIT_SIZES"/>
* <enumeration value="AD_REQUEST_CUSTOM_CRITERIA"/>
* <enumeration value="BUYER_ID"/>
* <enumeration value="BUYER_NAME"/>
* <enumeration value="VERIFIED_ADVERTISER_ID"/>
* <enumeration value="VERIFIED_ADVERTISER_NAME"/>
* <enumeration value="AD_UNIT_STATUS"/>
* <enumeration value="MASTER_COMPANION_CREATIVE_ID"/>
* <enumeration value="MASTER_COMPANION_CREATIVE_NAME"/>
* <enumeration value="PROPOSAL_LINE_ITEM_ID"/>
* <enumeration value="PROPOSAL_LINE_ITEM_NAME"/>
* <enumeration value="PROPOSAL_ID"/>
* <enumeration value="PROPOSAL_NAME"/>
* <enumeration value="ALL_SALESPEOPLE_ID"/>
* <enumeration value="ALL_SALESPEOPLE_NAME"/>
* <enumeration value="SALES_TEAM_ID"/>
* <enumeration value="SALES_TEAM_NAME"/>
* <enumeration value="PROPOSAL_AGENCY_ID"/>
* <enumeration value="PROPOSAL_AGENCY_NAME"/>
* <enumeration value="PRODUCT_ID"/>
* <enumeration value="PRODUCT_NAME"/>
* <enumeration value="PRODUCT_TEMPLATE_ID"/>
* <enumeration value="PRODUCT_TEMPLATE_NAME"/>
* <enumeration value="RATE_CARD_ID"/>
* <enumeration value="RATE_CARD_NAME"/>
* <enumeration value="WORKFLOW_ID"/>
* <enumeration value="WORKFLOW_NAME"/>
* <enumeration value="PACKAGE_ID"/>
* <enumeration value="PACKAGE_NAME"/>
* <enumeration value="PRODUCT_PACKAGE_ID"/>
* <enumeration value="PRODUCT_PACKAGE_NAME"/>
* <enumeration value="AUDIENCE_SEGMENT_ID"/>
* <enumeration value="AUDIENCE_SEGMENT_NAME"/>
* <enumeration value="AUDIENCE_SEGMENT_DATA_PROVIDER_NAME"/>
* <enumeration value="AD_EXCHANGE_AD_SIZE_NAME"/>
* <enumeration value="AD_EXCHANGE_PLATFORM_TYPE_NAME"/>
* <enumeration value="AD_EXCHANGE_PRICING_RULE_NAME"/>
* <enumeration value="AD_EXCHANGE_TAG_NAME"/>
* <enumeration value="AD_EXCHANGE_URL_CHANNEL_NAME"/>
* <enumeration value="AD_EXCHANGE_AD_CLIENT_ID"/>
* <enumeration value="AD_EXCHANGE_CREATIVE_SIZES"/>
* <enumeration value="AD_EXCHANGE_AD_FORMAT_NAME"/>
* <enumeration value="AD_EXCHANGE_CHANNEL_NAME"/>
* <enumeration value="AD_EXCHANGE_PRODUCT_NAME"/>
* <enumeration value="AD_EXCHANGE_SITE_NAME"/>
* <enumeration value="AD_EXCHANGE_REQUEST_SOURCES"/>
* <enumeration value="AD_EXCHANGE_TRANSACTION_TYPE_NAME"/>
* <enumeration value="AD_EXCHANGE_ADVERTISER_NAME"/>
* <enumeration value="AD_EXCHANGE_AGENCY"/>
* <enumeration value="AD_EXCHANGE_BID_TYPE"/>
* <enumeration value="AD_EXCHANGE_BRANDING_TYPE"/>
* <enumeration value="AD_EXCHANGE_BUYER_NETWORK_NAME"/>
* <enumeration value="AD_EXCHANGE_DATE"/>
* <enumeration value="AD_EXCHANGE_DEAL_CPM"/>
* <enumeration value="AD_EXCHANGE_DEAL_ID"/>
* <enumeration value="AD_EXCHANGE_DEAL_NAME"/>
* <enumeration value="AD_EXCHANGE_DEAL_TYPE"/>
* <enumeration value="AD_EXCHANGE_DSP_BUYER_NETWORK_NAME"/>
* <enumeration value="AD_EXCHANGE_EXPANSION_TYPE"/>
* <enumeration value="AD_EXCHANGE_COUNTRY_CODE"/>
* <enumeration value="AD_EXCHANGE_COUNTRY_NAME"/>
* <enumeration value="AD_EXCHANGE_INVENTORY_OWNERSHIP"/>
* <enumeration value="AD_EXCHANGE_LANDING_PAGE_DOMAIN"/>
* <enumeration value="AD_EXCHANGE_MOBILE_APP_NAME"/>
* <enumeration value="AD_EXCHANGE_MOBILE_CARRIER_NAME"/>
* <enumeration value="AD_EXCHANGE_MOBILE_DEVICE_NAME"/>
* <enumeration value="AD_EXCHANGE_MOBILE_INVENTORY_TYPE"/>
* <enumeration value="AD_EXCHANGE_MONTH"/>
* <enumeration value="AD_EXCHANGE_NETWORK_PARTNER_NAME"/>
* <enumeration value="AD_EXCHANGE_OS_VERSION_NAME"/>
* <enumeration value="AD_EXCHANGE_PRICING_RULE_ID"/>
* <enumeration value="AD_EXCHANGE_TAG_CODE"/>
* <enumeration value="AD_EXCHANGE_TARGETING_TYPE"/>
* <enumeration value="AD_EXCHANGE_THIRD_PARTY_BUYER_ACCOUNT_NAME"/>
* <enumeration value="AD_EXCHANGE_THIRD_PARTY_NETWORK_TAG_CURRENCY"/>
* <enumeration value="AD_EXCHANGE_THIRD_PARTY_NETWORK_TAG_NAME"/>
* <enumeration value="AD_EXCHANGE_URL_CHANNEL_ID"/>
* <enumeration value="AD_EXCHANGE_USER_BANDWIDTH_NAME"/>
* <enumeration value="AD_EXCHANGE_VIDEO_AD_DURATION"/>
* <enumeration value="AD_EXCHANGE_VIDEO_AD_DURATION_RAW"/>
* <enumeration value="AD_EXCHANGE_VIDEO_AD_FORMAT"/>
* <enumeration value="AD_EXCHANGE_WEEK"/>
* <enumeration value="NIELSEN_SEGMENT"/>
* <enumeration value="NIELSEN_DEMOGRAPHICS"/>
* <enumeration value="NIELSEN_RESTATEMENT_DATE"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "Dimension")
@XmlEnum
public enum Dimension {
/**
*
* Breaks down reporting data by month and year in the network time zone. Can
* be used to filter on month using ISO 4601 format 'YYYY-MM'.
*
*
*/
MONTH_AND_YEAR,
/**
*
* Breaks down reporting data by week of the year in the network time zone.
* Cannot be used for filtering.
*
*
*/
WEEK,
/**
*
* Breaks down reporting data by date in the network time zone. Can be used to
* filter by date using ISO 8601's format 'YYYY-MM-DD'".
*
*
*/
DATE,
/**
*
* Breaks down reporting data by day of the week in the network time zone. Can
* be used to filter by day of the week using the index of the day (from 1 for
* Monday is 1 to 7 for Sunday).
*
*
*/
DAY,
/**
*
* Breaks down reporting data by hour of the day in the network time zone. Can
* be used to filter by hour of the day (from 0 to 23).
*
*
*/
HOUR,
/**
*
* Breaks down reporting data by {@link LineItem#id}. Can be used to
* filter by {@link LineItem#id}.
*
*
*/
LINE_ITEM_ID,
/**
*
* Breaks down reporting data by line item. {@link LineItem#name} and
* {@link LineItem#id} are automatically included as columns in the report.
* Can be used to filter by {@link LineItem#name}.
*
*
*/
LINE_ITEM_NAME,
/**
*
* Breaks down reporting data by {@link LineItem#lineItemType}. Can be used
* to filter by line item type using {@link LineItemType} enumeration names.
*
*
*/
LINE_ITEM_TYPE,
/**
*
* Breaks down reporting data by aggregated demand channel type.
*
*
*/
AGGREGATED_DEMAND_CHANNEL,
/**
*
* Breaks down reporting data by {@link Order#id}. Can be used to filter by
* {@link Order#id}.
*
*
*/
ORDER_ID,
/**
*
* Breaks down reporting data by order. {@link Order#name} and
* {@link Order#id} are automatically included as columns in the report. Can
* be used to filter by {@link Order#name}.
*
*
*/
ORDER_NAME,
/**
*
* Delivery status of the order. Not available as a dimension to report on,
* but exists as a dimension in order to filter on it using PQL.
* Valid values are 'STARTED', 'NOT_STARTED' and 'COMPLETED'.
*
*
*/
ORDER_DELIVERY_STATUS,
/**
*
* Breaks down reporting data by advertising company {@link Company#id}. Can
* be used to filter by {@link Company#id}.
*
*
*/
ADVERTISER_ID,
/**
*
* Breaks down reporting data by advertising company. {@link Company#name} and
* {@link Company#id} are automatically included as columns in the report.
* Can be used to filter by {@link Company#name}.
*
*
*/
ADVERTISER_NAME,
/**
*
* The network that provided the ad for SDK ad mediation.
*
* <p>
* If selected for a report, that report will include only SDK mediation ads and will not contain
* non-SDK mediation ads.
* </p>
*
* <p>
* SDK mediation ads are ads for mobile devices. They have a list of ad networks which can provide
* ads to serve. Not every ad network will have an ad to serve so the device will try each network
* one-by-one until it finds an ad network with an ad to serve. The ad network that ends up
* serving the ad will appear here. Note that this id does not correlate to anything in the
* companies table and is not the same id as is served by {@link #ADVERTISER_ID}.
* </p>
*
*
*/
AD_NETWORK_ID,
/**
*
* The name of the network defined in {@link #AD_NETWORK_ID}.
*
*
*/
AD_NETWORK_NAME,
/**
*
* Breaks down reporting data by salesperson {@link User#id}. Can be used to
* filter by {@link User#id}.
*
*
*/
SALESPERSON_ID,
/**
*
* Breaks down reporting data by salesperson. {@link User#name} and
* {@link User#id} of the salesperson are automatically included as columns in
* the report. Can be used to filter by {@link User#name}.
*
*
*/
SALESPERSON_NAME,
/**
*
* Breaks down reporting data by {@link Creative#id} or creative set id
* (master's {@link Creative#id}) if the creative is part of a creative set.
* Can be used to filter by {@link Creative#id}.
*
*
*/
CREATIVE_ID,
/**
*
* Breaks down reporting data by creative. {@link Creative#name} and
* {@link Creative#id} are automatically included as columns in the report.
* Can be used to filter by {@link Creative#name}.
*
*
*/
CREATIVE_NAME,
/**
*
* Breaks down reporting data by creative type.
*
*
*/
CREATIVE_TYPE,
/**
*
* Breaks down reporting data by creative billing type.
*
*
*/
CREATIVE_BILLING_TYPE,
/**
*
* Breaks down reporting data by custom event ID.
*
*
*/
CUSTOM_EVENT_ID,
/**
*
* Breaks down reporting data by custom event name.
*
*
*/
CUSTOM_EVENT_NAME,
/**
*
* Breaks down reporting data by custom event type (timer/exit/counter).
*
*
*/
CUSTOM_EVENT_TYPE,
/**
*
* Breaks down reporting data by {@link Creative#size}. Cannot be used for
* filtering.
*
*
*/
CREATIVE_SIZE,
/**
*
* Breaks down reporting data by {@link AdUnit#id}. Can be used to filter by
* {@link AdUnit#id}. {@link #AD_UNIT_NAME}, i.e. {@link AdUnit#name}, is
* automatically included as a dimension in the report.
*
*
*/
AD_UNIT_ID,
/**
*
* Breaks down reporting data by ad unit. {@link AdUnit#name} and
* {@link AdUnit#id} are automatically included as columns in the report. Can
* be used to filter by {@link AdUnit#name}.
*
*
*/
AD_UNIT_NAME,
/**
*
* Used to filter on all the descendants of an ad unit by {@link AdUnit#id}.
* Not available as a dimension to report on.
*
*
*/
PARENT_AD_UNIT_ID,
/**
*
* Used to filter on all the descendants of an ad unit by {@link AdUnit#name}.
* Not available as a dimension to report on.
*
*
*/
PARENT_AD_UNIT_NAME,
/**
*
* Breaks down reporting data by {@link Placement#id}. Can be used to filter
* by {@link Placement#id}.
*
*
*/
PLACEMENT_ID,
/**
*
* Breaks down reporting data by placement. {@link Placement#name} and
* {@link Placement#id} are automatically included as columns in the report.
* Can be used to filter by {@link Placement#name}.
*
*
*/
PLACEMENT_NAME,
/**
*
* Status of the placement. Not available as a dimension to report on, but
* exists as a dimension in order to filter on it using PQL. Can be used to
* filter on {@link Placement#status} by using {@link InventoryStatus}
* enumeration names.
*
*
*/
PLACEMENT_STATUS,
/**
*
* Breaks down reporting data by criteria predefined by DoubleClick For
* Publishers like the operating system, browser etc. Cannot be used for
* filtering.
*
*
*/
TARGETING,
/**
*
* The ID of the device category to which an ad is being targeted.
*
* Can be used to filter by device category ID.
*
*
*/
DEVICE_CATEGORY_ID,
/**
*
* The category of device (smartphone, feature phone, tablet, or desktop) to which an ad is being
* targeted.
*
* Can be used to filter by device category name.
*
*
*/
DEVICE_CATEGORY_NAME,
/**
*
* Breaks down reporting data by country criteria ID. Can be used to filter by
* country criteria ID.
*
*
*/
COUNTRY_CRITERIA_ID,
/**
*
* Breaks down reporting data by country name. The country name and the
* country criteria ID are automatically included as columns in the report.
* Can be used to filter by country name using the US English name.
*
*
*/
COUNTRY_NAME,
/**
*
* Breaks down reporting data by region criteria ID. Can be used to filter by
* region criteria ID.
*
*
*/
REGION_CRITERIA_ID,
/**
*
* Breaks down reporting data by region name. The region name and the region
* criteria ID are automatically included as columns in the report. Can be
* used to filter by region name using the US English name.
*
*
*/
REGION_NAME,
/**
*
* Breaks down reporting data by city criteria ID. Can be used to filter by
* city criteria ID.
*
*
*/
CITY_CRITERIA_ID,
/**
*
* Breaks down reporting data by city name. The city name and the city
* criteria ID are automatically included as columns in the report. Can be
* used to filter by city name using the US English name.
*
*
*/
CITY_NAME,
/**
*
* Breaks down reporting data by metro criteria ID. Can be used to filter by
* metro criteria ID.
*
*
*/
METRO_CRITERIA_ID,
/**
*
* Breaks down reporting data by metro name. The metro name and the metro
* criteria ID are automatically included as columns in the report. Can be
* used to filter by metro name using the US English name.
*
*
*/
METRO_NAME,
/**
*
* Breaks down reporting data by postal code criteria ID. Can be used to
* filter by postal code criteria ID.
*
*
*/
POSTAL_CODE_CRITERIA_ID,
/**
*
* Breaks down reporting data by postal code. The postal code and the postal
* code criteria ID are automatically included as columns in the report. Can
* be used to filter by postal code.
*
*
*/
POSTAL_CODE,
/**
*
* Breaks down reporting data by {@link CustomTargetingValue#id}. Can be used
* to filter by {@link CustomTargetingValue#id}.
*
*
*/
CUSTOM_TARGETING_VALUE_ID,
/**
*
* Breaks down reporting data by custom criteria. The {@link CustomTargetingValue} is
* displayed in the form:
* <ul>
* <li>
* car=honda when value match type is
* {@link CustomTargetingValue.MatchType#EXACT}
* </li>
* <li>
* car~honda when value match type is
* {@link CustomTargetingValue.MatchType#BROAD}
* </li>
* <li>
* car=*honda when value match type is
* {@link CustomTargetingValue.MatchType#PREFIX}
* </li>
* <li>
* car~*honda when value match type is
* {@link CustomTargetingValue.MatchType#BROAD_PREFIX}
* </li>
* </ul>
* {@link #CUSTOM_TARGETING_VALUE_ID}, i.e. {@link CustomTargetingValue#id} is
* automatically included as a column in the report.
* Cannot be used for filtering; use {@link #CUSTOM_TARGETING_VALUE_ID} instead.
* <p>
* When using this {@code Dimension}, metrics for freeform key values are only
* reported on when they are registered with {@code CustomTargetingService}.
*
*
*/
CUSTOM_CRITERIA,
/**
*
* Breaks down reporting data by activity ID. Can be used to filter by
* activity ID.
*
*
*/
ACTIVITY_ID,
/**
*
* Breaks down reporting data by activity. The activity name and the activity
* ID are automatically included as columns in the report. Can be used to
* filter by activity name.
*
*
*/
ACTIVITY_NAME,
/**
*
* Breaks down reporting data by activity group ID. Can be used to filter by
* activity group ID.
*
*
*/
ACTIVITY_GROUP_ID,
/**
*
* Breaks down reporting data by activity group. The activity group name and
* the activity group ID are automatically included as columns in the report.
* Can be used to filter by activity group name.
*
*
*/
ACTIVITY_GROUP_NAME,
/**
*
* Breaks down reporting data by {@link Content#id}. Can be used to filter by
* {@link Content#id}.
*
*
*/
CONTENT_ID,
/**
*
* Breaks down reporting data by content. {@link Content#name} and
* {@link Content#id} are automatically included as columns in the report. Can
* be used to filter by {@link Content#name}.
*
*
*/
CONTENT_NAME,
/**
*
* Breaks down reporting data by {@link ContentBundle#id}. Can be used to filter
* by {@link ContentBundle#id}.
*
*
*/
CONTENT_BUNDLE_ID,
/**
*
* Breaks down reporting data by content bundle. {@link ContentBundle#name} and
* {@link ContentBundle#id} are automatically included as columns in the
* report. Can be used to filter by {@link ContentBundle#name}.
*
*
*/
CONTENT_BUNDLE_NAME,
/**
*
* Breaks down reporting data by the content hierarchy. To use this dimension, a list of custom
* targeting key IDs must be specified in
* {@link ReportQuery#contentMetadataKeyHierarchyCustomTargetingKeyIds}.
* <p>
* This dimension can be used as a filter in the {@link Statement} in PQL syntax:
* CONTENT_HIERARCHY_CUSTOM_TARGETING_KEY[contentMetadataKeyHierarchyCustomTargetingKeyId]_ID =
* {@link CustomTargetingValue#id custom targeting value ID}
* <p>
* For example: WHERE CONTENT_HIERARCHY_CUSTOM_TARGETING_KEY[4242]_ID = 53423
*
*
*/
CONTENT_HIERARCHY,
/**
*
* Breaks down reporting data by the fallback position of the video ad, i.e.,
* {@code NON_FALLBACK}, {@code FALLBACK_POSITION_1}, {@code FALLBACK_POSITION_2}, etc. Can be
* used for filtering.
*
*
*/
VIDEO_FALLBACK_POSITION,
/**
*
* Breaks down reporting data by the position of the video ad within the video stream, i.e.,
* {@code UNKNOWN_POSITION}, {@code PREROLL}, {@code POSTROLL}, {@code UNKNOWN_MIDROLL},
* {@code MIDROLL_1}, {@code MIDROLL_2}, etc. {@code UNKNOWN_MIDROLL} represents a midroll, but
* which specific midroll is unknown. Can be used for filtering.
*
*
*/
POSITION_OF_POD,
/**
*
* Breaks down reporting data by the position of the video ad within the pod, i.e.,
* {@code UNKNOWN_POSITION}, {@code POSITION_1}, {@code POSITION_2}, etc.
* Can be used for filtering.
*
*
*/
POSITION_IN_POD,
/**
*
* Breaks down reporting data by partner {@link Company#id}.
*
*
*/
PARTNER_MANAGEMENT_PARTNER_ID,
/**
*
* Breaks down reporting data by partner {@link Company#name} and {@link Company#id} are
* automatically included as columns in the report.
*
*
*/
PARTNER_MANAGEMENT_PARTNER_NAME,
/**
*
* Breaks down reporting data by partner label {@link Label#id}.
*
*
*/
PARTNER_MANAGEMENT_PARTNER_LABEL_ID,
/**
*
* Breaks down reporting data by partner label. {@link Label#name} and {@link Label#id} are
* automatically included as columns in the report.
*
*
*/
PARTNER_MANAGEMENT_PARTNER_LABEL_NAME,
/**
*
* Breaks down reporting data by gender and age group, i.e., MALE_13_TO_17, MALE_18_TO_24,
* MALE_25_TO_34, MALE_35_TO_44, MALE_45_TO_54, MALE_55_TO_64, MALE_65_PLUS, FEMALE_13_TO_17,
* FEMALE_18_TO_24, FEMALE_25_TO_34, FEMALE_35_TO_44, FEMALE_45_TO_54, FEMALE_55_TO_64,
* FEMALE_65_PLUS, UNKNOWN_0_TO_17 and UNKNOWN.
* Whenever this dimension is selected, {@link #COUNTRY_NAME} must be selected.
*
* <p>
* This dimension is supported only for GRP columns.
*
*
*/
GRP_DEMOGRAPHICS,
/**
*
* Size of the creative requested for an ad.
*
*
*/
AD_REQUEST_SIZE,
/**
*
* Breaks down reporting data by the ad unit sizes specified in ad requests.
*
* <p>Formatted as comma separated values, e.g. "300x250,300x250v,300x60".
*
* <p>This dimension is supported only for sell-through columns.
*
*
*/
AD_REQUEST_AD_UNIT_SIZES,
/**
*
* Breaks down reporting data by the custom criteria specified in ad requests.
*
* <p>Formatted as comma separated
* {@link CustomTargetingKey key}-{@link CustomTargetingValue values}, where a key-value is
* formatted as {@code key1=value_1|...|value_n,key2=value_1|...|value_n,...}.
*
* <p>This dimension is supported only for sell-through columns.
*
*
*/
AD_REQUEST_CUSTOM_CRITERIA,
/**
*
* The unique identifier used for an ad network that is associated with the
* company that the ad is served for.
*
*
*/
BUYER_ID,
/**
*
* The name of the ad network that is associated with the company that the ad is served for.
*
*
*/
BUYER_NAME,
/**
*
* ID of the advertiser that filled the ad either directly (through DFP) or indirectly via
* Google Ad Exchange or another ad network or exchange.
*
*
*/
VERIFIED_ADVERTISER_ID,
/**
*
* Name of the advertiser that filled the ad either directly (through DFP) or indirectly
* via Google Ad Exchange or another ad network or exchange.
*
*
*/
VERIFIED_ADVERTISER_NAME,
/**
*
* Status of the ad unit. Not available as a dimension to report on,
* but exists as a dimension in order to filter on it using PQL.
* Valid values correspond to {@link InventoryStatus}.
*
*
*/
AD_UNIT_STATUS,
/**
*
* Breaks down reporting data by {@link Creative#id}. This includes regular creatives,
* and master and companions in case of creative sets.
*
*
*/
MASTER_COMPANION_CREATIVE_ID,
/**
*
* Breaks down reporting data by creative. This includes regular creatives,
* and master and companions in case of creative sets.
*
*
*/
MASTER_COMPANION_CREATIVE_NAME,
/**
*
* Breaks down reporting data by {@link ProposalLineItem#id}. Can be used to filter by
* {@link ProposalLineItem#id}.
*
*
*/
PROPOSAL_LINE_ITEM_ID,
/**
*
* Breaks down reporting data by {@link ProposalLineItem#name}. Can be used to filter by
* {@link ProposalLineItem#name}.
*
*
*/
PROPOSAL_LINE_ITEM_NAME,
/**
*
* Breaks down reporting data by {@link Proposal#id}. Can be used to filter by
* {@link Proposal#id}.
*
*
*/
PROPOSAL_ID,
/**
*
* Breaks down reporting data by {@link Proposal#name}. Can be used to filter by
* {@link Proposal#name}.
*
*
*/
PROPOSAL_NAME,
/**
*
* Breaks down reporting data by salesperson {@link User#id}, including both salesperson and
* secondary salespeople. Can be used to filter by all salespeople {@link User#id}.
*
*
*/
ALL_SALESPEOPLE_ID,
/**
*
* Breaks down reporting data by salesperson {@link User#name}, including both salesperson and
* secondary salespeople. Can be used to filter by all salespeople {@link User#name}.
*
*
*/
ALL_SALESPEOPLE_NAME,
/**
*
* Used to filter by {@link User#id} in sales team. Sales team includes salesperson, secondary
* salesperson, sales planners. Not available as a dimension to report on.
*
*
*/
SALES_TEAM_ID,
/**
*
* Used to filter by {@link User#name} in sales team. Sales team includes salesperson, secondary
* salesperson, sales planners. Not available as a dimension to report on.
*
*
*/
SALES_TEAM_NAME,
/**
*
* Breaks down reporting data by proposal agency {@link Company#id}. Can be used to filter by
* proposal agency {@link Company#id}.
*
*
*/
PROPOSAL_AGENCY_ID,
/**
*
* Breaks down reporting data by proposal agency {@link Company#name}. Can be used to filter by
* proposal agency {@link Company#name}.
*
*
*/
PROPOSAL_AGENCY_NAME,
/**
*
* Breaks down reporting data by {@link Product#id}. Can be used to filter by {@link Product#id}.
*
*
*/
PRODUCT_ID,
/**
*
* Breaks down reporting data by {@link Product#name}.
*
*
*/
PRODUCT_NAME,
/**
*
* Breaks down reporting data by {@link ProductTemplate#id}. Can be used to filter by
* {@link ProductTemplate#id}.
*
*
*/
PRODUCT_TEMPLATE_ID,
/**
*
* Breaks down reporting data by {@link ProductTemplate#name}. Can be used to filter by
* {@link ProductTemplate#name}.
*
*
*/
PRODUCT_TEMPLATE_NAME,
/**
*
* Breaks down reporting data by {@link RateCard#id}. Can be used to filter by
* {@link RateCard#id}.
*
*
*/
RATE_CARD_ID,
/**
*
* Breaks down reporting data by {@link RateCard#name}. Can be used to filter by
* {@link RateCard#name}.
*
*
*/
RATE_CARD_NAME,
/**
*
* Used to filter by {@link Workflow#id}. Not available as a dimension to report on.
*
*
*/
WORKFLOW_ID,
/**
*
* Used to filter by {@link Workflow#name}. Not available as a dimension to report on.
*
*
*/
WORKFLOW_NAME,
/**
*
* Breaks down reporting data by {@link Package#id}.
*
*
*/
PACKAGE_ID,
/**
*
* Breaks down reporting data by {@link Package#name}.
*
*
*/
PACKAGE_NAME,
/**
*
* Breaks down reporting data by {@link ProductPackage#id}. Can be used to filter by
* {@link ProductPackage#id}.
*
*
*/
PRODUCT_PACKAGE_ID,
/**
*
* Breaks down reporting data by {@link ProductPackage#name}. Can be used to filter by
* {@link ProductPackage#name}.
*
*
*/
PRODUCT_PACKAGE_NAME,
/**
*
* Breaks down reporting data by billable audience segment ID.
*
*
*/
AUDIENCE_SEGMENT_ID,
/**
*
* Breaks down reporting data by billable audience segment name.
*
*
*/
AUDIENCE_SEGMENT_NAME,
/**
*
* Breaks down reporting data by audience segment data provider name.
*
*
*/
AUDIENCE_SEGMENT_DATA_PROVIDER_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange ad size. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_AD_SIZE_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange platforms. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_PLATFORM_TYPE_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange pricing rules. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_PRICING_RULE_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange tags. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_TAG_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange URLs. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_URL_CHANNEL_NAME,
/**
*
* Breaks down data by Ad Exchange linked web properties. This experimental dimension only works
* with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_AD_CLIENT_ID,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange creative size. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_CREATIVE_SIZES,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange ad types. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_AD_FORMAT_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange channels. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_CHANNEL_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange products. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_PRODUCT_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange sites. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_SITE_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange request sources. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_REQUEST_SOURCES,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange ad transaction. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_TRANSACTION_TYPE_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by the Ad Exchange advertiser name that bids
* on ads. This experimental dimension only works with Ad Exchange web properties linked with an
* active status.
*
*
*/
AD_EXCHANGE_ADVERTISER_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange agency. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_AGENCY,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange bid type. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_BID_TYPE,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange branding type. Examples:
* Branded, Anonymous. This experimental dimension only works with Ad Exchange web properties
* linked with an active status.
*
*
*/
AD_EXCHANGE_BRANDING_TYPE,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange ad network name. Example:
* Google Adwords. This experimental dimension only works with Ad Exchange web properties linked
* with an active status.
*
*
*/
AD_EXCHANGE_BUYER_NETWORK_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange date. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_DATE,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange deal CPM cost. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_DEAL_CPM,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange deal id. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_DEAL_ID,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange deal name. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_DEAL_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange deal/transaction type. Example:
* Open auction. This experimental dimension only works with Ad Exchange web properties linked
* with an active status.
*
*
*/
AD_EXCHANGE_DEAL_TYPE,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange DSP buyer network name. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_DSP_BUYER_NETWORK_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange expansion type. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_EXPANSION_TYPE,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange country code. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_COUNTRY_CODE,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange country name. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_COUNTRY_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange inventory ownership. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_INVENTORY_OWNERSHIP,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange advertiser landing page domain.
* This experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_LANDING_PAGE_DOMAIN,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange mobile app name. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_MOBILE_APP_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange mobile carrier name. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_MOBILE_CARRIER_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange mobile device name. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_MOBILE_DEVICE_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange mobile inventory type. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_MOBILE_INVENTORY_TYPE,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange month. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_MONTH,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange partner name. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_NETWORK_PARTNER_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange operating system version. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_OS_VERSION_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange pricing rule id. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_PRICING_RULE_ID,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange tags. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_TAG_CODE,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange targeting type. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_TARGETING_TYPE,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange third party buyer account name.
* This experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_THIRD_PARTY_BUYER_ACCOUNT_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange third-party network tag
* currency. This experimental dimension only works with Ad Exchange web properties linked with
* an active status.
*
*
*/
AD_EXCHANGE_THIRD_PARTY_NETWORK_TAG_CURRENCY,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange network tag name. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_THIRD_PARTY_NETWORK_TAG_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange channel id. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_URL_CHANNEL_ID,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange user bandwidth. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_USER_BANDWIDTH_NAME,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange video ad duration. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_VIDEO_AD_DURATION,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange raw video ad duration. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_VIDEO_AD_DURATION_RAW,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange video ad type. This
* experimental dimension only works with Ad Exchange web properties linked with an active
* status.
*
*
*/
AD_EXCHANGE_VIDEO_AD_FORMAT,
/**
*
* Breaks down linked Ad Exchange web property data by Ad Exchange week. This experimental
* dimension only works with Ad Exchange web properties linked with an active status.
*
*
*/
AD_EXCHANGE_WEEK,
/**
*
* Campaign date segment of Nielsen Digital Ad Ratings reporting.
*
*
*/
NIELSEN_SEGMENT,
/**
*
* Breaks down reporting data by gender and age group, i.e., MALE_18_TO_20, MALE_21_TO_24,
* MALE_25_TO_29, MALE_30_TO_35, MALE_35_TO_39, MALE_40_TO_44, MALE_45_TO_49, MALE_50_TO_54,
* MALE_55_TO_64, MALE_65_PLUS, FEMALE_18_TO_20, FEMALE_21_TO_24, FEMALE_25_TO_29,
* FEMALE_30_TO_34, FEMALE_35_TO_39, FEMALE_40_TO_44, FEMALE_45_TO_49, FEMALE_50_TO_54,
* FEMALE_55_TO_64, FEMALE_65_PLUS, and OTHER.
*
*
*/
NIELSEN_DEMOGRAPHICS,
/**
*
* Data restatement date of Nielsen Digital Ad Ratings data.
*
*
*/
NIELSEN_RESTATEMENT_DATE;
public String value() {
return name();
}
public static Dimension fromValue(String v) {
return valueOf(v);
}
}
| apache-2.0 |
jivesoftware/robot-intellij-plugin | src/main/java/com/jivesoftware/robot/intellij/plugin/lang/RobotPsiFile.java | 604 | package com.jivesoftware.robot.intellij.plugin.lang;
import com.intellij.extapi.psi.PsiFileBase;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.psi.FileViewProvider;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
/**
 * PSI file node for a Robot Framework source file. Binds the file's view
 * provider to {@link RobotLanguage} and reports {@link RobotFileType} as its
 * file type.
 */
public class RobotPsiFile extends PsiFileBase {

    /**
     * Creates the PSI file backed by the given view provider.
     *
     * @param viewProvider provider of the underlying document/text for this file
     */
    public RobotPsiFile(@NotNull FileViewProvider viewProvider) {
        super(viewProvider, RobotLanguage.INSTANCE);
    }

    /**
     * @return the Robot file type singleton this PSI file represents
     */
    @NotNull
    @Override
    public FileType getFileType() {
        return RobotFileType.INSTANCE;
    }

    // NOTE: the previous getIcon(int) override only delegated to
    // super.getIcon(flags) and has been removed; the inherited implementation
    // provides identical behavior.
}
| apache-2.0 |
pinnamur/titanium_mobile | android/modules/ui/src/java/ti/modules/titanium/ui/LabelProxy.java | 1593 | /**
* Appcelerator Titanium Mobile
* Copyright (c) 2009-2013 by Appcelerator, Inc. All Rights Reserved.
* Licensed under the terms of the Apache Public License
* Please see the LICENSE included with this distribution for details.
*/
package ti.modules.titanium.ui;
import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.annotations.Kroll;
import org.appcelerator.titanium.TiC;
import org.appcelerator.titanium.TiContext;
import org.appcelerator.titanium.proxy.TiViewProxy;
import org.appcelerator.titanium.view.TiUIView;
import ti.modules.titanium.ui.widget.TiUILabel;
import android.app.Activity;
// Proxy (Kroll binding) for the Ti.UI.Label view. The propertyAccessors list
// below generates JavaScript-visible getters/setters for each listed property.
@Kroll.proxy(creatableInModule=UIModule.class, propertyAccessors = {
	TiC.PROPERTY_AUTO_LINK,
	TiC.PROPERTY_COLOR,
	TiC.PROPERTY_ELLIPSIZE,
	TiC.PROPERTY_FONT,
	TiC.PROPERTY_HIGHLIGHTED_COLOR,
	TiC.PROPERTY_HTML,
	TiC.PROPERTY_TEXT,
	TiC.PROPERTY_TEXT_ALIGN,
	TiC.PROPERTY_TEXTID,
	TiC.PROPERTY_WORD_WRAP,
	TiC.PROPERTY_VERTICAL_ALIGN,
	TiC.PROPERTY_SHADOW_OFFSET,
	TiC.PROPERTY_SHADOW_COLOR,
	TiC.PROPERTY_SHADOW_RADIUS,
	TiC.PROPERTY_INCLUDE_FONT_PADDING
})
public class LabelProxy extends TiViewProxy
{
	/**
	 * Default constructor: a label with no text until the "text" property is set.
	 */
	public LabelProxy()
	{
		defaultValues.put(TiC.PROPERTY_TEXT, "");
	}
	/**
	 * Legacy constructor kept for backward compatibility with older Titanium
	 * versions that passed a TiContext; the context argument is unused and the
	 * call simply delegates to the default constructor.
	 */
	public LabelProxy(TiContext tiContext)
	{
		this();
	}
	/**
	 * Maps the "text" property to its localization-key counterpart "textid"
	 * so localized string lookups can drive the label's text.
	 */
	@Override
	protected KrollDict getLangConversionTable()
	{
		KrollDict table = new KrollDict();
		table.put(TiC.PROPERTY_TEXT, TiC.PROPERTY_TEXTID);
		return table;
	}
	/**
	 * Creates the native Android view (TiUILabel) backing this proxy.
	 */
	@Override
	public TiUIView createView(Activity activity)
	{
		return new TiUILabel(this);
	}
	/**
	 * @return the public Titanium API name of this proxy type
	 */
	@Override
	public String getApiName()
	{
		return "Ti.UI.Label";
	}
}
| apache-2.0 |
wjbuys/blox | data-service/src/main/java/com/amazonaws/blox/dataservice/api/DescribeEnvironmentRevisionApi.java | 2781 | /*
* Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may
* not use this file except in compliance with the License. A copy of the
* License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "LICENSE" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.blox.dataservice.api;
import com.amazonaws.blox.dataservice.mapper.ApiModelMapper;
import com.amazonaws.blox.dataservice.model.EnvironmentId;
import com.amazonaws.blox.dataservice.model.EnvironmentRevision;
import com.amazonaws.blox.dataservice.repository.EnvironmentRepository;
import com.amazonaws.blox.dataservicemodel.v1.exception.InternalServiceException;
import com.amazonaws.blox.dataservicemodel.v1.exception.ResourceNotFoundException;
import com.amazonaws.blox.dataservicemodel.v1.model.wrappers.DescribeEnvironmentRevisionRequest;
import com.amazonaws.blox.dataservicemodel.v1.model.wrappers.DescribeEnvironmentRevisionResponse;
import lombok.NonNull;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
@Value
public class DescribeEnvironmentRevisionApi {
  @NonNull private ApiModelMapper apiModelMapper;
  @NonNull private EnvironmentRepository environmentRepository;

  /**
   * Looks up a single environment revision in the repository and returns it
   * mapped onto the wrapper (API) model.
   *
   * @param describeEnvironmentRevisionRequest identifies the environment and revision
   * @return response carrying the mapped environment revision
   * @throws ResourceNotFoundException if the environment or revision does not exist
   * @throws InternalServiceException on any repository or unexpected failure
   */
  public DescribeEnvironmentRevisionResponse describeEnvironmentRevision(
      @NonNull final DescribeEnvironmentRevisionRequest describeEnvironmentRevisionRequest)
      throws ResourceNotFoundException, InternalServiceException {
    final EnvironmentId environmentId =
        apiModelMapper.toModelEnvironmentId(describeEnvironmentRevisionRequest.getEnvironmentId());
    final String environmentRevisionId =
        describeEnvironmentRevisionRequest.getEnvironmentRevisionId();
    try {
      final EnvironmentRevision environmentRevision =
          environmentRepository.getEnvironmentRevision(environmentId, environmentRevisionId);
      return DescribeEnvironmentRevisionResponse.builder()
          .environmentRevision(apiModelMapper.toWrapperEnvironmentRevision(environmentRevision))
          .build();
    } catch (final ResourceNotFoundException | InternalServiceException e) {
      // Already one of the declared service exceptions: log and propagate unchanged.
      log.error(e.getMessage(), e);
      throw e;
    } catch (final Exception e) {
      // Anything unexpected is surfaced as an internal service failure.
      log.error(e.getMessage(), e);
      throw new InternalServiceException(e.getMessage(), e);
    }
  }
}
| apache-2.0 |
massakam/pulsar | pulsar-functions/instance/src/main/java/org/apache/pulsar/functions/instance/go/package-info.java | 858 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.functions.instance.go;
| apache-2.0 |
jexp/idea2 | platform/util/src/com/intellij/util/diff/IntLCS.java | 4253 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.diff;
import java.util.Arrays;
/**
 * Diff/LCS search between two int arrays (typically hashes of lines or
 * tokens). The structure matches Myers' greedy O(ND) diff algorithm: for
 * increasing edit counts d it tracks, per diagonal k = x - y, only the
 * furthest-reaching end point, and records the steps taken in a
 * {@link LinkedDiffPaths} so the actual diff path can be reconstructed later.
 *
 * @author dyoma
 */
class IntLCS {
  // The two input sequences being compared (x axis / y axis of the edit grid).
  private final int[] myFirst;
  private final int[] mySecond;
  // Encoded record of the steps/diagonal runs taken, used for path reconstruction.
  private final LinkedDiffPaths myPathsMatrix;
  // Per diagonal (indexed k + myMaxY): key of the last step encoded on it, or -1.
  private final int[] myPrevPathKey;
  // Furthest-reaching end points per diagonal for the previous and current
  // d-contour; the two arrays are swapped at the end of each outer iteration.
  private int[] myPrevEnds;
  private int[] myCurrentEnds;
  // Lengths of the first and second sequences.
  private final int myMaxX;
  private final int myMaxY;
  /**
   * @param first  first sequence (x axis)
   * @param second second sequence (y axis)
   */
  public IntLCS(int[] first, int[] second) {
    myFirst = first;
    mySecond = second;
    myMaxX = myFirst.length;
    myMaxY = mySecond.length;
    myPathsMatrix = new LinkedDiffPaths(myMaxX, myMaxY);
    // There are myMaxX + myMaxY + 1 diagonals; -1 marks "no step recorded yet".
    myPrevPathKey = new int[myMaxX + myMaxY + 1];
    Arrays.fill(myPrevPathKey, -1);
    myPrevEnds = new int[myMaxX + myMaxY + 1];
    myCurrentEnds = new int[myMaxX + myMaxY + 1];
  }
  /**
   * Runs the search and returns the edit distance d (number of non-diagonal
   * steps). As a side effect fills the path structure available through
   * {@link #getPaths()}.
   *
   * NOTE(review): the trailing RuntimeException is the "unreachable" fallback;
   * the search is expected to terminate because d ranges up to myMaxX + myMaxY.
   */
  public int execute() {
    for (int d =0; d <= myMaxX + myMaxY; d++) {
      // Diagonal range reachable with exactly d edits, clipped to the grid.
      int minDiag = -calcBound(myMaxY, d);
      int maxDiag = calcBound(myMaxX, d);
      if (d != 0)
        // Carry the previous contour's end points forward for the diagonals in range.
        System.arraycopy(myPrevEnds, minDiag + myMaxY, myCurrentEnds, minDiag + myMaxY, maxDiag - minDiag);
      else {
        // d == 0: only the main diagonal exists; consume the common prefix.
        int end = skipEquals(0, 0);
        if (end > 0) {
          int xy = (end) - 1;
          // Record the initial diagonal run ending at (xy, xy).
          myPrevPathKey[myMaxY] = myPathsMatrix.encodeStep(xy, xy, end, false, -1);
        }
        // Sequences identical => zero edits needed.
        if (myMaxX == myMaxY && end == myMaxX) return 0;
        myPrevEnds[myMaxY] = end;
        continue;
      }
      // Only diagonals with the parity of d are updated on this contour.
      for (int k = minDiag; k <= maxDiag; k += 2) {
        int end;
        if (k == -d) {
          // Lowest diagonal: reachable only via a vertical step from diagonal k+1.
          int prevEndV = myPrevEnds[k + 1 + myMaxY];
          int vertical = findDiagonalEnd(k + 1, prevEndV, true);
          end = encodeStep(prevEndV, vertical, k, true);
        } else if (k == d) {
          // Highest diagonal: reachable only via a horizontal step from diagonal k-1.
          int prevEndH = myPrevEnds[k - 1 + myMaxY];
          int horisontal = findDiagonalEnd(k - 1, prevEndH, false);
          end = encodeStep(prevEndH, horisontal, k, false);
        } else {
          // Interior diagonal: choose whichever neighbor reaches further.
          int prevEndH = myPrevEnds[k - 1 + myMaxY];
          int prevEndV = myPrevEnds[k + 1 + myMaxY];
          if (prevEndH+1 > prevEndV) {
            int horisontal = findDiagonalEnd(k - 1, prevEndH, false);
            end = encodeStep(prevEndH, horisontal, k, false);
          } else {
            int vertical = findDiagonalEnd(k + 1, prevEndV, true);
            end = encodeStep(prevEndV, vertical, k, true);
          }
        }
        myCurrentEnds[k + myMaxY] = end;
        // Reached the bottom-right corner on its diagonal: d edits suffice.
        if (k == myMaxX - myMaxY && end == myMaxX) return d;
      }
      // Swap the contour buffers for the next value of d.
      int[] temps = myCurrentEnds;
      myCurrentEnds = myPrevEnds;
      myPrevEnds = temps;
    }
    throw new RuntimeException();
  }
  /**
   * @return the step structure recorded during {@link #execute()}
   */
  public LinkedDiffPaths getPaths() {
    return myPathsMatrix;
  }
  /**
   * Takes one horizontal or vertical step from the end point of diagonal
   * {@code prevDiagonal} and then follows the run of equal elements ("snake").
   *
   * @param prevDiagonal diagonal the step originates from
   * @param prevEnd      furthest end reached on that diagonal
   * @param isVertical   true to advance y (vertical step), false to advance x
   * @return length of the matching run after the step
   */
  private int findDiagonalEnd(int prevDiagonal, int prevEnd, boolean isVertical) {
    int x = prevEnd;
    int y = x - prevDiagonal;
    if (isVertical) y++;
    else x++;
    return skipEquals(x, y);
  }
  /**
   * Computes the new furthest end on diagonal {@code tDiagonal} after a
   * single step plus a diagonal run of {@code diagLength}, and records the
   * step in the path structure unless its end cell falls outside the grid.
   *
   * @return the new furthest-reaching end value for diagonal tDiagonal
   */
  private int encodeStep(int prevEnd, int diagLength, int tDiagonal, boolean afterVertical) {
    int end = prevEnd + diagLength;
    int prevDiagonal = tDiagonal + myMaxY;
    // A horizontal step advances x by one; a vertical step does not.
    if (!afterVertical) end++;
    // Index of the neighbor diagonal the step came from.
    if (afterVertical) prevDiagonal++;
    else prevDiagonal--;
    int x = end - 1;
    int y = x - tDiagonal;
    // Steps whose end cell lies outside the grid are not recorded.
    if (x == -1 || y == -1 || x >= myMaxX || y >= myMaxY) return end;
    myPrevPathKey[tDiagonal + myMaxY] = myPathsMatrix.encodeStep(x, y, diagLength, afterVertical, myPrevPathKey[prevDiagonal]);
    return end;
  }
  // Clips the diagonal range for contour d: it widens with d until the grid
  // boundary `bound` is hit, then narrows symmetrically.
  private int calcBound(int bound, int d) {
    return (d <= bound) ? d : 2 * bound - d;
  }
  // Number of consecutive equal elements starting at position (x, y).
  private int skipEquals(int x, int y) {
    int skipped = 0;
    while (x < myMaxX && y < myMaxY && myFirst[x] == mySecond[y]) {
      skipped += 1;
      x++;
      y++;
    }
    return skipped;
  }
}
| apache-2.0 |
forcedotcom/aura | aura-impl/src/test/java/org/auraframework/impl/java/controller/DebugController.java | 1404 | /*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.impl.java.controller;
import java.util.LinkedHashMap;
import java.util.Map;
import org.auraframework.annotations.Annotations.ServiceComponent;
import org.auraframework.ds.servicecomponent.Controller;
import org.auraframework.service.DefinitionService;
import org.auraframework.system.Annotations.AuraEnabled;
import com.google.common.collect.Maps;
import javax.inject.Inject;
@ServiceComponent
public class DebugController implements Controller {

    @Inject
    private DefinitionService definitionService;

    /**
     * Exposes server-side debug information to the client.
     *
     * @return an insertion-ordered map of label to value; currently only the
     *         identity hash of the injected {@link DefinitionService}
     * @throws Exception never thrown by the current implementation; declared
     *         to satisfy the {@code @AuraEnabled} action contract
     */
    @AuraEnabled
    public Map<String, String> getInfo() throws Exception {
        LinkedHashMap<String, String> info = new LinkedHashMap<String, String>();
        info.put("DefinitionService.hashCode()", "" + definitionService.hashCode());
        return info;
    }
}
| apache-2.0 |
quattor/pan | panc/src/main/java/org/quattor/pan/dml/functions/IsLong.java | 1408 | /*
Copyright (c) 2006 Charles A. Loomis, Jr, Cedric Duprilot, and
Centre National de la Recherche Scientifique (CNRS).
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
$HeadURL: https://svn.lal.in2p3.fr/LCG/QWG/panc/trunk/src/org/quattor/pan/dml/functions/IsLong.java $
$Id: IsLong.java 3107 2008-04-07 07:03:42Z loomis $
*/
package org.quattor.pan.dml.functions;
import org.quattor.pan.dml.Operation;
import org.quattor.pan.dml.data.LongProperty;
import org.quattor.pan.exceptions.SyntaxException;
import org.quattor.pan.ttemplate.SourceRange;
/**
 * Thin factory exposing the pan {@code is_long} built-in function. All work
 * is delegated to {@link IsOfType}, parameterized with {@link LongProperty}
 * so the produced operation checks whether its argument is a long value.
 *
 * @author loomis
 */
public final class IsLong {

    // Static factory only; this class is never instantiated.
    private IsLong() {
    }

    /**
     * Builds the DML operation implementing an {@code is_long(...)} call.
     *
     * @param sourceRange location of the call in the template source
     * @param operations  argument expressions of the function call
     * @return operation evaluating to true iff its argument is a long
     * @throws SyntaxException if the argument list is invalid
     */
    public static Operation getInstance(SourceRange sourceRange,
            Operation... operations) throws SyntaxException {
        return IsOfType.getInstance(sourceRange, LongProperty.class,
                "is_long", operations);
    }
}
| apache-2.0 |
datanucleus/tests | jdo/general/src/java/org/datanucleus/samples/detach/ClassSuperElements.java | 1659 | /**********************************************************************
Copyright (c) Aug 5, 2004 Erik Bengtson and others.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
...
**********************************************************************/
package org.datanucleus.samples.detach;
import java.util.List;
/**
* @author Erik Bengtson
* @version $Revision: 1.1 $
*/
public class ClassSuperElements
{
List toCheckPrefetchInSuper;
/* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
public boolean equals(Object obj)
{
if( !(obj instanceof ClassSuperElements) )
{
return false;
}
ClassSuperElements elm = (ClassSuperElements) obj;
return toCheckPrefetchInSuper.equals(elm.toCheckPrefetchInSuper);
}
/* (non-Javadoc)
* @see java.lang.Object#hashCode()
*/
public int hashCode()
{
int hash = (toCheckPrefetchInSuper == null ? 1 : toCheckPrefetchInSuper.hashCode());
hash = hash ^ super.hashCode();
return hash;
}
}
| apache-2.0 |
xiangyong/spring-test-dbunit | spring-test-dbunit/src/test/java/com/github/springtestdbunit/setup/TruncateSetupOnMethodTest.java | 1895 | /*
* Copyright 2002-2015 the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.springtestdbunit.setup;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import org.springframework.transaction.annotation.Transactional;
import com.github.springtestdbunit.TransactionDbUnitTestExecutionListener;
import com.github.springtestdbunit.annotation.DatabaseOperation;
import com.github.springtestdbunit.annotation.DatabaseSetup;
import com.github.springtestdbunit.entity.EntityAssert;
/**
 * Integration test verifying that a method-level {@code @DatabaseSetup} with
 * {@code DatabaseOperation.TRUNCATE_TABLE} is applied before the test method
 * runs, leaving the entities described by {@code delete.xml} in the state that
 * {@code EntityAssert} expects.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("/META-INF/dbunit-context.xml")
@TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, TransactionDbUnitTestExecutionListener.class })
@Transactional
public class TruncateSetupOnMethodTest {

    // Helper bean (from the Spring context) that asserts the expected DB state.
    @Autowired
    private EntityAssert entityAssert;

    /**
     * Runs after the TRUNCATE_TABLE setup has been applied and checks the
     * resulting entity state.
     */
    @Test
    @DatabaseSetup(type = DatabaseOperation.TRUNCATE_TABLE, value = "/META-INF/db/delete.xml")
    public void test() throws Exception {
        this.entityAssert.assertValues();
    }
}
| apache-2.0 |
hejunbinlan/RapidORM | library/src/main/java/com/wangjie/rapidorm/core/generate/withoutreflection/ModelWithoutReflection.java | 248 | package com.wangjie.rapidorm.core.generate.withoutreflection;
/**
* Author: wangjie
* Email: tiantian.china.2@gmail.com
* Date: 6/29/15.
*/
/**
 * Deprecated marker interface for models that expose their properties without
 * using reflection. The single accessor it once declared is commented out
 * below, so implementing it currently imposes no obligations.
 *
 * @param <T> the model type — TODO confirm intended use; no usage is visible here
 */
@Deprecated
public interface ModelWithoutReflection<T> {
    // IModelProperty<T> getModelProperty();
}
| apache-2.0 |
oscerd/camel | components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java | 29154 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms;
import java.io.File;
import java.io.InputStream;
import java.io.Reader;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.jms.BytesMessage;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.MessageFormatException;
import javax.jms.ObjectMessage;
import javax.jms.Session;
import javax.jms.StreamMessage;
import javax.jms.TextMessage;
import org.w3c.dom.Node;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.NoTypeConversionAvailableException;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.StreamCache;
import org.apache.camel.WrappedFile;
import org.apache.camel.impl.DefaultExchangeHolder;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.ExchangeHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.camel.component.jms.JmsConstants.JMS_X_GROUP_ID;
import static org.apache.camel.component.jms.JmsMessageHelper.normalizeDestinationName;
import static org.apache.camel.component.jms.JmsMessageType.Bytes;
import static org.apache.camel.component.jms.JmsMessageType.Map;
import static org.apache.camel.component.jms.JmsMessageType.Object;
import static org.apache.camel.component.jms.JmsMessageType.Text;
/**
* A Strategy used to convert between a Camel {@link Exchange} and {@link JmsMessage}
* to and from a JMS {@link Message}
*
* @version
*/
public class JmsBinding {

    private static final Logger LOG = LoggerFactory.getLogger(JmsBinding.class);

    // Owning endpoint; null when the no-arg constructor was used.
    private final JmsEndpoint endpoint;
    // Decides which headers may cross the Camel <-> JMS boundary.
    private final HeaderFilterStrategy headerFilterStrategy;
    // Encodes/decodes header names into legal JMS property identifiers.
    private final JmsKeyFormatStrategy jmsKeyFormatStrategy;
    // Optional callback fired when a new JMS Message is created; may be null.
    private final MessageCreatedStrategy messageCreatedStrategy;

    /**
     * Creates a standalone binding with default strategies and no endpoint.
     */
    public JmsBinding() {
        this.endpoint = null;
        this.headerFilterStrategy = new JmsHeaderFilterStrategy(false);
        this.jmsKeyFormatStrategy = new DefaultJmsKeyFormatStrategy();
        this.messageCreatedStrategy = null;
    }

    /**
     * Creates a binding for the given endpoint, using the endpoint's configured
     * strategies where present and defaults otherwise.
     *
     * @param endpoint the owning JMS endpoint
     */
    public JmsBinding(JmsEndpoint endpoint) {
        this.endpoint = endpoint;
        if (endpoint.getHeaderFilterStrategy() != null) {
            this.headerFilterStrategy = endpoint.getHeaderFilterStrategy();
        } else {
            this.headerFilterStrategy = new JmsHeaderFilterStrategy(endpoint.isIncludeAllJMSXProperties());
        }
        if (endpoint.getJmsKeyFormatStrategy() != null) {
            this.jmsKeyFormatStrategy = endpoint.getJmsKeyFormatStrategy();
        } else {
            this.jmsKeyFormatStrategy = new DefaultJmsKeyFormatStrategy();
        }
        if (endpoint.getMessageCreatedStrategy() != null) {
            this.messageCreatedStrategy = endpoint.getMessageCreatedStrategy();
        } else if (endpoint.getComponent() != null) {
            // fallback and use from component
            this.messageCreatedStrategy = endpoint.getComponent().getMessageCreatedStrategy();
        } else {
            this.messageCreatedStrategy = null;
        }
    }

    /**
     * Extracts the body from the JMS message
     *
     * @param exchange the exchange
     * @param message the message to extract its body
     * @return the body, can be <tt>null</tt>
     */
    public Object extractBodyFromJms(Exchange exchange, Message message) {
        try {
            // is a custom message converter configured on endpoint then use it instead of doing the extraction
            // based on message type
            if (endpoint != null && endpoint.getMessageConverter() != null) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Extracting body using a custom MessageConverter: {} from JMS message: {}", endpoint.getMessageConverter(), message);
                }
                return endpoint.getMessageConverter().fromMessage(message);
            }

            // if we are configured to not map the jms message then return it as body
            if (endpoint != null && !endpoint.getConfiguration().isMapJmsMessage()) {
                LOG.trace("Option map JMS message is false so using JMS message as body: {}", message);
                return message;
            }

            if (message instanceof ObjectMessage) {
                LOG.trace("Extracting body as a ObjectMessage from JMS message: {}", message);
                ObjectMessage objectMessage = (ObjectMessage)message;
                Object payload = objectMessage.getObject();
                // a DefaultExchangeHolder means the sender used transferExchange=true;
                // unmarshal it onto this exchange and use its IN body
                if (payload instanceof DefaultExchangeHolder) {
                    DefaultExchangeHolder holder = (DefaultExchangeHolder) payload;
                    DefaultExchangeHolder.unmarshal(exchange, holder);
                    return exchange.getIn().getBody();
                } else {
                    return objectMessage.getObject();
                }
            } else if (message instanceof TextMessage) {
                LOG.trace("Extracting body as a TextMessage from JMS message: {}", message);
                TextMessage textMessage = (TextMessage)message;
                return textMessage.getText();
            } else if (message instanceof MapMessage) {
                LOG.trace("Extracting body as a MapMessage from JMS message: {}", message);
                return createMapFromMapMessage((MapMessage)message);
            } else if (message instanceof BytesMessage) {
                LOG.trace("Extracting body as a BytesMessage from JMS message: {}", message);
                return createByteArrayFromBytesMessage((BytesMessage)message);
            } else if (message instanceof StreamMessage) {
                LOG.trace("Extracting body as a StreamMessage from JMS message: {}", message);
                return message;
            } else {
                // unknown message type: no body can be extracted
                return null;
            }
        } catch (JMSException e) {
            throw new RuntimeCamelException("Failed to extract body due to: " + e + ". Message: " + message, e);
        }
    }

    /**
     * Extracts the JMS headers and user properties from the given JMS message
     * into a Camel header map, applying the header filter strategy and decoding
     * property names back to their original Camel header names.
     *
     * @param jmsMessage the JMS message, may be <tt>null</tt> in which case an empty map is returned
     * @param exchange the current exchange, used when applying the header filter
     * @return map of header name to value
     */
    public Map<String, Object> extractHeadersFromJms(Message jmsMessage, Exchange exchange) {
        Map<String, Object> map = new HashMap<String, Object>();
        if (jmsMessage != null) {
            // lets populate the standard JMS message headers
            try {
                map.put("JMSCorrelationID", jmsMessage.getJMSCorrelationID());
                map.put("JMSDeliveryMode", jmsMessage.getJMSDeliveryMode());
                map.put("JMSDestination", jmsMessage.getJMSDestination());
                map.put("JMSExpiration", jmsMessage.getJMSExpiration());
                map.put("JMSMessageID", jmsMessage.getJMSMessageID());
                map.put("JMSPriority", jmsMessage.getJMSPriority());
                map.put("JMSRedelivered", jmsMessage.getJMSRedelivered());
                map.put("JMSTimestamp", jmsMessage.getJMSTimestamp());

                map.put("JMSReplyTo", JmsMessageHelper.getJMSReplyTo(jmsMessage));
                map.put("JMSType", JmsMessageHelper.getJMSType(jmsMessage));

                // this works around a bug in the ActiveMQ property handling
                map.put(JMS_X_GROUP_ID, JmsMessageHelper.getStringProperty(jmsMessage, JMS_X_GROUP_ID));
                map.put("JMSXUserID", JmsMessageHelper.getStringProperty(jmsMessage, "JMSXUserID"));
            } catch (JMSException e) {
                throw new RuntimeCamelException(e);
            }

            Enumeration<?> names;
            try {
                names = jmsMessage.getPropertyNames();
            } catch (JMSException e) {
                throw new RuntimeCamelException(e);
            }
            while (names.hasMoreElements()) {
                String name = names.nextElement().toString();
                try {
                    Object value = JmsMessageHelper.getProperty(jmsMessage, name);
                    // skip properties the filter strategy rejects
                    if (headerFilterStrategy != null
                            && headerFilterStrategy.applyFilterToExternalHeaders(name, value, exchange)) {
                        continue;
                    }

                    // must decode back from safe JMS header name to original header name
                    // when storing on this Camel JmsMessage object.
                    String key = jmsKeyFormatStrategy.decodeKey(name);
                    map.put(key, value);
                } catch (JMSException e) {
                    throw new RuntimeCamelException(name, e);
                }
            }
        }

        return map;
    }

    /**
     * Looks up an object property on the JMS message, first under the given
     * name and then under its encoded (JMS-safe) form.
     *
     * @param jmsMessage the JMS message
     * @param name the property name as used on the Camel side
     * @return the property value, or <tt>null</tt> if not present under either name
     * @throws JMSException if the property cannot be read
     */
    public Object getObjectProperty(Message jmsMessage, String name) throws JMSException {
        // try a direct lookup first
        Object answer = jmsMessage.getObjectProperty(name);
        if (answer == null) {
            // then encode the key and do another lookup
            String key = jmsKeyFormatStrategy.encodeKey(name);
            answer = jmsMessage.getObjectProperty(key);
        }
        return answer;
    }

    /**
     * Reads the full payload of a {@link BytesMessage} into a byte array.
     *
     * @param message the bytes message
     * @return the payload, or <tt>null</tt> when the body is larger than
     *         {@link Integer#MAX_VALUE} and cannot fit in a Java array
     * @throws JMSException if the message cannot be read
     */
    protected byte[] createByteArrayFromBytesMessage(BytesMessage message) throws JMSException {
        if (message.getBodyLength() > Integer.MAX_VALUE) {
            LOG.warn("Length of BytesMessage is too long: {}", message.getBodyLength());
            return null;
        }
        byte[] result = new byte[(int)message.getBodyLength()];
        message.readBytes(result);
        return result;
    }

    /**
     * Creates a JMS message from the Camel exchange and message
     *
     * @param exchange the current exchange
     * @param session the JMS session used to create the message
     * @return a newly created JMS Message instance containing the
     * @throws JMSException if the message could not be created
     */
    public Message makeJmsMessage(Exchange exchange, Session session) throws JMSException {
        Message answer = makeJmsMessage(exchange, exchange.getIn(), session, null);
        if (answer != null && messageCreatedStrategy != null) {
            messageCreatedStrategy.onMessageCreated(answer, session, exchange, null);
        }
        return answer;
    }

    /**
     * Creates a JMS message from the Camel exchange and message
     *
     * @param exchange the current exchange
     * @param camelMessage the body to make a javax.jms.Message as
     * @param session the JMS session used to create the message
     * @param cause optional exception occurred that should be sent as reply instead of a regular body
     * @return a newly created JMS Message instance containing the
     * @throws JMSException if the message could not be created
     */
    public Message makeJmsMessage(Exchange exchange, org.apache.camel.Message camelMessage, Session session, Exception cause) throws JMSException {
        Message answer = null;

        boolean alwaysCopy = endpoint != null && endpoint.getConfiguration().isAlwaysCopyMessage();
        boolean force = endpoint != null && endpoint.getConfiguration().isForceSendOriginalMessage();
        // reuse the original JMS message where allowed, instead of creating a copy
        if (!alwaysCopy && camelMessage instanceof JmsMessage) {
            JmsMessage jmsMessage = (JmsMessage)camelMessage;
            if (!jmsMessage.shouldCreateNewMessage() || force) {
                answer = jmsMessage.getJmsMessage();

                if (!force) {
                    // answer must match endpoint type: discard the reused message
                    // if it does not match the configured JmsMessageType
                    JmsMessageType type = endpoint != null ? endpoint.getConfiguration().getJmsMessageType() : null;
                    if (type != null && answer != null) {
                        if (type == JmsMessageType.Text) {
                            answer = answer instanceof TextMessage ? answer : null;
                        } else if (type == JmsMessageType.Bytes) {
                            answer = answer instanceof BytesMessage ? answer : null;
                        } else if (type == JmsMessageType.Map) {
                            answer = answer instanceof MapMessage ? answer : null;
                        } else if (type == JmsMessageType.Object) {
                            answer = answer instanceof ObjectMessage ? answer : null;
                        } else if (type == JmsMessageType.Stream) {
                            answer = answer instanceof StreamMessage ? answer : null;
                        }
                    }
                }
            }
        }

        if (answer == null) {
            if (cause != null) {
                // an exception occurred so send it as response
                LOG.debug("Will create JmsMessage with caused exception: {}", cause);
                // create jms message containing the caused exception
                answer = createJmsMessage(cause, session);
            } else {
                ObjectHelper.notNull(camelMessage, "message");
                // create regular jms message using the camel message body
                answer = createJmsMessage(exchange, camelMessage.getBody(), camelMessage.getHeaders(), session, exchange.getContext());
                appendJmsProperties(answer, exchange, camelMessage);
            }
        }

        if (answer != null && messageCreatedStrategy != null) {
            messageCreatedStrategy.onMessageCreated(answer, session, exchange, null);
        }
        return answer;
    }

    /**
     * Appends the JMS headers from the Camel {@link JmsMessage}
     */
    public void appendJmsProperties(Message jmsMessage, Exchange exchange) throws JMSException {
        appendJmsProperties(jmsMessage, exchange, exchange.getIn());
    }

    /**
     * Appends the JMS headers from the Camel {@link JmsMessage}
     */
    public void appendJmsProperties(Message jmsMessage, Exchange exchange, org.apache.camel.Message in) throws JMSException {
        Set<Map.Entry<String, Object>> entries = in.getHeaders().entrySet();
        for (Map.Entry<String, Object> entry : entries) {
            String headerName = entry.getKey();
            Object headerValue = entry.getValue();
            appendJmsProperty(jmsMessage, exchange, in, headerName, headerValue);
        }
    }

    /**
     * Appends a single Camel header to the JMS message, either as one of the
     * standard JMS headers or as a user property (after filtering and key
     * encoding). Values that are not legal JMS property types are skipped.
     *
     * @param jmsMessage the target JMS message
     * @param exchange the current exchange, used for type conversion
     * @param in the Camel message the header came from
     * @param headerName the Camel header name
     * @param headerValue the Camel header value
     * @throws JMSException if the header cannot be set on the message
     */
    public void appendJmsProperty(Message jmsMessage, Exchange exchange, org.apache.camel.Message in,
                                  String headerName, Object headerValue) throws JMSException {
        if (isStandardJMSHeader(headerName)) {
            if (headerName.equals("JMSCorrelationID")) {
                jmsMessage.setJMSCorrelationID(ExchangeHelper.convertToType(exchange, String.class, headerValue));
            } else if (headerName.equals("JMSReplyTo") && headerValue != null) {
                if (headerValue instanceof String) {
                    // if the value is a String we must normalize it first, and must include the prefix
                    // as ActiveMQ requires that when converting the String to a javax.jms.Destination type
                    headerValue = normalizeDestinationName((String) headerValue, true);
                }
                Destination replyTo = ExchangeHelper.convertToType(exchange, Destination.class, headerValue);
                JmsMessageHelper.setJMSReplyTo(jmsMessage, replyTo);
            } else if (headerName.equals("JMSType")) {
                jmsMessage.setJMSType(ExchangeHelper.convertToType(exchange, String.class, headerValue));
            } else if (headerName.equals("JMSPriority")) {
                jmsMessage.setJMSPriority(ExchangeHelper.convertToType(exchange, Integer.class, headerValue));
            } else if (headerName.equals("JMSDeliveryMode")) {
                JmsMessageHelper.setJMSDeliveryMode(exchange, jmsMessage, headerValue);
            } else if (headerName.equals("JMSExpiration")) {
                jmsMessage.setJMSExpiration(ExchangeHelper.convertToType(exchange, Long.class, headerValue));
            } else {
                // The following properties are set by the MessageProducer:
                // JMSDestination
                // The following are set on the underlying JMS provider:
                // JMSMessageID, JMSTimestamp, JMSRedelivered
                // log at trace level to not spam log
                LOG.trace("Ignoring JMS header: {} with value: {}", headerName, headerValue);
            }
        } else if (shouldOutputHeader(in, headerName, headerValue, exchange)) {
            // only primitive headers and strings is allowed as properties
            // see message properties: http://java.sun.com/j2ee/1.4/docs/api/javax/jms/Message.html
            Object value = getValidJMSHeaderValue(headerName, headerValue);
            if (value != null) {
                // must encode to safe JMS header name before setting property on jmsMessage
                String key = jmsKeyFormatStrategy.encodeKey(headerName);
                // set the property
                JmsMessageHelper.setProperty(jmsMessage, key, value);
            } else if (LOG.isDebugEnabled()) {
                // okay the value is not a primitive or string so we cannot sent it over the wire
                LOG.debug("Ignoring non primitive header: {} of class: {} with value: {}",
                        new Object[]{headerName, headerValue.getClass().getName(), headerValue});
            }
        }
    }

    /**
     * Is the given header a standard JMS header
     * @param headerName the header name
     * @return <tt>true</tt> if its a standard JMS header
     */
    protected boolean isStandardJMSHeader(String headerName) {
        if (!headerName.startsWith("JMS")) {
            return false;
        }
        if (headerName.startsWith("JMSX")) {
            return false;
        }
        // vendors will use JMS_XXX as their special headers (where XXX is vendor name, such as JMS_IBM)
        if (headerName.startsWith("JMS_")) {
            return false;
        }

        // the 4th char must be a letter to be a standard JMS header
        if (headerName.length() > 3) {
            Character fourth = headerName.charAt(3);
            if (Character.isLetter(fourth)) {
                return true;
            }
        }

        return false;
    }

    /**
     * Strategy to test if the given header is valid according to the JMS spec to be set as a property
     * on the JMS message.
     * <p/>
     * This default implementation will allow:
     * <ul>
     *   <li>any primitives and their counter Objects (Integer, Double etc.)</li>
     *   <li>String and any other literals, Character, CharSequence</li>
     *   <li>Boolean</li>
     *   <li>Number</li>
     *   <li>java.util.Date</li>
     * </ul>
     *
     * @param headerName   the header name
     * @param headerValue  the header value
     * @return the value to use, <tt>null</tt> to ignore this header
     */
    protected Object getValidJMSHeaderValue(String headerName, Object headerValue) {
        if (headerValue instanceof String) {
            return headerValue;
        } else if (headerValue instanceof BigInteger) {
            return headerValue.toString();
        } else if (headerValue instanceof BigDecimal) {
            return headerValue.toString();
        } else if (headerValue instanceof Number) {
            return headerValue;
        } else if (headerValue instanceof Character) {
            return headerValue;
        } else if (headerValue instanceof CharSequence) {
            return headerValue.toString();
        } else if (headerValue instanceof Boolean) {
            return headerValue;
        } else if (headerValue instanceof Date) {
            return headerValue.toString();
        }
        return null;
    }

    /**
     * Creates an {@link ObjectMessage} carrying the given exception, used when
     * an exception should be sent back as the reply.
     *
     * @param cause the exception to transfer
     * @param session the JMS session used to create the message
     * @return the created object message with default delivery mode
     * @throws JMSException if the message could not be created
     */
    protected Message createJmsMessage(Exception cause, Session session) throws JMSException {
        LOG.trace("Using JmsMessageType: {}", Object);
        Message answer = session.createObjectMessage(cause);
        // ensure default delivery mode is used by default
        answer.setJMSDeliveryMode(Message.DEFAULT_DELIVERY_MODE);
        return answer;
    }

    /**
     * Creates a JMS message for the given body, choosing the concrete message
     * type from (in order): the transferExchange option, a custom message
     * converter, the {@code CamelJmsMessageType} header, the endpoint
     * configuration, and finally the body's Java type.
     *
     * @param exchange the current exchange
     * @param body the Camel message body, may be <tt>null</tt>
     * @param headers the Camel headers
     * @param session the JMS session used to create the message
     * @param context the Camel context used for type conversion
     * @return the created JMS message
     * @throws JMSException if the message could not be created, or if the body
     *         is null and the allowNullBody option is false
     */
    protected Message createJmsMessage(Exchange exchange, Object body, Map<String, Object> headers, Session session, CamelContext context) throws JMSException {
        JmsMessageType type = null;

        // special for transferExchange
        if (endpoint != null && endpoint.isTransferExchange()) {
            LOG.trace("Option transferExchange=true so we use JmsMessageType: Object");
            Serializable holder = DefaultExchangeHolder.marshal(exchange);
            Message answer = session.createObjectMessage(holder);
            // ensure default delivery mode is used by default
            answer.setJMSDeliveryMode(Message.DEFAULT_DELIVERY_MODE);
            return answer;
        }

        // use a custom message converter
        if (endpoint != null && endpoint.getMessageConverter() != null) {
            if (LOG.isTraceEnabled()) {
                LOG.trace("Creating JmsMessage using a custom MessageConverter: {} with body: {}", endpoint.getMessageConverter(), body);
            }
            return endpoint.getMessageConverter().toMessage(body, session);
        }

        // check if header have a type set, if so we force to use it
        if (headers.containsKey(JmsConstants.JMS_MESSAGE_TYPE)) {
            type = context.getTypeConverter().convertTo(JmsMessageType.class, headers.get(JmsConstants.JMS_MESSAGE_TYPE));
        } else if (endpoint != null && endpoint.getConfiguration().getJmsMessageType() != null) {
            // force a specific type from the endpoint configuration
            type = endpoint.getConfiguration().getJmsMessageType();
        } else {
            type = getJMSMessageTypeForBody(exchange, body, headers, session, context);
        }

        // create the JmsMessage based on the type
        if (type != null) {
            if (body == null && (endpoint != null && !endpoint.getConfiguration().isAllowNullBody())) {
                throw new JMSException("Cannot send message as message body is null, and option allowNullBody is false.");
            }
            LOG.trace("Using JmsMessageType: {}", type);
            Message answer = createJmsMessageForType(exchange, body, headers, session, context, type);
            // ensure default delivery mode is used by default
            answer.setJMSDeliveryMode(Message.DEFAULT_DELIVERY_MODE);
            return answer;
        }

        // check for null body
        if (body == null && (endpoint != null && !endpoint.getConfiguration().isAllowNullBody())) {
            throw new JMSException("Cannot send message as message body is null, and option allowNullBody is false.");
        }

        // warn if the body could not be mapped
        if (body != null && LOG.isWarnEnabled()) {
            LOG.warn("Cannot determine specific JmsMessage type to use from body class."
                    + " Will use generic JmsMessage."
                    + " Body class: " + ObjectHelper.classCanonicalName(body)
                    + ". If you want to send a POJO then your class might need to implement java.io.Serializable"
                    + ", or you can force a specific type by setting the jmsMessageType option on the JMS endpoint.");
        }

        // return a default message
        Message answer = session.createMessage();
        // ensure default delivery mode is used by default
        answer.setJMSDeliveryMode(Message.DEFAULT_DELIVERY_MODE);
        return answer;
    }

    /**
     * Return the {@link JmsMessageType}
     *
     * @return type or null if no mapping was possible
     */
    protected JmsMessageType getJMSMessageTypeForBody(Exchange exchange, Object body, Map<String, Object> headers, Session session, CamelContext context) {
        JmsMessageType type = null;
        // let body determine the type
        if (body instanceof Node || body instanceof String) {
            type = Text;
        } else if (body instanceof byte[] || body instanceof WrappedFile || body instanceof File || body instanceof Reader
                || body instanceof InputStream || body instanceof ByteBuffer || body instanceof StreamCache) {
            type = Bytes;
        } else if (body instanceof Map) {
            type = Map;
        } else if (body instanceof Serializable) {
            type = Object;
        } else if (exchange.getContext().getTypeConverter().tryConvertTo(File.class, body) != null
                || exchange.getContext().getTypeConverter().tryConvertTo(InputStream.class, body) != null) {
            // body is convertible to a file or stream, so send it as bytes
            type = Bytes;
        }
        return type;
    }

    /**
     *
     * Create the {@link Message}
     *
     * @return jmsMessage or null if the mapping was not successfully
     */
    protected Message createJmsMessageForType(Exchange exchange, Object body, Map<String, Object> headers, Session session, CamelContext context, JmsMessageType type) throws JMSException {
        switch (type) {
        case Text: {
            TextMessage message = session.createTextMessage();
            if (body != null) {
                String payload = context.getTypeConverter().convertTo(String.class, exchange, body);
                message.setText(payload);
            }
            return message;
        }
        case Bytes: {
            BytesMessage message = session.createBytesMessage();
            if (body != null) {
                byte[] payload = context.getTypeConverter().convertTo(byte[].class, exchange, body);
                message.writeBytes(payload);
            }
            return message;
        }
        case Map: {
            MapMessage message = session.createMapMessage();
            if (body != null) {
                Map<?, ?> payload = context.getTypeConverter().convertTo(Map.class, exchange, body);
                populateMapMessage(message, payload, context);
            }
            return message;
        }
        case Object:
            ObjectMessage message = session.createObjectMessage();
            if (body != null) {
                try {
                    Serializable payload = context.getTypeConverter().mandatoryConvertTo(Serializable.class, exchange, body);
                    message.setObject(payload);
                } catch (NoTypeConversionAvailableException e) {
                    // cannot convert to serializable then thrown an exception to avoid sending a null message
                    JMSException cause = new MessageFormatException(e.getMessage());
                    cause.initCause(e);
                    throw cause;
                }
            }
            return message;
        default:
            break;
        }
        return null;
    }

    /**
     * Populates a {@link MapMessage} from a {@link Map} instance.
     */
    protected void populateMapMessage(MapMessage message, Map<?, ?> map, CamelContext context)
        throws JMSException {
        for (Entry<?, ?> entry : map.entrySet()) {
            String keyString = CamelContextHelper.convertTo(context, String.class, entry.getKey());
            // entries whose key cannot be converted to a String are skipped
            if (keyString != null) {
                message.setObject(keyString, entry.getValue());
            }
        }
    }

    /**
     * Extracts a {@link Map} from a {@link MapMessage}
     */
    public Map<String, Object> createMapFromMapMessage(MapMessage message) throws JMSException {
        Map<String, Object> answer = new HashMap<String, Object>();
        Enumeration<?> names = message.getMapNames();
        while (names.hasMoreElements()) {
            String name = names.nextElement().toString();
            Object value = message.getObject(name);
            answer.put(name, value);
        }
        return answer;
    }

    /**
     * Strategy to allow filtering of headers which are put on the JMS message
     * <p/>
     * <b>Note</b>: Currently only supports sending java identifiers as keys
     */
    protected boolean shouldOutputHeader(org.apache.camel.Message camelMessage, String headerName,
                                         Object headerValue, Exchange exchange) {
        return headerFilterStrategy == null
                || !headerFilterStrategy.applyFilterToCamelHeaders(headerName, headerValue, exchange);
    }
}
| apache-2.0 |
NikhilVijayakumar/SalesPerson | EnergyEyeSalesPerson/src/com/energyeye/salesperson/dbComponents/IDBHelper.java | 366 | package com.energyeye.salesperson.dbComponents;
public interface IDBHelper {
public boolean onCreate();
public void insert();
public void update(String whereClause, String[] whereArgs);
public void delete(String where, String[] args);
public void select(String[] projection, String selection,String[] selectionArgs, String sortOrder);
}
| apache-2.0 |
dylanswartz/nakamura | bundles/files/impl/src/main/java/org/sakaiproject/nakamura/files/search/RelatedContentSearchBatchResultProcessor.java | 7266 | /**
* Licensed to the Sakai Foundation (SF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.sakaiproject.nakamura.files.search;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.commons.json.JSONException;
import org.apache.sling.commons.json.io.JSONWriter;
import org.sakaiproject.nakamura.api.lite.Session;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.api.lite.content.Content;
import org.sakaiproject.nakamura.api.lite.content.ContentManager;
import org.sakaiproject.nakamura.api.search.solr.Result;
import org.sakaiproject.nakamura.api.search.solr.SolrSearchBatchResultProcessor;
import org.sakaiproject.nakamura.api.search.solr.SolrSearchConstants;
import org.sakaiproject.nakamura.api.search.solr.SolrSearchException;
import org.sakaiproject.nakamura.api.search.solr.SolrSearchResultProcessor;
import org.sakaiproject.nakamura.api.search.solr.SolrSearchUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Iterator;
import java.util.Set;
/**
 * Create a feed that lists content related to the content in My Library. The criteria
 * that should be used for this are:
 * <p>
 * - Other content with similar words in the title</br> - Other content from my contact's
 * library</br> - Other content with similar tags</br> - Other content with similar
 * directory locations
 * </p>
 *
 * When fewer than 11 items are found for these criteria, the feed should be filled up
 * with random content. However, preference should be given to items that have a thumbnail
 * (page1-small.jpg), a description, tags and comments.
 */
@Component(immediate = true, metatype = true)
@Properties(value = {
    @Property(name = "service.vendor", value = "The Sakai Foundation"),
    @Property(name = SolrSearchConstants.REG_BATCH_PROCESSOR_NAMES, value = "RelatedContentSearchBatchResultProcessor") })
@Service(value = SolrSearchBatchResultProcessor.class)
public class RelatedContentSearchBatchResultProcessor extends
    LiteFileSearchBatchResultProcessor {

  private static final Logger LOG = LoggerFactory
      .getLogger(RelatedContentSearchBatchResultProcessor.class);

  private static final String DEFAULT_SEARCH_PROC_TARGET = "(&("
      + SolrSearchResultProcessor.DEFAULT_PROCESSOR_PROP + "=true))";
  @Reference(target = DEFAULT_SEARCH_PROC_TARGET)
  private transient SolrSearchResultProcessor defaultSearchProcessor;

  /**
   * "These go to eleven"
   */
  public static final int VOLUME = 11;

  /**
   * {@inheritDoc}
   *
   * @see org.sakaiproject.nakamura.files.search.LiteFileSearchBatchResultProcessor#writeResults(org.apache.sling.api.SlingHttpServletRequest,
   *      org.apache.sling.commons.json.io.JSONWriter, java.util.Iterator)
   */
  @Override
  public void writeResults(SlingHttpServletRequest request, JSONWriter write,
      Iterator<Result> iterator) throws JSONException {
    long startTicks = System.currentTimeMillis();
    // Paint the real matches first; the helper returns the set of paths it
    // has already written so duplicates can be skipped below.
    final Set<String> uniquePathsProcessed = super.writeResultsInternal(request, write,
        iterator);
    final int resultsCount = uniquePathsProcessed.size();
    if (resultsCount < VOLUME) {
      /* we need to grab some random content to reach minimum of 11 results. */
      final Session session = StorageClientUtils.adaptToSession(request
          .getResourceResolver().adaptTo(javax.jcr.Session.class));
      final String user = MeManagerViewerSearchPropertyProvider.getUser(request);
      // query to find ALL content that is not mine, boost some fields that have values
      final StringBuilder sourceQuery = new StringBuilder(
          "(resourceType:sakai/pooled-content AND (manager:((everyone OR anonymous) AND NOT ");
      sourceQuery.append(user);
      sourceQuery.append(") OR viewer:((everyone OR anonymous) AND NOT ");
      sourceQuery.append(user);
      sourceQuery
          .append("))) OR (resourceType:sakai/pooled-content AND (manager:((everyone OR anonymous) AND NOT ");
      sourceQuery.append(user);
      sourceQuery.append(") OR viewer:((everyone OR anonymous) AND NOT ");
      sourceQuery.append(user);
      // FYI: ^4 == 4 times boost; default boost value is one
      sourceQuery.append(")) AND (description:[* TO *] OR tag:[* TO *]))^4");
      try {
        final Iterator<Result> i = SolrSearchUtil.getRandomResults(request,
            defaultSearchProcessor, sourceQuery.toString(), "items",
            String.valueOf(VOLUME), "page", "0", "sort", "score desc");
        if (i != null) {
          final ContentManager contentManager = session.getContentManager();
          while (i.hasNext() && uniquePathsProcessed.size() <= VOLUME) {
            final Result result = i.next();
            final String path = (String) result.getFirstValue("path");
            if (uniquePathsProcessed.contains(path)) {
              // we have already painted this result
              continue;
            }
            final Content content = contentManager.get(path);
            if (content != null) {
              super.handleContent(content, session, write, 0);
              uniquePathsProcessed.add(path);
            } else {
              // fail quietly in this edge case
              LOG.debug("Content not found: {}", path);
            }
          }
          if (uniquePathsProcessed.size() < VOLUME) {
            LOG.debug(
                "Did not meet functional specification. There should be at least {} results; actual size was: {}",
                VOLUME, uniquePathsProcessed.size());
          }
        }
      } catch (AccessDeniedException e) {
        // quietly swallow access denied
        LOG.debug(e.getLocalizedMessage(), e);
      } catch (SolrSearchException e) {
        // fixed: previously referenced an undeclared "LOGGER" field, which
        // did not compile — the declared logger is LOG
        LOG.error(e.getLocalizedMessage(), e);
        throw new IllegalStateException(e);
      } catch (StorageClientException e) {
        LOG.error(e.getLocalizedMessage(), e);
        throw new IllegalStateException(e);
      }
    }
    long endTicks = System.currentTimeMillis();
    if (LOG.isDebugEnabled()) {
      LOG.debug("writeResults() took {} seconds",
          new Object[] { (float) (endTicks - startTicks) / 1000 });
    }
  }
}
| apache-2.0 |
wso2/wso2-commons-vfs | commons-vfs2/src/main/java/org/apache/commons/vfs2/util/OsFamily.java | 1624 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.vfs2.util;
/**
* An enumerated type, which represents an OS family.
*/
/**
 * An enumerated type, which represents an OS family.
 */
public final class OsFamily {
    private final String name;
    private final OsFamily[] families;

    OsFamily(final String name) {
        // A family with no parents — delegate to the main constructor.
        this(name, new OsFamily[0]);
    }

    OsFamily(final String name, final OsFamily[] families) {
        this.name = name;
        this.families = families;
    }

    /**
     * Returns the name of this family.
     *
     * @return The name of this family.
     */
    public String getName() {
        return name;
    }

    /**
     * Returns the OS families that this family belongs to.
     *
     * @return an array of OSFamily objects that this family belongs to.
     */
    public OsFamily[] getFamilies() {
        return families;
    }
}
| apache-2.0 |
jexp/idea2 | plugins/ant/src/com/intellij/lang/ant/AntPropertyRefSelectioner.java | 2940 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.lang.ant;
import com.intellij.codeInsight.editorActions.ExtendWordSelectionHandler;
import com.intellij.lang.ant.psi.AntFile;
import com.intellij.lang.ant.psi.impl.reference.AntPropertyReference;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
/**
* @author Eugene Zhuravlev
* Date: Sep 14, 2007
*/
public class AntPropertyRefSelectioner implements ExtendWordSelectionHandler {

  /** Extend-word applies when the element covers at least one Ant property reference. */
  public boolean canSelect(final PsiElement e) {
    return getRangesToSelect(e).size() > 0;
  }

  /**
   * Returns the property-reference ranges inside {@code e} that contain the caret offset.
   */
  public List<TextRange> select(final PsiElement e, final CharSequence editorText, final int cursorOffset, final Editor editor) {
    List<TextRange> rangesToSelect = getRangesToSelect(e);
    // Drop ranges that do not contain the caret. (Previously used a raw-typed
    // Iterator with an unchecked cast; the generic iterator avoids both.)
    for (Iterator<TextRange> it = rangesToSelect.iterator(); it.hasNext();) {
      final TextRange range = it.next();
      if (!range.contains(cursorOffset)) {
        it.remove();
      }
    }
    return rangesToSelect;
  }

  /**
   * Collects the text ranges of all {@link AntPropertyReference}s found at the
   * offset of {@code e} in the corresponding Ant file, shifted into file
   * coordinates and limited to ranges lying fully inside {@code e}.
   *
   * @return matching ranges, or an empty list when no Ant context applies
   */
  @NotNull
  private static List<TextRange> getRangesToSelect(PsiElement e) {
    final PsiFile containingFile = e.getContainingFile();
    if (containingFile == null) {
      return Collections.emptyList();
    }
    final AntFile antFile = AntSupport.getAntFile(containingFile);
    if (antFile == null) {
      return Collections.emptyList();
    }
    final PsiElement antElement = antFile.findElementAt(e.getTextOffset());
    if (antElement == null) {
      return Collections.emptyList();
    }
    final TextRange antElementRange = antElement.getTextRange();
    final TextRange selectionElementRange = e.getTextRange();
    final PsiReference[] refs = antElement.getReferences();
    ArrayList<TextRange> ranges = new ArrayList<TextRange>(refs.length);
    for (PsiReference ref : refs) {
      if (ref instanceof AntPropertyReference) {
        TextRange refRange = ref.getRangeInElement();
        // reference ranges are element-relative; shift into file coordinates
        refRange = refRange.shiftRight(antElementRange.getStartOffset());
        if (selectionElementRange.contains(refRange)) {
          ranges.add(refRange);
        }
      }
    }
    return ranges;
  }
}
| apache-2.0 |
ryanemerson/activemq-artemis | artemis-server/src/main/java/org/apache/activemq/artemis/core/server/NodeManager.java | 7372 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.server;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import org.apache.activemq.artemis.api.core.ActiveMQIllegalStateException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.utils.UUID;
import org.apache.activemq.artemis.utils.UUIDGenerator;
/**
 * Base class for the live/backup node managers. Owns the node's identity
 * (a 16-byte UUID persisted in {@code server.lock}) and the file channel
 * subclasses use for lock-based coordination.
 */
public abstract class NodeManager implements ActiveMQComponent
{
   protected static final byte FIRST_TIME_START = '0';

   private static final String SERVER_LOCK_NAME = "server.lock";

   private static final String ACCESS_MODE = "rw";

   // true for a replicating backup: the nodeID is received from the live
   // server instead of being read from the lock file
   protected final boolean replicatedBackup;

   // directory that holds the server.lock file
   private final File directory;

   // guards nodeID and uuid, which are always updated together
   private final Object nodeIDGuard = new Object();

   private SimpleString nodeID;

   private UUID uuid;

   private boolean isStarted = false;

   // channel over server.lock; closed by stop()
   protected FileChannel channel;

   public NodeManager(final boolean replicatedBackup, final File directory)
   {
      this.directory = directory;
      this.replicatedBackup = replicatedBackup;
   }

   // Lifecycle hooks implemented by concrete managers --------------------

   public abstract void awaitLiveNode() throws Exception;

   public abstract void startBackup() throws Exception;

   public abstract void startLiveNode() throws Exception;

   public abstract void pauseLiveServer() throws Exception;

   public abstract void crashLiveServer() throws Exception;

   public abstract void releaseBackup() throws Exception;

   // --------------------------------------------------------------------

   public synchronized void start() throws Exception
   {
      isStarted = true;
   }

   public boolean isStarted()
   {
      return isStarted;
   }

   public SimpleString getNodeId()
   {
      synchronized (nodeIDGuard)
      {
         return nodeID;
      }
   }

   public abstract SimpleString readNodeId() throws ActiveMQIllegalStateException, IOException;

   public UUID getUUID()
   {
      synchronized (nodeIDGuard)
      {
         return uuid;
      }
   }

   /**
    * Sets the nodeID.
    * <p>
    * Only used by replicating backups.
    *
    * @param nodeID the string form of the node's UUID
    */
   public void setNodeID(String nodeID)
   {
      synchronized (nodeIDGuard)
      {
         this.nodeID = new SimpleString(nodeID);
         this.uuid = new UUID(UUID.TYPE_TIME_BASED, UUID.stringToBytes(nodeID));
      }
   }

   /**
    * Stores the given UUID (and its string form) as this node's identity.
    *
    * @param generateUUID the UUID to adopt
    */
   protected void setUUID(UUID generateUUID)
   {
      synchronized (nodeIDGuard)
      {
         uuid = generateUUID;
         nodeID = new SimpleString(uuid.toString());
      }
   }

   public abstract boolean isAwaitingFailback() throws Exception;

   public abstract boolean isBackupLive() throws Exception;

   public abstract void interrupt();

   @Override
   public synchronized void stop() throws Exception
   {
      // closing the channel also closes the underlying RandomAccessFile
      FileChannel channelCopy = channel;
      if (channelCopy != null)
         channelCopy.close();
      isStarted = false;
   }

   public final void stopBackup() throws Exception
   {
      // a replicated backup received its nodeID from the live server, so it
      // must be persisted to the lock file before the backup is released
      if (replicatedBackup && getNodeId() != null)
      {
         setUpServerLockFile();
      }
      releaseBackup();
   }

   /**
    * Ensures existence of persistent information about the server's nodeID.
    * <p>
    * Roughly the different use cases are:
    * <ol>
    * <li>old live server restarts: a server.lock file already exists and contains a nodeID.
    * <li>new live server starting for the first time: no file exists, and we just *create* a new
    * UUID to use as nodeID
    * <li>replicated backup received its nodeID from its live: no file exists, we need to persist
    * the *current* nodeID
    * </ol>
    */
   protected final synchronized void setUpServerLockFile() throws IOException
   {
      File serverLockFile = newFile(SERVER_LOCK_NAME);

      boolean fileCreated = false;

      int count = 0;
      while (!serverLockFile.exists())
      {
         try
         {
            fileCreated = serverLockFile.createNewFile();
         }
         catch (RuntimeException e)
         {
            ActiveMQServerLogger.LOGGER.nodeManagerCantOpenFile(e, serverLockFile);
            throw e;
         }
         catch (IOException e)
         {
            /*
             * on some OS's this may fail weirdly even tho the parent dir exists, retrying will work, some weird timing issue i think
             * */
            if (count < 5)
            {
               try
               {
                  Thread.sleep(100);
               }
               catch (InterruptedException e1)
               {
                  // restore the interrupt flag so callers can observe it;
                  // previously the interrupt was silently swallowed
                  Thread.currentThread().interrupt();
               }
               count++;
               continue;
            }
            ActiveMQServerLogger.LOGGER.nodeManagerCantOpenFile(e, serverLockFile);
            throw e;
         }
         // NOTE(review): if another process creates the file between exists()
         // and createNewFile(), createNewFile() returns false and we throw
         // here instead of re-checking the loop condition — confirm whether
         // concurrent creation should be tolerated.
         if (!fileCreated)
         {
            throw new IllegalStateException("Unable to create server lock file");
         }
      }

      // the channel keeps the RandomAccessFile open; stop() closes both
      @SuppressWarnings("resource")
      RandomAccessFile raFile = new RandomAccessFile(serverLockFile, ACCESS_MODE);

      channel = raFile.getChannel();

      if (fileCreated)
      {
         // brand-new lock file: initialize the three state bytes ('0','0','0')
         ByteBuffer id = ByteBuffer.allocateDirect(3);
         byte[] bytes = new byte[3];
         bytes[0] = FIRST_TIME_START;
         bytes[1] = FIRST_TIME_START;
         bytes[2] = FIRST_TIME_START;
         id.put(bytes, 0, 3);
         id.position(0);
         channel.write(id, 0);
         channel.force(true);
      }

      createNodeId();
   }

   /**
    * Resolves a file name against this manager's directory.
    *
    * @return the file inside the lock-file directory
    */
   protected final File newFile(final String fileName)
   {
      File file = new File(directory, fileName);
      return file;
   }

   /**
    * Reads or writes the 16-byte nodeID stored at offset 3 of the lock file:
    * a replicated backup persists its already-known nodeID; otherwise a fresh
    * UUID is generated when no complete nodeID could be read.
    */
   protected final synchronized void createNodeId() throws IOException
   {
      synchronized (nodeIDGuard)
      {
         ByteBuffer id = ByteBuffer.allocateDirect(16);
         int read = channel.read(id, 3);
         if (replicatedBackup)
         {
            id.position(0);
            id.put(getUUID().asBytes(), 0, 16);
            id.position(0);
            channel.write(id, 3);
            channel.force(true);
         }
         else if (read != 16)
         {
            setUUID(UUIDGenerator.getInstance().generateUUID());
            id.put(getUUID().asBytes(), 0, 16);
            id.position(0);
            channel.write(id, 3);
            channel.force(true);
         }
         else
         {
            byte[] bytes = new byte[16];
            id.position(0);
            id.get(bytes);
            setUUID(new UUID(UUID.TYPE_TIME_BASED, bytes));
         }
      }
   }
}
| apache-2.0 |
cloudera-labs/phoenix | phoenix-core/src/it/java/org/apache/phoenix/end2end/CSVCommonsLoaderIT.java | 30479 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.StringReader;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.Properties;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.jdbc.PhoenixTestDriver;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PArrayDataType;
import org.apache.phoenix.util.CSVCommonsLoader;
import org.apache.phoenix.util.DateUtil;
import org.apache.phoenix.util.PhoenixRuntime;
import org.junit.Test;
/**
 * Integration tests for {@code CSVCommonsLoader}: CSV/TDV parsing options
 * (delimiters, quoting, arrays), strict vs. non-strict column handling, and
 * round-tripping of all supported scalar datatypes through Phoenix.
 */
public class CSVCommonsLoaderIT extends BaseHBaseManagedTimeIT {

    // Fixture covering all scalar datatypes: a header row plus two data rows.
    private static final String DATATYPE_TABLE = "DATATYPE";
    private static final String DATATYPES_CSV_VALUES = "CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE\n"
            + "KEY1,A,2147483647,1.1,0,TRUE,9223372036854775807,0,1990-12-31 10:59:59,1999-12-31 23:59:59\n"
            + "KEY2,B,-2147483648,-1.1,2147483647,FALSE,-9223372036854775808,9223372036854775807,2000-01-01 00:00:01,2012-02-29 23:59:59\n";

    // Two-column stock fixtures (single- and multi-tenant table names).
    private static final String STOCK_TABLE = "STOCK_SYMBOL";
    private static final String STOCK_TABLE_MULTI = "STOCK_SYMBOL_MULTI";
    private static final String STOCK_CSV_VALUES = "AAPL,APPLE Inc.\n"
            + "CRM,SALESFORCE\n" + "GOOG,Google\n"
            + "HOG,Harlet-Davidson Inc.\n" + "HPQ,Hewlett Packard\n"
            + "INTC,Intel\n" + "MSFT,Microsoft\n" + "WAG,Walgreens\n"
            + "WMT,Walmart\n";
    // Column lists used to exercise strict / non-strict column matching.
    private static final String[] STOCK_COLUMNS_WITH_BOGUS = new String[] {
            "SYMBOL", "BOGUS" };
    private static final String[] STOCK_COLUMNS = new String[] { "SYMBOL",
            "COMPANY" };
    private static final String STOCK_CSV_VALUES_WITH_HEADER = STOCK_COLUMNS[0]
            + "," + STOCK_COLUMNS[1] + "\n" + STOCK_CSV_VALUES;
    // Fixture delimited by \u0001 and quoted by \u0002 (custom-delimiter test).
    private static final String STOCK_CSV_VALUES_WITH_DELIMITER = "APPL"
            + '\u0001' + '\u0002' + "APPLE\n" + " Inc" + '\u0002' + "\n"
            + "MSFT" + '\u0001' + "Microsoft\n";
    // Tab-delimited variants of the stock fixture.
    private static final String STOCK_TDV_VALUES = "AAPL\tAPPLE Inc\n"
            + "CRM\tSALESFORCE\n" + "GOOG\tGoogle\n"
            + "HOG\tHarlet-Davidson Inc.\n" + "HPQ\tHewlett Packard\n"
            + "INTC\tIntel\n" + "MSFT\tMicrosoft\n" + "WAG\tWalgreens\n"
            + "WMT\tWalmart\n";
    private static final String STOCK_TDV_VALUES_WITH_HEADER = STOCK_COLUMNS[0]
            + "\t" + STOCK_COLUMNS[1] + "\n" + STOCK_TDV_VALUES;

    // Fixtures with commas, CRLFs and escaped quotes inside quoted fields —
    // one well-formed variant and one with a malformed (unescaped) quote.
    private static final String ENCAPSULATED_CHARS_TABLE = "ENCAPSULATEDCHAR";
    private static final String[] ENCAPSULATED_CHARS_COLUMNS = new String[] {
            "MYKEY", "MYVALUE" };
    private static final String CSV_VALUES_ENCAPSULATED_CONTROL_CHARS = "ALL THREEF,\"This has a all three , , \"\" \r\n in it. \"\n"
            + "COMMA,\"This has a comma , in it. \"\n"
            + "CRLF,\"This has a crlf \r\n in it. \"\n"
            + "QUOTE,\"This has a quote \"\" in it. \"\n";
    private static final String CSV_VALUES_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER = ENCAPSULATED_CHARS_COLUMNS[0]
            + ","
            + ENCAPSULATED_CHARS_COLUMNS[1]
            + "\n"
            + CSV_VALUES_ENCAPSULATED_CONTROL_CHARS;
    private static final String CSV_VALUES_BAD_ENCAPSULATED_CONTROL_CHARS = "ALL THREEF,\"This has a all three , , \"\" \r\n in it. \"\n"
            + "COMMA,\"This has a comma , in it. \"\n"
            + "CRLF,\"This has a crlf \r\n in it. \"\n"
            + "BADENCAPSULATEDQUOTE,\"\"This has a bad quote in it. \"\n";
    private static final String CSV_VALUES_BAD_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER = ENCAPSULATED_CHARS_COLUMNS[0]
            + ","
            + ENCAPSULATED_CHARS_COLUMNS[1]
            + "\n"
            + CSV_VALUES_BAD_ENCAPSULATED_CONTROL_CHARS;
/**
 * Loads the header-bearing stock CSV into a freshly created table and checks
 * that every record round-trips unchanged through Phoenix.
 */
@Test
public void testCSVCommonsUpsert() throws Exception {
    CSVParser csvParser = null;
    PhoenixConnection phoenixConn = null;
    try {
        // DDL for the target table.
        String ddl = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        phoenixConn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(phoenixConn, new StringReader(ddl), null);

        // Load the fixture; the first CSV line is treated as the header.
        CSVCommonsLoader loader = new CSVCommonsLoader(phoenixConn, STOCK_TABLE,
                Collections.<String> emptyList(), true);
        loader.upsert(new StringReader(STOCK_CSV_VALUES_WITH_HEADER));

        // Every CSV record must come back from Phoenix unchanged.
        PreparedStatement stmt = phoenixConn
                .prepareStatement("SELECT SYMBOL, COMPANY FROM " + STOCK_TABLE);
        ResultSet rs = stmt.executeQuery();
        csvParser = new CSVParser(new StringReader(STOCK_CSV_VALUES_WITH_HEADER),
                loader.getFormat());
        for (CSVRecord record : csvParser) {
            assertTrue(rs.next());
            int col = 0;
            for (String expected : record) {
                assertEquals(expected, rs.getString(col + 1));
                col++;
            }
        }
        assertFalse(rs.next());
    } finally {
        if (csvParser != null)
            csvParser.close();
        if (phoenixConn != null)
            phoenixConn.close();
    }
}
/**
 * Verifies CSV loading into a MULTI_TENANT table: the table is created via a
 * global connection, rows are upserted via a tenant-specific connection
 * ("acme"), and the rows are read back through that same tenant connection.
 */
@Test
public void testCSVCommonsUpsert_MultiTenant() throws Exception {
    CSVParser parser = null;
    PhoenixConnection globalConn = null;
    PhoenixConnection tenantConn = null;
    try {
        // Create table using the global connection
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE_MULTI
                + "(TENANT_ID VARCHAR NOT NULL, SYMBOL VARCHAR NOT NULL, COMPANY VARCHAR," +
                " CONSTRAINT PK PRIMARY KEY(TENANT_ID,SYMBOL)) MULTI_TENANT = true;";
        globalConn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(globalConn,
                new StringReader(statements), null);
        globalConn.close();

        // NOTE(review): the PhoenixTestDriver instance created here is never
        // closed — confirm whether it holds resources that need releasing.
        tenantConn = new PhoenixTestDriver().connect(getUrl() + ";TenantId=acme", new Properties()).unwrap(
                PhoenixConnection.class);

        // Upsert CSV file through the tenant connection.
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(tenantConn, STOCK_TABLE_MULTI,
                Collections.<String> emptyList(), true);
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES_WITH_HEADER));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = tenantConn
                .prepareStatement("SELECT SYMBOL, COMPANY FROM "
                        + STOCK_TABLE_MULTI);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(
                STOCK_CSV_VALUES_WITH_HEADER), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (tenantConn != null)
            tenantConn.close();
        // Previously leaked when executeStatements() threw before the in-try
        // close; closing an already-closed JDBC connection is a no-op.
        if (globalConn != null)
            globalConn.close();
    }
}
/**
 * Loads the tab-delimited stock fixture (with header) using '\t' as the field
 * delimiter and checks that all rows round-trip unchanged.
 */
@Test
public void testTDVCommonsUpsert() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert TDV file: delimiter '\t', quote '"', no escape character.
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE, Collections.<String> emptyList()
                , true, '\t', '"', null, CSVCommonsLoader.DEFAULT_ARRAY_ELEMENT_SEPARATOR);
        csvUtil.upsert(new StringReader(STOCK_TDV_VALUES_WITH_HEADER));

        // Compare Phoenix ResultSet with the TDV file content.
        PreparedStatement statement = conn
                .prepareStatement("SELECT SYMBOL, COMPANY FROM "
                        + STOCK_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(
                STOCK_TDV_VALUES_WITH_HEADER), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
/**
 * Loads a fixture whose fields are delimited by \u0001 and quoted by \u0002,
 * passing '1', '2' and '3' as the delimiter/quote/escape constructor args.
 * NOTE(review): the fixture uses control characters \u0001/\u0002, so the
 * loader presumably maps digit characters to the corresponding control
 * characters — confirm against CSVCommonsLoader's delimiter handling.
 */
@Test
public void testCSVUpsertWithCustomDelimiters() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                Arrays.<String> asList(STOCK_COLUMNS), true,
                '1', '2', '3', CSVCommonsLoader.DEFAULT_ARRAY_ELEMENT_SEPARATOR);
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES_WITH_DELIMITER));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT SYMBOL, COMPANY FROM "
                        + STOCK_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(
                STOCK_CSV_VALUES_WITH_DELIMITER), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
/**
 * Loads a header-less CSV with an explicit column list supplied to the loader
 * and checks that all rows round-trip unchanged.
 */
@Test
public void testCSVUpsertWithColumns() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                Arrays.<String> asList(STOCK_COLUMNS), true);
        // no header
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT SYMBOL, COMPANY FROM "
                        + STOCK_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(
                STOCK_CSV_VALUES), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
/**
 * Loads a header-less CSV with a {@code null} column list, relying on the
 * table's own column order, and checks that all rows round-trip unchanged.
 */
@Test
public void testCSVUpsertWithNoColumns() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                null, true);
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT SYMBOL, COMPANY FROM "
                        + STOCK_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(
                STOCK_CSV_VALUES), csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
/**
 * Non-strict mode with a column list containing an unknown column ("BOGUS"):
 * the upsert succeeds, only SYMBOL is populated, and COMPANY stays null.
 */
@Test
public void testCSVUpsertWithBogusColumn() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file, not strict
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                Arrays.asList(STOCK_COLUMNS_WITH_BOGUS), false);
        csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT SYMBOL, COMPANY FROM "
                        + STOCK_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(STOCK_CSV_VALUES),
                csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            assertEquals(record.get(0), phoenixResultSet.getString(1));
            // the second CSV field was mapped to the bogus column and dropped
            assertNull(phoenixResultSet.getString(2));
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
/**
 * Supplying column names that do not exist on the table ("FOO", "BAR") must
 * fail the upsert with an undefined-column SQLException, even in non-strict
 * mode.
 */
@Test
public void testCSVUpsertWithAllColumn() throws Exception {
    // The CSVParser local previously declared here was never assigned, so
    // its close() branch in finally was dead code and has been removed.
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                Arrays.asList("FOO", "BAR"), false);
        try {
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
            fail();
        } catch (SQLException e) {
            assertTrue(
                    e.getMessage(),
                    e.getMessage()
                            .contains(
                                    "ERROR 504 (42703): Undefined column. columnName=STOCK_SYMBOL.[FOO, BAR]"));
        }
    } finally {
        if (conn != null)
            conn.close();
    }
}
/**
 * Strict mode with a column list containing an unknown column ("BOGUS"): the
 * upsert must fail with an undefined-column SQLException naming that column.
 */
@Test
public void testCSVUpsertWithBogusColumnStrict() throws Exception {
    // The CSVParser local previously declared here was never assigned, so
    // its close() branch in finally was dead code and has been removed.
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS " + STOCK_TABLE
                + "(SYMBOL VARCHAR NOT NULL PRIMARY KEY, COMPANY VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, STOCK_TABLE,
                Arrays.asList(STOCK_COLUMNS_WITH_BOGUS), true);
        try {
            csvUtil.upsert(new StringReader(STOCK_CSV_VALUES));
            fail();
        } catch (SQLException e) {
            assertTrue(
                    e.getMessage(),
                    e.getMessage()
                            .contains(
                                    "ERROR 504 (42703): Undefined column. columnName=STOCK_SYMBOL.BOGUS"));
        }
    } finally {
        if (conn != null)
            conn.close();
    }
}
/**
 * Round-trips the all-datatypes fixture; TIME and DATE columns are compared
 * via DateUtil parsing rather than string comparison.
 */
@Test
public void testAllDatatypes() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS "
                + DATATYPE_TABLE
                + " (CKEY VARCHAR NOT NULL PRIMARY KEY,"
                + " CVARCHAR VARCHAR, CINTEGER INTEGER, CDECIMAL DECIMAL(31,10), CUNSIGNED_INT UNSIGNED_INT, CBOOLEAN BOOLEAN, CBIGINT BIGINT, CUNSIGNED_LONG UNSIGNED_LONG, CTIME TIME, CDATE DATE);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn,
                DATATYPE_TABLE, Collections.<String> emptyList(), true);
        csvUtil.upsert(new StringReader(DATATYPES_CSV_VALUES));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT CKEY, CVARCHAR, CINTEGER, CDECIMAL, CUNSIGNED_INT, CBOOLEAN, CBIGINT, CUNSIGNED_LONG, CTIME, CDATE FROM "
                        + DATATYPE_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(DATATYPES_CSV_VALUES),
                csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            int size = record.size();
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getObject(i + 1)
                        .toString().toUpperCase());
                // NOTE(review): this condition looks inverted — with i == 0
                // and size == 10, "i < size - 2" is true immediately, so only
                // the first column (CKEY) is ever string-compared here. The
                // apparent intent is to compare all columns except the
                // trailing CTIME/CDATE pair (checked specially below) —
                // confirm and fix the condition if so.
                if (i < size - 2)
                    break;
                i++;
            }
            // special case for matching date, time values
            assertEquals(DateUtil.parseTime(record.get(8)),
                    phoenixResultSet.getTime("CTIME"));
            assertEquals(DateUtil.parseDate(record.get(9)),
                    phoenixResultSet.getDate("CDATE"));
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
/**
 * Loads a fixture whose quoted fields contain commas, CRLFs and escaped
 * quotes, and checks that all values round-trip unchanged.
 */
@Test
public void testCSVCommonsUpsertEncapsulatedControlChars() throws Exception {
    CSVParser parser = null;
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS "
                + ENCAPSULATED_CHARS_TABLE
                + "(MYKEY VARCHAR NOT NULL PRIMARY KEY, MYVALUE VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn,
                ENCAPSULATED_CHARS_TABLE, Collections.<String> emptyList(),
                true);
        csvUtil.upsert(new StringReader(
                CSV_VALUES_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER));

        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT MYKEY, MYVALUE FROM "
                        + ENCAPSULATED_CHARS_TABLE);
        ResultSet phoenixResultSet = statement.executeQuery();
        parser = new CSVParser(new StringReader(
                CSV_VALUES_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER),
                csvUtil.getFormat());
        for (CSVRecord record : parser) {
            assertTrue(phoenixResultSet.next());
            int i = 0;
            for (String value : record) {
                assertEquals(value, phoenixResultSet.getString(i + 1));
                i++;
            }
        }
        assertFalse(phoenixResultSet.next());
    } finally {
        if (parser != null)
            parser.close();
        if (conn != null)
            conn.close();
    }
}
/**
 * A fixture with a malformed quote inside a quoted field must make the upsert
 * fail with the commons-csv "invalid char between encapsulated token and
 * delimiter" error.
 */
@Test
public void testCSVCommonsUpsertBadEncapsulatedControlChars()
        throws Exception {
    // The CSVParser local previously declared here was never assigned, so
    // its close() branch in finally was dead code and has been removed.
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS "
                + ENCAPSULATED_CHARS_TABLE
                + "(MYKEY VARCHAR NOT NULL PRIMARY KEY, MYVALUE VARCHAR);";
        conn = DriverManager.getConnection(getUrl())
                .unwrap(PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);

        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn,
                ENCAPSULATED_CHARS_TABLE, Collections.<String> emptyList(),
                true);
        try {
            csvUtil.upsert(new StringReader(
                    CSV_VALUES_BAD_ENCAPSULATED_CONTROL_CHARS_WITH_HEADER));
            fail();
        } catch (RuntimeException e) {
            assertTrue(
                    e.getMessage(),
                    e.getMessage()
                            .contains(
                                    "invalid char between encapsulated token and delimiter"));
        }
    } finally {
        if (conn != null)
            conn.close();
    }
}
@Test
public void testCSVCommonsUpsert_WithArray() throws Exception {
    // Verifies that an INTEGER ARRAY column can be loaded from CSV using a
    // custom array element separator ("!") and read back as a Phoenix array.
    //
    // Fix: removed the "CSVParser parser = null;" local that was never
    // assigned — the finally-block null-check close was dead code.
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS ARRAY_TABLE "
                + "(ID BIGINT NOT NULL PRIMARY KEY, VALARRAY INTEGER ARRAY);";
        conn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);
        // Upsert CSV file; "!" is the array element separator.
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "ARRAY_TABLE",
                null, true, ',', '"', null, "!");
        csvUtil.upsert(
                new StringReader("ID,VALARRAY\n"
                        + "1,2!3!4\n"));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT ID, VALARRAY FROM ARRAY_TABLE");
        ResultSet phoenixResultSet = statement.executeQuery();
        assertTrue(phoenixResultSet.next());
        assertEquals(1L, phoenixResultSet.getLong(1));
        assertEquals(
                PArrayDataType.instantiatePhoenixArray(PInteger.INSTANCE, new Integer[]{2, 3, 4}),
                phoenixResultSet.getArray(2));
        assertFalse(phoenixResultSet.next());
    } finally {
        if (conn != null)
            conn.close();
    }
}
@Test
public void testCSVCommonsUpsert_WithTimestamp() throws Exception {
    // Verifies that TIMESTAMP values load correctly from CSV, both with
    // second precision and with fractional (millisecond) precision.
    //
    // Fix: removed the "CSVParser parser = null;" local that was never
    // assigned — the finally-block null-check close was dead code.
    PhoenixConnection conn = null;
    try {
        // Create table
        String statements = "CREATE TABLE IF NOT EXISTS TS_TABLE "
                + "(ID BIGINT NOT NULL PRIMARY KEY, TS TIMESTAMP);";
        conn = DriverManager.getConnection(getUrl()).unwrap(
                PhoenixConnection.class);
        PhoenixRuntime.executeStatements(conn,
                new StringReader(statements), null);
        // Upsert CSV file
        CSVCommonsLoader csvUtil = new CSVCommonsLoader(conn, "TS_TABLE",
                null, true, ',', '"', null, "!");
        csvUtil.upsert(
                new StringReader("ID,TS\n"
                        + "1,1970-01-01 00:00:10\n"
                        + "2,1970-01-01 00:00:10.123\n"));
        // Compare Phoenix ResultSet with CSV file content
        PreparedStatement statement = conn
                .prepareStatement("SELECT ID, TS FROM TS_TABLE ORDER BY ID");
        ResultSet phoenixResultSet = statement.executeQuery();
        assertTrue(phoenixResultSet.next());
        assertEquals(1L, phoenixResultSet.getLong(1));
        // 10 seconds after the epoch.
        assertEquals(10000L, phoenixResultSet.getTimestamp(2).getTime());
        assertTrue(phoenixResultSet.next());
        assertEquals(2L, phoenixResultSet.getLong(1));
        // 10.123 seconds after the epoch — millisecond part preserved.
        assertEquals(10123L, phoenixResultSet.getTimestamp(2).getTime());
        assertFalse(phoenixResultSet.next());
    } finally {
        if (conn != null)
            conn.close();
    }
}
@Test
public void testCSVCommonsUpsert_NonExistentTable() throws Exception {
    // Loading into a table that does not exist must fail up front with a
    // descriptive IllegalArgumentException rather than attempting an upsert.
    PhoenixConnection conn = null;
    try {
        conn = DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class);
        CSVCommonsLoader loader =
                new CSVCommonsLoader(conn, "NONEXISTENTTABLE", null, true, ',', '"', '\\', "!");
        StringReader csvInput = new StringReader("ID,VALARRAY\n1,2!3!4\n");
        loader.upsert(csvInput);
        fail("Trying to load a non-existent table should fail");
    } catch (IllegalArgumentException e) {
        assertEquals("Table NONEXISTENTTABLE not found", e.getMessage());
    } finally {
        if (conn != null) {
            conn.close();
        }
    }
}
} | apache-2.0 |
meetdestiny/geronimo-trader | modules/kernel/src/java/org/apache/geronimo/kernel/log/GeronimoLogFactory.java | 4418 | /**
*
* Copyright 2003-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.kernel.log;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogConfigurationException;
import org.apache.commons.logging.LogFactory;
/**
* @version $Rev$ $Date$
*/
/**
 * A commons-logging {@link LogFactory} that hands out {@link GeronimoLog}
 * wrappers and can retarget every previously-created log to a new delegate
 * factory at runtime via {@link #setLogFactory(LogFactory)}.
 *
 * <p>Thread-safety: all shared static state is guarded by {@code factoryLock}.
 * Fix: the original wrote the static {@code logFactory} field in
 * {@code setLogFactory} without holding {@code factoryLock}, even though every
 * reader synchronizes on it; the write now happens under the lock.
 */
public class GeronimoLogFactory extends LogFactory {
    private final static Object factoryLock = new Object();
    // todo this should use weak references
    // Maps context ClassLoader -> (log name -> GeronimoLog). Guarded by factoryLock.
    private static final HashMap instancesByClassLoader = new HashMap();

    // The delegate factory that actually produces Log instances. Guarded by factoryLock.
    private static LogFactory logFactory = new BootstrapLogFactory();

    public GeronimoLogFactory() {
    }

    /** Returns the current delegate factory. */
    public LogFactory getLogFactory() {
        synchronized (factoryLock) {
            return logFactory;
        }
    }

    /**
     * Replaces the delegate factory and retargets all known logs to
     * instances produced by the new factory.
     */
    public void setLogFactory(LogFactory logFactory) {
        // change the log factory; the write happens under factoryLock because
        // every reader of the field synchronizes on it
        synchronized (factoryLock) {
            GeronimoLogFactory.logFactory = logFactory;
        }

        // update all known logs to use instances of the new factory
        // (done outside the lock so alien factory code is never called
        // while holding factoryLock)
        Set logs = getInstances();
        for (Iterator iterator = logs.iterator(); iterator.hasNext();) {
            GeronimoLog log = (GeronimoLog) iterator.next();
            log.setLog(logFactory.getInstance(log.getName()));
        }
    }

    /** Returns a snapshot of every GeronimoLog created so far, across all class loaders. */
    public Set getInstances() {
        synchronized (factoryLock) {
            Set logs = new HashSet();
            for (Iterator iterator = instancesByClassLoader.values().iterator(); iterator.hasNext();) {
                Map instanceMap = ((Map) iterator.next());
                logs.addAll(instanceMap.values());
            }
            return logs;
        }
    }

    public Log getInstance(Class clazz) throws LogConfigurationException {
        synchronized (factoryLock) {
            return getInstance(clazz.getName());
        }
    }

    public Log getInstance(String name) throws LogConfigurationException {
        synchronized (factoryLock) {
            // get the instances for the context classloader; logs are cached
            // per class loader so redeployed modules get fresh instances
            ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
            Map instances = (Map) instancesByClassLoader.get(contextClassLoader);
            if (instances == null) {
                instances = new HashMap();
                instancesByClassLoader.put(contextClassLoader, instances);
            }

            // get the log, creating and caching a wrapper on first request
            Log log = (Log) instances.get(name);
            if (log == null) {
                log = new GeronimoLog(name, logFactory.getInstance(name));
                instances.put(name, log);
            }
            return log;
        }
    }

    public void release() {
        synchronized (factoryLock) {
            // TODO rethink this - it works for now: drops the cache without
            // detaching the delegate from each GeronimoLog
            instancesByClassLoader.clear();
        }
    }

    public Object getAttribute(String name) {
        synchronized (factoryLock) {
            return logFactory.getAttribute(name);
        }
    }

    public String[] getAttributeNames() {
        synchronized (factoryLock) {
            return logFactory.getAttributeNames();
        }
    }

    public void removeAttribute(String name) {
        synchronized (factoryLock) {
            logFactory.removeAttribute(name);
        }
    }

    public void setAttribute(String name, Object value) {
        synchronized (factoryLock) {
            logFactory.setAttribute(name, value);
        }
    }
}
| apache-2.0 |
google/error-prone | core/src/main/java/com/google/errorprone/bugpatterns/threadsafety/HeldLockAnalyzer.java | 20529 | /*
* Copyright 2014 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns.threadsafety;
import static com.google.errorprone.matchers.method.MethodMatchers.instanceMethod;
import com.google.auto.value.AutoValue;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.errorprone.VisitorState;
import com.google.errorprone.annotations.concurrent.UnlockMethod;
import com.google.errorprone.bugpatterns.threadsafety.GuardedByExpression.Kind;
import com.google.errorprone.bugpatterns.threadsafety.GuardedByExpression.Select;
import com.google.errorprone.matchers.Matcher;
import com.google.errorprone.matchers.Matchers;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.IdentifierTree;
import com.sun.source.tree.LambdaExpressionTree;
import com.sun.source.tree.MemberSelectTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.MethodTree;
import com.sun.source.tree.NewClassTree;
import com.sun.source.tree.SynchronizedTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.TryTree;
import com.sun.source.tree.VariableTree;
import com.sun.source.util.TreePathScanner;
import com.sun.source.util.TreeScanner;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.ClassSymbol;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.JCTree.JCExpression;
import com.sun.tools.javac.tree.JCTree.JCNewClass;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import javax.lang.model.element.Modifier;
/**
* A method body analyzer. Responsible for tracking the set of held locks, and checking accesses to
* guarded members.
*
* @author cushon@google.com (Liam Miller-Cushon)
*/
/**
 * A method body analyzer. Responsible for tracking the set of held locks, and checking accesses to
 * guarded members.
 *
 * <p>Fixes in this revision: {@code visitTry} no longer scans the try-with-resources resource list
 * twice (the duplicate scan could double-report guarded accesses appearing in resource
 * expressions), and {@code AcquiredLockFinder}'s private helper is renamed from the copy-pasted
 * {@code unlockMatchers()} to {@code lockMatchers()} to match what it builds.
 *
 * @author cushon@google.com (Liam Miller-Cushon)
 */
public final class HeldLockAnalyzer {

  /** Listener interface for accesses to guarded members. */
  public interface LockEventListener {

    /**
     * Handles a guarded member access.
     *
     * @param tree The member access expression.
     * @param guard The member's guard expression.
     * @param locks The set of held locks.
     */
    void handleGuardedAccess(ExpressionTree tree, GuardedByExpression guard, HeldLockSet locks);
  }

  /**
   * Analyzes a method body, tracking the set of held locks and checking accesses to guarded
   * members.
   */
  public static void analyze(
      VisitorState state,
      LockEventListener listener,
      Predicate<Tree> isSuppressed,
      GuardedByFlags flags,
      boolean reportMissingGuards) {
    HeldLockSet locks = HeldLockSet.empty();
    locks = handleMonitorGuards(state, locks, flags);
    new LockScanner(state, listener, isSuppressed, flags, reportMissingGuards)
        .scan(state.getPath(), locks);
  }

  // Don't use Class#getName() for inner classes, we don't want `Monitor$Guard`
  private static final String MONITOR_GUARD_CLASS =
      "com.google.common.util.concurrent.Monitor.Guard";

  // If the analysis starts inside an anonymous `new Monitor.Guard(lock) {...}`,
  // the guard's lock argument is considered held in its body.
  private static HeldLockSet handleMonitorGuards(
      VisitorState state, HeldLockSet locks, GuardedByFlags flags) {
    JCNewClass newClassTree = ASTHelpers.findEnclosingNode(state.getPath(), JCNewClass.class);
    if (newClassTree == null) {
      return locks;
    }
    Symbol clazzSym = ASTHelpers.getSymbol(newClassTree.clazz);
    if (!(clazzSym instanceof ClassSymbol)) {
      return locks;
    }
    if (!((ClassSymbol) clazzSym).fullname.contentEquals(MONITOR_GUARD_CLASS)) {
      return locks;
    }
    Optional<GuardedByExpression> lockExpression =
        GuardedByBinder.bindExpression(
            Iterables.getOnlyElement(newClassTree.getArguments()), state, flags);
    if (!lockExpression.isPresent()) {
      return locks;
    }
    return locks.plus(lockExpression.get());
  }

  private static class LockScanner extends TreePathScanner<Void, HeldLockSet> {

    private final VisitorState visitorState;
    private final LockEventListener listener;
    private final Predicate<Tree> isSuppressed;
    private final GuardedByFlags flags;
    private final boolean reportMissingGuards;

    private static final GuardedByExpression.Factory F = new GuardedByExpression.Factory();

    private LockScanner(
        VisitorState visitorState,
        LockEventListener listener,
        Predicate<Tree> isSuppressed,
        GuardedByFlags flags,
        boolean reportMissingGuards) {
      this.visitorState = visitorState;
      this.listener = listener;
      this.isSuppressed = isSuppressed;
      this.flags = flags;
      this.reportMissingGuards = reportMissingGuards;
    }

    @Override
    public Void visitMethod(MethodTree tree, HeldLockSet locks) {
      if (isSuppressed.apply(tree)) {
        return null;
      }
      // Synchronized instance methods hold the 'this' lock; synchronized static methods
      // hold the Class lock for the enclosing class.
      Set<Modifier> mods = tree.getModifiers().getFlags();
      if (mods.contains(Modifier.SYNCHRONIZED)) {
        Symbol owner = (((JCTree.JCMethodDecl) tree).sym.owner);
        GuardedByExpression lock =
            mods.contains(Modifier.STATIC) ? F.classLiteral(owner) : F.thisliteral();
        locks = locks.plus(lock);
      }

      // @GuardedBy annotations on methods are trusted for declarations, and checked
      // for invocations.
      for (String guard : GuardedByUtils.getGuardValues(tree, visitorState)) {
        Optional<GuardedByExpression> bound =
            GuardedByBinder.bindString(
                guard, GuardedBySymbolResolver.from(tree, visitorState), flags);
        if (bound.isPresent()) {
          locks = locks.plus(bound.get());
        }
      }

      return super.visitMethod(tree, locks);
    }

    @Override
    public Void visitTry(TryTree tree, HeldLockSet locks) {
      // Fix: the resource list was previously scanned twice here, which could
      // double-report guarded accesses inside resource expressions.
      List<? extends Tree> resources = tree.getResources();
      scan(resources, locks);

      // Cheesy try/finally heuristic: assume that all locks released in the finally
      // are held for the entirety of the try and catch statements.
      Collection<GuardedByExpression> releasedLocks =
          ReleasedLockFinder.find(tree.getFinallyBlock(), visitorState, flags);
      if (resources.isEmpty()) {
        scan(tree.getBlock(), locks.plusAll(releasedLocks));
      } else {
        // We don't know what to do with the try-with-resources block.
        // TODO(cushon) - recognize common try-with-resources patterns. Currently there is no
        // standard implementation of an AutoCloseable lock resource to detect.
      }
      scan(tree.getCatches(), locks.plusAll(releasedLocks));
      scan(tree.getFinallyBlock(), locks);
      return null;
    }

    @Override
    public Void visitSynchronized(SynchronizedTree tree, HeldLockSet locks) {
      // The synchronized expression is held in the body of the synchronized statement:
      Optional<GuardedByExpression> lockExpression =
          GuardedByBinder.bindExpression((JCExpression) tree.getExpression(), visitorState, flags);
      scan(tree.getBlock(), lockExpression.isPresent() ? locks.plus(lockExpression.get()) : locks);
      return null;
    }

    @Override
    public Void visitMemberSelect(MemberSelectTree tree, HeldLockSet locks) {
      checkMatch(tree, locks);
      return super.visitMemberSelect(tree, locks);
    }

    @Override
    public Void visitIdentifier(IdentifierTree tree, HeldLockSet locks) {
      checkMatch(tree, locks);
      return super.visitIdentifier(tree, locks);
    }

    @Override
    public Void visitNewClass(NewClassTree tree, HeldLockSet locks) {
      scan(tree.getEnclosingExpression(), locks);
      scan(tree.getIdentifier(), locks);
      scan(tree.getTypeArguments(), locks);
      scan(tree.getArguments(), locks);
      // Don't descend into bodies of anonymous class declarations;
      // their method declarations will be analyzed separately.
      return null;
    }

    @Override
    public Void visitLambdaExpression(LambdaExpressionTree node, HeldLockSet heldLockSet) {
      // Don't descend into lambdas; they will be analyzed separately.
      return null;
    }

    @Override
    public Void visitVariable(VariableTree node, HeldLockSet locks) {
      return isSuppressed.apply(node) ? null : super.visitVariable(node, locks);
    }

    @Override
    public Void visitClass(ClassTree node, HeldLockSet locks) {
      return isSuppressed.apply(node) ? null : super.visitClass(node, locks);
    }

    // Reports every guarded access through the listener, resolving each
    // @GuardedBy string in the current scope.
    private void checkMatch(ExpressionTree tree, HeldLockSet locks) {
      for (String guardString : GuardedByUtils.getGuardValues(tree, visitorState)) {
        Optional<GuardedByExpression> guard =
            GuardedByBinder.bindString(
                guardString,
                GuardedBySymbolResolver.from(tree, visitorState.withPath(getCurrentPath())),
                flags);
        if (!guard.isPresent()) {
          if (reportMissingGuards) {
            invalidLock(tree, locks, guardString);
          }
          continue;
        }
        Optional<GuardedByExpression> boundGuard =
            ExpectedLockCalculator.from(
                (JCTree.JCExpression) tree, guard.get(), visitorState, flags);
        if (!boundGuard.isPresent()) {
          // We couldn't resolve a guarded by expression in the current scope, so we can't
          // guarantee the access is protected and must report an error to be safe.
          invalidLock(tree, locks, guardString);
          continue;
        }
        listener.handleGuardedAccess(tree, boundGuard.get(), locks);
      }
    }

    private void invalidLock(ExpressionTree tree, HeldLockSet locks, String guardString) {
      listener.handleGuardedAccess(
          tree, new GuardedByExpression.Factory().error(guardString), locks);
    }
  }

  /** An abstraction over the lock classes we understand. */
  @AutoValue
  abstract static class LockResource {

    /** The fully-qualified name of the lock class. */
    abstract String className();

    /** The method that acquires the lock. */
    abstract String lockMethod();

    /** The method that releases the lock. */
    abstract String unlockMethod();

    public Matcher<ExpressionTree> createUnlockMatcher() {
      return instanceMethod().onDescendantOf(className()).named(unlockMethod());
    }

    public Matcher<ExpressionTree> createLockMatcher() {
      return instanceMethod().onDescendantOf(className()).named(lockMethod());
    }

    static LockResource create(String className, String lockMethod, String unlockMethod) {
      return new AutoValue_HeldLockAnalyzer_LockResource(className, lockMethod, unlockMethod);
    }
  }

  /** The set of supported lock classes. */
  private static final ImmutableList<LockResource> LOCK_RESOURCES =
      ImmutableList.of(
          LockResource.create("java.util.concurrent.locks.Lock", "lock", "unlock"),
          LockResource.create("com.google.common.util.concurrent.Monitor", "enter", "leave"),
          LockResource.create("java.util.concurrent.Semaphore", "acquire", "release"));

  // Collects the lock expressions on which a matching lock operation
  // (acquire or release, per the supplied matcher) is performed in a tree.
  private static class LockOperationFinder extends TreeScanner<Void, Void> {

    static Collection<GuardedByExpression> find(
        Tree tree,
        VisitorState state,
        Matcher<ExpressionTree> lockOperationMatcher,
        GuardedByFlags flags) {
      if (tree == null) {
        return Collections.emptyList();
      }
      LockOperationFinder finder = new LockOperationFinder(state, lockOperationMatcher, flags);
      tree.accept(finder, null);
      return finder.locks;
    }

    private static final String READ_WRITE_LOCK_CLASS = "java.util.concurrent.locks.ReadWriteLock";

    private final Matcher<ExpressionTree> lockOperationMatcher;
    private final GuardedByFlags flags;

    /** Matcher for ReadWriteLock lock accessors. */
    private static final Matcher<ExpressionTree> READ_WRITE_ACCESSOR_MATCHER =
        Matchers.<ExpressionTree>anyOf(
            instanceMethod().onDescendantOf(READ_WRITE_LOCK_CLASS).named("readLock"),
            instanceMethod().onDescendantOf(READ_WRITE_LOCK_CLASS).named("writeLock"));

    private final VisitorState state;
    private final Set<GuardedByExpression> locks = new HashSet<>();

    private LockOperationFinder(
        VisitorState state, Matcher<ExpressionTree> lockOperationMatcher, GuardedByFlags flags) {
      this.state = state;
      this.lockOperationMatcher = lockOperationMatcher;
      this.flags = flags;
    }

    @Override
    public Void visitMethodInvocation(MethodInvocationTree tree, Void unused) {
      handleReleasedLocks(tree);
      handleUnlockAnnotatedMethods(tree);
      return null;
    }

    /**
     * Checks for locks that are released directly. Currently only {@link
     * java.util.concurrent.locks.Lock#unlock()} is supported.
     *
     * <p>TODO(cushon): Semaphores, CAS, ... ?
     */
    private void handleReleasedLocks(MethodInvocationTree tree) {
      if (!lockOperationMatcher.matches(tree, state)) {
        return;
      }
      Optional<GuardedByExpression> node =
          GuardedByBinder.bindExpression((JCExpression) tree, state, flags);
      if (node.isPresent()) {
        GuardedByExpression receiver = ((GuardedByExpression.Select) node.get()).base();
        locks.add(receiver);

        // The analysis interprets members guarded by {@link ReadWriteLock}s as requiring that
        // either the read or write lock is held for all accesses, but doesn't enforce a policy
        // for which of the two is held. Technically the write lock should be required while
        // writing to the guarded member and the read lock should be used for all other accesses,
        // but in practice the write lock is frequently held while performing a mutating operation
        // on the object stored in the field (e.g. inserting into a List).
        // TODO(cushon): investigate a better way to specify the contract for ReadWriteLocks.
        if ((tree.getMethodSelect() instanceof MemberSelectTree)
            && READ_WRITE_ACCESSOR_MATCHER.matches(ASTHelpers.getReceiver(tree), state)) {
          locks.add(((Select) receiver).base());
        }
      }
    }

    /** Checks {@link UnlockMethod}-annotated methods. */
    private void handleUnlockAnnotatedMethods(MethodInvocationTree tree) {
      UnlockMethod annotation = ASTHelpers.getAnnotation(tree, UnlockMethod.class);
      if (annotation == null) {
        return;
      }
      for (String lockString : annotation.value()) {
        Optional<GuardedByExpression> guard =
            GuardedByBinder.bindString(
                lockString, GuardedBySymbolResolver.from(tree, state), flags);
        // TODO(cushon): http://docs.oracle.com/javase/8/docs/api/java/util/Optional.html#ifPresent
        if (guard.isPresent()) {
          Optional<GuardedByExpression> lock =
              ExpectedLockCalculator.from((JCExpression) tree, guard.get(), state, flags);
          if (lock.isPresent()) {
            locks.add(lock.get());
          }
        }
      }
    }
  }

  /**
   * Find the locks that are released in the given tree. (e.g. the 'finally' clause of a
   * try/finally)
   */
  static final class ReleasedLockFinder {

    /** Matcher for methods that release lock resources. */
    private static final Matcher<ExpressionTree> UNLOCK_MATCHER =
        Matchers.<ExpressionTree>anyOf(unlockMatchers());

    private static Iterable<Matcher<ExpressionTree>> unlockMatchers() {
      return Iterables.transform(LOCK_RESOURCES, LockResource::createUnlockMatcher);
    }

    static Collection<GuardedByExpression> find(
        Tree tree, VisitorState state, GuardedByFlags flags) {
      return LockOperationFinder.find(tree, state, UNLOCK_MATCHER, flags);
    }

    private ReleasedLockFinder() {}
  }

  /**
   * Find the locks that are acquired in the given tree. (e.g. the body of a @LockMethod-annotated
   * method.)
   */
  static final class AcquiredLockFinder {

    /** Matcher for methods that acquire lock resources. */
    private static final Matcher<ExpressionTree> LOCK_MATCHER =
        Matchers.<ExpressionTree>anyOf(lockMatchers());

    // Renamed from the copy-pasted "unlockMatchers()": this builds the
    // acquire-side matchers via createLockMatcher.
    private static Iterable<Matcher<ExpressionTree>> lockMatchers() {
      return Iterables.transform(LOCK_RESOURCES, LockResource::createLockMatcher);
    }

    static Collection<GuardedByExpression> find(
        Tree tree, VisitorState state, GuardedByFlags flags) {
      return LockOperationFinder.find(tree, state, LOCK_MATCHER, flags);
    }

    private AcquiredLockFinder() {}
  }

  /**
   * Utility for discovering the lock expressions that needs to be held when accessing specific
   * guarded members.
   */
  public static final class ExpectedLockCalculator {

    private static final GuardedByExpression.Factory F = new GuardedByExpression.Factory();

    /**
     * Determine the lock expression that needs to be held when accessing a specific guarded member.
     *
     * <p>If the lock expression resolves to an instance member, the result will be a select
     * expression with the same base as the original guarded member access.
     *
     * <p>For example:
     *
     * <pre><code>
     * class MyClass {
     *   final Object mu = new Object();
     *   {@literal @}GuardedBy("mu")
     *   int x;
     * }
     * void m(MyClass myClass) {
     *   myClass.x++;
     * }
     * </code></pre>
     *
     * To determine the lock that must be held when accessing myClass.x, from is called with
     * "myClass.x" and "mu", and returns "myClass.mu".
     */
    public static Optional<GuardedByExpression> from(
        JCTree.JCExpression guardedMemberExpression,
        GuardedByExpression guard,
        VisitorState state,
        GuardedByFlags flags) {

      if (isGuardReferenceAbsolute(guard)) {
        return Optional.of(guard);
      }

      Optional<GuardedByExpression> guardedMember =
          GuardedByBinder.bindExpression(guardedMemberExpression, state, flags);

      if (!guardedMember.isPresent()) {
        return Optional.empty();
      }

      GuardedByExpression memberBase = ((GuardedByExpression.Select) guardedMember.get()).base();
      return Optional.of(helper(guard, memberBase));
    }

    /**
     * Returns true for guard expressions that require an 'absolute' reference, i.e. where the
     * expression to access the lock is always the same, regardless of how the guarded member is
     * accessed.
     *
     * <p>E.g.:
     *
     * <ul>
     *   <li>class object: 'TypeName.class'
     *   <li>static access: 'TypeName.member'
     *   <li>enclosing instance: 'Outer.this'
     *   <li>enclosing instance member: 'Outer.this.member'
     * </ul>
     */
    private static boolean isGuardReferenceAbsolute(GuardedByExpression guard) {

      GuardedByExpression instance = guard.kind() == Kind.SELECT ? getSelectInstance(guard) : guard;
      return instance.kind() != Kind.THIS;
    }

    /** Gets the base expression of a (possibly nested) member select expression. */
    private static GuardedByExpression getSelectInstance(GuardedByExpression guard) {
      if (guard instanceof Select) {
        return getSelectInstance(((Select) guard).base());
      }
      return guard;
    }

    // Rebases the lock expression onto the guarded member's receiver: every
    // implicit 'this' in the lock expression is replaced by memberAccess.
    private static GuardedByExpression helper(
        GuardedByExpression lockExpression, GuardedByExpression memberAccess) {
      switch (lockExpression.kind()) {
        case SELECT:
          {
            GuardedByExpression.Select lockSelect = (GuardedByExpression.Select) lockExpression;
            return F.select(helper(lockSelect.base(), memberAccess), lockSelect.sym());
          }
        case THIS:
          return memberAccess;
        default:
          throw new IllegalGuardedBy(lockExpression.toString());
      }
    }

    private ExpectedLockCalculator() {}
  }

  private HeldLockAnalyzer() {}
}
| apache-2.0 |
sopel39/presto | presto-main/src/test/java/io/prestosql/type/BenchmarkDecimalOperators.java | 22132 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.type;
import com.google.common.collect.ImmutableList;
import io.prestosql.RowPagesBuilder;
import io.prestosql.Session;
import io.prestosql.execution.warnings.WarningCollector;
import io.prestosql.metadata.MetadataManager;
import io.prestosql.operator.DriverYieldSignal;
import io.prestosql.operator.project.PageProcessor;
import io.prestosql.spi.Page;
import io.prestosql.spi.type.BigintType;
import io.prestosql.spi.type.DecimalType;
import io.prestosql.spi.type.DoubleType;
import io.prestosql.spi.type.SqlDecimal;
import io.prestosql.spi.type.Type;
import io.prestosql.sql.gen.ExpressionCompiler;
import io.prestosql.sql.gen.PageFunctionCompiler;
import io.prestosql.sql.parser.SqlParser;
import io.prestosql.sql.planner.Symbol;
import io.prestosql.sql.planner.SymbolToInputRewriter;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.relational.RowExpression;
import io.prestosql.sql.relational.SqlToRowExpressionTranslator;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.NodeRef;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.VerboseMode;
import org.testng.annotations.Test;
import java.math.BigInteger;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.prestosql.RowPagesBuilder.rowPagesBuilder;
import static io.prestosql.SessionTestUtils.TEST_SESSION;
import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static io.prestosql.metadata.FunctionKind.SCALAR;
import static io.prestosql.metadata.MetadataManager.createTestMetadataManager;
import static io.prestosql.operator.scalar.FunctionAssertions.createExpression;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DecimalType.createDecimalType;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.sql.analyzer.ExpressionAnalyzer.getExpressionTypesFromInput;
import static io.prestosql.testing.TestingConnectorSession.SESSION;
import static io.prestosql.testing.TestingSession.testSessionBuilder;
import static java.math.BigInteger.ONE;
import static java.math.BigInteger.ZERO;
import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
import static org.openjdk.jmh.annotations.Scope.Thread;
@State(Scope.Thread)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Fork(value = 3)
@Warmup(iterations = 20, timeUnit = TimeUnit.MILLISECONDS)
@Measurement(iterations = 10, timeUnit = TimeUnit.MILLISECONDS)
public class BenchmarkDecimalOperators
{
private static final int PAGE_SIZE = 30000;
private static final DecimalType SHORT_DECIMAL_TYPE = createDecimalType(10, 0);
private static final DecimalType LONG_DECIMAL_TYPE = createDecimalType(20, 0);
private static final SqlParser SQL_PARSER = new SqlParser();
@State(Thread)
public static class CastDoubleToDecimalBenchmarkState
        extends BaseState
{
    private static final int SCALE = 2;

    // Target precision for the cast; "BIGINT" benchmarks CAST(d1 AS BIGINT) instead.
    @Param({"10", "35", "BIGINT"})
    private String precision = "10";

    @Setup
    public void setup()
    {
        addSymbol("d1", DOUBLE);

        String expression;
        if (precision.equals("BIGINT")) {
            setDoubleMaxValue(Long.MAX_VALUE);
            expression = "CAST(d1 AS BIGINT)";
        }
        else {
            // Bound the random doubles so the cast target can represent them.
            // Integer.parseInt avoids the boxing round-trip of Integer.valueOf.
            // NOTE(review): the bound uses 9^(precision - SCALE), presumably a
            // conservative value below 10^(precision - SCALE) - 1 — confirm intent.
            setDoubleMaxValue(Math.pow(9, Integer.parseInt(precision) - SCALE));
            expression = String.format("CAST(d1 AS DECIMAL(%s, %d))", precision, SCALE);
        }
        generateRandomInputPage();
        generateProcessor(expression);
    }
}
@Benchmark
public Object castDoubleToDecimalBenchmark(CastDoubleToDecimalBenchmarkState state)
{
    // Runs the pre-compiled page processor over the state's generated input page.
    return execute(state);
}
@Test
public void testCastDoubleToDecimalBenchmark()
{
    // Smoke test: run the benchmark body once outside of the JMH harness.
    CastDoubleToDecimalBenchmarkState benchmarkState = new CastDoubleToDecimalBenchmarkState();
    benchmarkState.setup();
    castDoubleToDecimalBenchmark(benchmarkState);
}
@State(Thread)
public static class CastDecimalToDoubleBenchmarkState
        extends BaseState
{
    private static final int SCALE = 10;

    // Source decimal precision; 15 exercises the short-decimal path, 35 the long one.
    @Param({"15", "35"})
    private String precision = "15";

    @Setup
    public void setup()
    {
        // Integer.parseInt avoids the boxing round-trip of Integer.valueOf.
        addSymbol("v1", createDecimalType(Integer.parseInt(precision), SCALE));

        String expression = "CAST(v1 AS DOUBLE)";
        generateRandomInputPage();
        generateProcessor(expression);
    }
}
@Benchmark
public Object castDecimalToDoubleBenchmark(CastDecimalToDoubleBenchmarkState state)
{
    // Runs the pre-compiled page processor over the state's generated input page.
    return execute(state);
}
@Test
public void testCastDecimalToDoubleBenchmark()
{
    // Smoke test: run the benchmark body once outside of the JMH harness.
    CastDecimalToDoubleBenchmarkState benchmarkState = new CastDecimalToDoubleBenchmarkState();
    benchmarkState.setup();
    castDecimalToDoubleBenchmark(benchmarkState);
}
@State(Thread)
public static class CastDecimalToVarcharBenchmarkState
        extends BaseState
{
    private static final int SCALE = 10;

    // Source decimal precision; 15 exercises the short-decimal path, 35 the long one.
    @Param({"15", "35"})
    private String precision = "35";

    @Setup
    public void setup()
    {
        // Integer.parseInt avoids the boxing round-trip of Integer.valueOf.
        addSymbol("v1", createDecimalType(Integer.parseInt(precision), SCALE));

        String expression = "CAST(v1 AS VARCHAR)";
        generateRandomInputPage();
        generateProcessor(expression);
    }
}
@Benchmark
public Object castDecimalToVarcharBenchmark(CastDecimalToVarcharBenchmarkState state)
{
    // Runs the pre-compiled page processor over the state's generated input page.
    return execute(state);
}
@Test
public void testCastDecimalToVarcharBenchmark()
{
CastDecimalToVarcharBenchmarkState state = new CastDecimalToVarcharBenchmarkState();
state.setup();
castDecimalToVarcharBenchmark(state);
}
    // Benchmarks decimal/double addition chains.
    // Symbol naming convention (used by the arithmetic benchmarks below):
    //   dN = DOUBLE, iN = BIGINT, sN = short (<= 18 digit) decimal, lN = long decimal.
    @State(Thread)
    public static class AdditionBenchmarkState
            extends BaseState
    {
        @Param({"d1 + d2",
                "d1 + d2 + d3 + d4",
                "s1 + s2",
                "s1 + s2 + s3 + s4",
                "l1 + l2",
                "l1 + l2 + l3 + l4",
                "s2 + l3 + l1 + s4"})
        private String expression = "d1 + d2";
        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);
            addSymbol("s1", createDecimalType(10, 5));
            addSymbol("s2", createDecimalType(7, 2));
            addSymbol("s3", createDecimalType(12, 2));
            addSymbol("s4", createDecimalType(2, 1));
            addSymbol("l1", createDecimalType(35, 10));
            addSymbol("l2", createDecimalType(25, 5));
            addSymbol("l3", createDecimalType(20, 6));
            addSymbol("l4", createDecimalType(25, 8));
            generateRandomInputPage();
            generateProcessor(expression);
        }
    }
    @Benchmark
    public Object additionBenchmark(AdditionBenchmarkState state)
    {
        return execute(state);
    }
    // Smoke test: runs the benchmark body once so a broken expression fails in CI.
    @Test
    public void testAdditionBenchmark()
    {
        AdditionBenchmarkState state = new AdditionBenchmarkState();
        state.setup();
        additionBenchmark(state);
    }
    // Benchmarks multiplication; the inline comments name the result-width
    // combination exercised (short = <= 18 digit decimal, long = wider).
    @State(Thread)
    public static class MultiplyBenchmarkState
            extends BaseState
    {
        @Param({"d1 * d2",
                "d1 * d2 * d3 * d4",
                "i1 * i2",
                // short short -> short
                "s1 * s2",
                "s1 * s2 * s5 * s6",
                // short short -> long
                "s3 * s4",
                // long short -> long
                "l2 * s2",
                "l2 * s2 * s5 * s6",
                // short long -> long
                "s1 * l2",
                // long long -> long
                "l1 * l2"})
        private String expression = "d1 * d2";
        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);
            addSymbol("i1", BIGINT);
            addSymbol("i2", BIGINT);
            addSymbol("s1", createDecimalType(5, 2));
            addSymbol("s2", createDecimalType(3, 1));
            addSymbol("s3", createDecimalType(10, 5));
            addSymbol("s4", createDecimalType(10, 2));
            addSymbol("s5", createDecimalType(3, 2));
            addSymbol("s6", createDecimalType(2, 1));
            addSymbol("l1", createDecimalType(19, 10));
            addSymbol("l2", createDecimalType(19, 5));
            generateRandomInputPage();
            generateProcessor(expression);
        }
    }
    @Benchmark
    public Object multiplyBenchmark(MultiplyBenchmarkState state)
    {
        return execute(state);
    }
    // Smoke test: runs the benchmark body once so a broken expression fails in CI.
    @Test
    public void testMultiplyBenchmark()
    {
        MultiplyBenchmarkState state = new MultiplyBenchmarkState();
        state.setup();
        multiplyBenchmark(state);
    }
    // Benchmarks division; BaseState.generateRandomValue never yields 0 for
    // BIGINT and randomDecimal is non-zero, so divisors cannot be zero.
    @State(Thread)
    public static class DivisionBenchmarkState
            extends BaseState
    {
        @Param({"d1 / d2",
                "d1 / d2 / d3 / d4",
                "i1 / i2",
                "i1 / i2 / i3 / i4",
                // short short -> short
                "s1 / s2",
                "s1 / s2 / s2 / s2",
                // short short -> long
                "s1 / s3",
                // short long -> short
                "s2 / l1",
                // long short -> long
                "l1 / s2",
                // short long -> long
                "s3 / l1",
                // long long -> long
                "l2 / l3",
                "l2 / l4 / l4 / l4",
                "l2 / s4 / s4 / s4"})
        private String expression = "d1 / d2";
        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);
            addSymbol("i1", BIGINT);
            addSymbol("i2", BIGINT);
            addSymbol("i3", BIGINT);
            addSymbol("i4", BIGINT);
            addSymbol("s1", createDecimalType(8, 3));
            addSymbol("s2", createDecimalType(6, 2));
            addSymbol("s3", createDecimalType(17, 7));
            addSymbol("s4", createDecimalType(3, 2));
            addSymbol("l1", createDecimalType(19, 3));
            addSymbol("l2", createDecimalType(20, 3));
            addSymbol("l3", createDecimalType(21, 10));
            addSymbol("l4", createDecimalType(19, 4));
            generateRandomInputPage();
            generateProcessor(expression);
        }
    }
    @Benchmark
    public Object divisionBenchmark(DivisionBenchmarkState state)
    {
        return execute(state);
    }
    // Smoke test: runs the benchmark body once so a broken expression fails in CI.
    @Test
    public void testDivisionBenchmark()
    {
        DivisionBenchmarkState state = new DivisionBenchmarkState();
        state.setup();
        divisionBenchmark(state);
    }
    // Benchmarks modulo; divisors are never zero (see BaseState.generateRandomValue).
    @State(Thread)
    public static class ModuloBenchmarkState
            extends BaseState
    {
        @Param({"d1 % d2",
                "d1 % d2 % d3 % d4",
                "i1 % i2",
                "i1 % i2 % i3 % i4",
                // short short -> short
                "s1 % s2",
                "s1 % s2 % s2 % s2",
                // short long -> short
                "s2 % l2",
                // long short -> long
                "l3 % s3",
                // short long -> long
                "s4 % l3",
                // long long -> long
                "l2 % l3",
                "l2 % l3 % l4 % l1"})
        private String expression = "d1 % d2";
        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);
            addSymbol("i1", BIGINT);
            addSymbol("i2", BIGINT);
            addSymbol("i3", BIGINT);
            addSymbol("i4", BIGINT);
            addSymbol("s1", createDecimalType(8, 3));
            addSymbol("s2", createDecimalType(6, 2));
            addSymbol("s3", createDecimalType(9, 0));
            addSymbol("s4", createDecimalType(12, 2));
            addSymbol("l1", createDecimalType(19, 3));
            addSymbol("l2", createDecimalType(20, 3));
            addSymbol("l3", createDecimalType(21, 10));
            addSymbol("l4", createDecimalType(19, 4));
            generateRandomInputPage();
            generateProcessor(expression);
        }
    }
    @Benchmark
    public Object moduloBenchmark(ModuloBenchmarkState state)
    {
        return execute(state);
    }
    // Smoke test: runs the benchmark body once so a broken expression fails in CI.
    @Test
    public void testModuloBenchmark()
    {
        ModuloBenchmarkState state = new ModuloBenchmarkState();
        state.setup();
        moduloBenchmark(state);
    }
    // Benchmarks '<' comparison chains; uses a sequential (not random) input page,
    // one start value per channel.
    @State(Thread)
    public static class InequalityBenchmarkState
            extends BaseState
    {
        @Param({"d1 < d2",
                "d1 < d2 AND d1 < d3 AND d1 < d4 AND d2 < d3 AND d2 < d4 AND d3 < d4",
                "s1 < s2",
                "s1 < s2 AND s1 < s3 AND s1 < s4 AND s2 < s3 AND s2 < s4 AND s3 < s4",
                "l1 < l2",
                "l1 < l2 AND l1 < l3 AND l1 < l4 AND l2 < l3 AND l2 < l4 AND l3 < l4"})
        private String expression = "d1 < d2";
        @Setup
        public void setup()
        {
            addSymbol("d1", DOUBLE);
            addSymbol("d2", DOUBLE);
            addSymbol("d3", DOUBLE);
            addSymbol("d4", DOUBLE);
            addSymbol("s1", SHORT_DECIMAL_TYPE);
            addSymbol("s2", SHORT_DECIMAL_TYPE);
            addSymbol("s3", SHORT_DECIMAL_TYPE);
            addSymbol("s4", SHORT_DECIMAL_TYPE);
            addSymbol("l1", LONG_DECIMAL_TYPE);
            addSymbol("l2", LONG_DECIMAL_TYPE);
            addSymbol("l3", LONG_DECIMAL_TYPE);
            addSymbol("l4", LONG_DECIMAL_TYPE);
            // One initial value per registered channel (12 channels).
            generateInputPage(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11);
            generateProcessor(expression);
        }
    }
    @Benchmark
    public Object inequalityBenchmark(InequalityBenchmarkState state)
    {
        return execute(state);
    }
    // Smoke test: runs the benchmark body once so a broken expression fails in CI.
    @Test
    public void testInequalityBenchmark()
    {
        InequalityBenchmarkState state = new InequalityBenchmarkState();
        state.setup();
        inequalityBenchmark(state);
    }
    // Benchmarks narrowing decimal-to-decimal(8, 0) casts; symbol names encode
    // the source type as <kind>_<precision>_<scale>.
    @State(Thread)
    public static class DecimalToShortDecimalCastBenchmarkState
            extends BaseState
    {
        @Param({"cast(l_38_30 as decimal(8, 0))",
                "cast(l_26_18 as decimal(8, 0))",
                "cast(l_20_12 as decimal(8, 0))",
                "cast(l_20_8 as decimal(8, 0))",
                "cast(s_17_9 as decimal(8, 0))"})
        private String expression = "cast(l_38_30 as decimal(8, 0))";
        @Setup
        public void setup()
        {
            addSymbol("l_38_30", createDecimalType(38, 30));
            addSymbol("l_26_18", createDecimalType(26, 18));
            addSymbol("l_20_12", createDecimalType(20, 12));
            addSymbol("l_20_8", createDecimalType(20, 8));
            addSymbol("s_17_9", createDecimalType(17, 9));
            // Sequential values starting at 10000 for each of the five channels.
            generateInputPage(10000, 10000, 10000, 10000, 10000);
            generateProcessor(expression);
        }
    }
    @Benchmark
    public Object decimalToShortDecimalCastBenchmark(DecimalToShortDecimalCastBenchmarkState state)
    {
        return execute(state);
    }
    // Smoke test: runs the benchmark body once so a broken expression fails in CI.
    @Test
    public void testDecimalToShortDecimalCastBenchmark()
    {
        DecimalToShortDecimalCastBenchmarkState state = new DecimalToShortDecimalCastBenchmarkState();
        state.setup();
        decimalToShortDecimalCastBenchmark(state);
    }
    // Runs the state's compiled PageProcessor over the prepared input page and
    // copies the produced pages into an immutable list, so the whole output is
    // materialized (and thus measured) inside the benchmark invocation.
    private Object execute(BaseState state)
    {
        return ImmutableList.copyOf(
                state.getProcessor().process(
                        SESSION,
                        new DriverYieldSignal(),
                        newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()),
                        state.getInputPage()));
    }
private static class BaseState
{
private final MetadataManager metadata = createTestMetadataManager();
private final Session session = testSessionBuilder().build();
private final Random random = new Random();
protected final Map<String, Symbol> symbols = new HashMap<>();
protected final Map<Symbol, Type> symbolTypes = new HashMap<>();
private final Map<Symbol, Integer> sourceLayout = new HashMap<>();
protected final List<Type> types = new LinkedList<>();
protected Page inputPage;
private PageProcessor processor;
private double doubleMaxValue = 2L << 31;
public Page getInputPage()
{
return inputPage;
}
public PageProcessor getProcessor()
{
return processor;
}
protected void addSymbol(String name, Type type)
{
Symbol symbol = new Symbol(name);
symbols.put(name, symbol);
symbolTypes.put(symbol, type);
sourceLayout.put(symbol, types.size());
types.add(type);
}
protected void generateRandomInputPage()
{
RowPagesBuilder buildPagesBuilder = rowPagesBuilder(types);
for (int i = 0; i < PAGE_SIZE; i++) {
Object[] values = types.stream()
.map(this::generateRandomValue)
.collect(toList()).toArray();
buildPagesBuilder.row(values);
}
inputPage = getOnlyElement(buildPagesBuilder.build());
}
protected void generateInputPage(int... initialValues)
{
RowPagesBuilder buildPagesBuilder = rowPagesBuilder(types);
buildPagesBuilder.addSequencePage(PAGE_SIZE, initialValues);
inputPage = getOnlyElement(buildPagesBuilder.build());
}
protected void generateProcessor(String expression)
{
processor = new ExpressionCompiler(metadata, new PageFunctionCompiler(metadata, 0)).compilePageProcessor(Optional.empty(), ImmutableList.of(rowExpression(expression))).get();
}
protected void setDoubleMaxValue(double doubleMaxValue)
{
this.doubleMaxValue = doubleMaxValue;
}
private RowExpression rowExpression(String expression)
{
Expression inputReferenceExpression = new SymbolToInputRewriter(sourceLayout).rewrite(createExpression(expression, metadata, TypeProvider.copyOf(symbolTypes)));
Map<Integer, Type> types = sourceLayout.entrySet().stream()
.collect(toMap(Map.Entry::getValue, entry -> symbolTypes.get(entry.getKey())));
Map<NodeRef<Expression>, Type> expressionTypes = getExpressionTypesFromInput(TEST_SESSION, metadata, SQL_PARSER, types, inputReferenceExpression, emptyList(), WarningCollector.NOOP);
return SqlToRowExpressionTranslator.translate(inputReferenceExpression, SCALAR, expressionTypes, metadata.getFunctionRegistry(), metadata.getTypeManager(), TEST_SESSION, true);
}
private Object generateRandomValue(Type type)
{
if (type instanceof DoubleType) {
return random.nextDouble() * (2L * doubleMaxValue) - doubleMaxValue;
}
else if (type instanceof DecimalType) {
return randomDecimal((DecimalType) type);
}
else if (type instanceof BigintType) {
int randomInt = random.nextInt();
return randomInt == 0 ? 1 : randomInt;
}
throw new UnsupportedOperationException(type.toString());
}
private SqlDecimal randomDecimal(DecimalType type)
{
int maxBits = (int) (Math.log(Math.pow(10, type.getPrecision())) / Math.log(2));
BigInteger bigInteger = new BigInteger(maxBits, random);
if (bigInteger.equals(ZERO)) {
bigInteger = ONE;
}
if (random.nextBoolean()) {
bigInteger = bigInteger.negate();
}
return new SqlDecimal(bigInteger, type.getPrecision(), type.getScale());
}
}
public static void main(String[] args)
throws RunnerException
{
Options options = new OptionsBuilder()
.verbosity(VerboseMode.NORMAL)
.include(".*" + BenchmarkDecimalOperators.class.getSimpleName() + ".*")
.build();
new Runner(options).run();
}
}
| apache-2.0 |
andreagenso/java2scala | test/J2s/java/openjdk-6-src-b27/jdk/src/share/classes/java/util/logging/MemoryHandler.java | 9924 | /*
* Copyright (c) 2000, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.util.logging;
/**
* <tt>Handler</tt> that buffers requests in a circular buffer in memory.
* <p>
* Normally this <tt>Handler</tt> simply stores incoming <tt>LogRecords</tt>
* into its memory buffer and discards earlier records. This buffering
* is very cheap and avoids formatting costs. On certain trigger
* conditions, the <tt>MemoryHandler</tt> will push out its current buffer
* contents to a target <tt>Handler</tt>, which will typically publish
* them to the outside world.
* <p>
* There are three main models for triggering a push of the buffer:
* <ul>
* <li>
* An incoming <tt>LogRecord</tt> has a type that is greater than
* a pre-defined level, the <tt>pushLevel</tt>.
* <li>
* An external class calls the <tt>push</tt> method explicitly.
* <li>
* A subclass overrides the <tt>log</tt> method and scans each incoming
* <tt>LogRecord</tt> and calls <tt>push</tt> if a record matches some
* desired criteria.
* </ul>
* <p>
* <b>Configuration:</b>
* By default each <tt>MemoryHandler</tt> is initialized using the following
* LogManager configuration properties. If properties are not defined
* (or have invalid values) then the specified default values are used.
* If no default value is defined then a RuntimeException is thrown.
* <ul>
* <li> java.util.logging.MemoryHandler.level
* specifies the level for the <tt>Handler</tt>
* (defaults to <tt>Level.ALL</tt>).
* <li> java.util.logging.MemoryHandler.filter
* specifies the name of a <tt>Filter</tt> class to use
* (defaults to no <tt>Filter</tt>).
* <li> java.util.logging.MemoryHandler.size
* defines the buffer size (defaults to 1000).
* <li> java.util.logging.MemoryHandler.push
* defines the <tt>pushLevel</tt> (defaults to <tt>level.SEVERE</tt>).
* <li> java.util.logging.MemoryHandler.target
* specifies the name of the target <tt>Handler </tt> class.
* (no default).
* </ul>
*
* @since 1.4
*/
public class MemoryHandler extends Handler {
    private final static int DEFAULT_SIZE = 1000;
    // Guarded by 'this': read by publish()/getPushLevel(), written by setPushLevel().
    private Level pushLevel;
    private int size;
    private Handler target;
    // Circular buffer: 'start' indexes the oldest record, 'count' is the number
    // of live records (0 <= count <= buffer.length).
    private LogRecord buffer[];
    int start, count;

    // Private method to configure a MemoryHandler from LogManager
    // properties and/or default values as specified in the class
    // javadoc.
    private void configure() {
        LogManager manager = LogManager.getLogManager();
        String cname = getClass().getName();

        pushLevel = manager.getLevelProperty(cname +".push", Level.SEVERE);
        size = manager.getIntProperty(cname + ".size", DEFAULT_SIZE);
        if (size <= 0) {
            size = DEFAULT_SIZE;
        }
        setLevel(manager.getLevelProperty(cname +".level", Level.ALL));
        setFilter(manager.getFilterProperty(cname +".filter", null));
        setFormatter(manager.getFormatterProperty(cname +".formatter", new SimpleFormatter()));
    }

    /**
     * Create a <tt>MemoryHandler</tt> and configure it based on
     * <tt>LogManager</tt> configuration properties.
     */
    public MemoryHandler() {
        // Temporarily unseal while configure() calls the setters (Handler
        // convention; see Handler.sealed).
        sealed = false;
        configure();
        sealed = true;

        String name = "???";
        try {
            LogManager manager = LogManager.getLogManager();
            name = manager.getProperty("java.util.logging.MemoryHandler.target");
            Class clz = ClassLoader.getSystemClassLoader().loadClass(name);
            target = (Handler) clz.newInstance();
        } catch (Exception ex) {
            throw new RuntimeException("MemoryHandler can't load handler \"" + name + "\"" , ex);
        }
        init();
    }

    // Initialize.  Size is a count of LogRecords.
    private void init() {
        buffer = new LogRecord[size];
        start = 0;
        count = 0;
    }

    /**
     * Create a <tt>MemoryHandler</tt>.
     * <p>
     * The <tt>MemoryHandler</tt> is configured based on <tt>LogManager</tt>
     * properties (or their default values) except that the given <tt>pushLevel</tt>
     * argument and buffer size argument are used.
     *
     * @param target  the Handler to which to publish output.
     * @param size    the number of log records to buffer (must be greater than zero)
     * @param pushLevel  message level to push on
     *
     * @throws IllegalArgumentException is size is <= 0
     */
    public MemoryHandler(Handler target, int size, Level pushLevel) {
        if (target == null || pushLevel == null) {
            throw new NullPointerException();
        }
        if (size <= 0) {
            throw new IllegalArgumentException();
        }
        sealed = false;
        configure();
        sealed = true;
        this.target = target;
        this.pushLevel = pushLevel;
        this.size = size;
        init();
    }

    /**
     * Store a <tt>LogRecord</tt> in an internal buffer.
     * <p>
     * If there is a <tt>Filter</tt>, its <tt>isLoggable</tt>
     * method is called to check if the given log record is loggable.
     * If not we return.  Otherwise the given record is copied into
     * an internal circular buffer.  Then the record's level property is
     * compared with the <tt>pushLevel</tt>. If the given level is
     * greater than or equal to the <tt>pushLevel</tt> then <tt>push</tt>
     * is called to write all buffered records to the target output
     * <tt>Handler</tt>.
     *
     * @param  record  description of the log event. A null record is
     *                 silently ignored and is not published
     */
    public synchronized void publish(LogRecord record) {
        if (!isLoggable(record)) {
            return;
        }
        // Append at the logical end of the ring; once full, advance 'start'
        // so the oldest record is overwritten.
        int ix = (start+count)%buffer.length;
        buffer[ix] = record;
        if (count < buffer.length) {
            count++;
        } else {
            start++;
            start %= buffer.length;
        }
        if (record.getLevel().intValue() >= pushLevel.intValue()) {
            push();
        }
    }

    /**
     * Push any buffered output to the target <tt>Handler</tt>.
     * <p>
     * The buffer is then cleared.
     */
    public synchronized void push() {
        for (int i = 0; i < count; i++) {
            int ix = (start+i)%buffer.length;
            LogRecord record = buffer[ix];
            target.publish(record);
        }
        // Empty the buffer.
        start = 0;
        count = 0;
    }

    /**
     * Causes a flush on the target <tt>Handler</tt>.
     * <p>
     * Note that the current contents of the <tt>MemoryHandler</tt>
     * buffer are <b>not</b> written out.  That requires a "push".
     */
    public void flush() {
        target.flush();
    }

    /**
     * Close the <tt>Handler</tt> and free all associated resources.
     * This will also close the target <tt>Handler</tt>.
     *
     * @exception  SecurityException  if a security manager exists and if
     *             the caller does not have <tt>LoggingPermission("control")</tt>.
     */
    public void close() throws SecurityException {
        target.close();
        setLevel(Level.OFF);
    }

    /**
     * Set the <tt>pushLevel</tt>.  After a <tt>LogRecord</tt> is copied
     * into our internal buffer, if its level is greater than or equal to
     * the <tt>pushLevel</tt>, then <tt>push</tt> will be called.
     *
     * @param newLevel the new value of the <tt>pushLevel</tt>
     * @exception  SecurityException  if a security manager exists and if
     *             the caller does not have <tt>LoggingPermission("control")</tt>.
     */
    public synchronized void setPushLevel(Level newLevel) throws SecurityException {
        // Synchronized (fix): pushLevel is read while holding 'this' in both
        // publish() and getPushLevel(); an unsynchronized write was a data race.
        // The unused LogManager local previously fetched here was dropped.
        if (newLevel == null) {
            throw new NullPointerException();
        }
        checkPermission();
        pushLevel = newLevel;
    }

    /**
     * Get the <tt>pushLevel</tt>.
     *
     * @return the value of the <tt>pushLevel</tt>
     */
    public synchronized Level getPushLevel() {
        return pushLevel;
    }

    /**
     * Check if this <tt>Handler</tt> would actually log a given
     * <tt>LogRecord</tt> into its internal buffer.
     * <p>
     * This method checks if the <tt>LogRecord</tt> has an appropriate level and
     * whether it satisfies any <tt>Filter</tt>.  However it does <b>not</b>
     * check whether the <tt>LogRecord</tt> would result in a "push" of the
     * buffer contents. It will return false if the <tt>LogRecord</tt> is Null.
     * <p>
     * @param record  a <tt>LogRecord</tt>
     * @return true if the <tt>LogRecord</tt> would be logged.
     *
     */
    public boolean isLoggable(LogRecord record) {
        // NOTE(review): the javadoc promises false for a null record, but this
        // delegates straight to super — confirm Handler.isLoggable handles null.
        return super.isLoggable(record);
    }
}
| apache-2.0 |
natabeck/cloud-slang | cloudslang-entities/src/main/java/io/cloudslang/lang/entities/ScoreLangConstants.java | 2665 | /*
* (c) Copyright 2014 Hewlett-Packard Development Company, L.P.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0 which accompany this distribution.
*
* The Apache License is available at
* http://www.apache.org/licenses/LICENSE-2.0
*/
package io.cloudslang.lang.entities;
/**
* User: stoneo
* Date: 07/10/2014
* Time: 12:52
*/
/**
 * String keys and event-type names shared across the CloudSlang runtime.
 * NOTE(review): constant-interface pattern — kept as-is because existing code
 * references these fields directly; converting it would break callers.
 */
public interface ScoreLangConstants {

    // execution environment
    String RUN_ENV = "runEnv";
    String HOOKS = "hooks";
    String NODE_NAME_KEY = "nodeName";

    //action scope
    String ACTION_CLASS_KEY = "className";
    String ACTION_TYPE = "actionType";
    String ACTION_METHOD_KEY = "methodName";
    String PYTHON_SCRIPT_KEY = "python_script";

    //navigation
    String NEXT_STEP_ID_KEY = "nextStepId";
    String PREVIOUS_STEP_ID_KEY = "previousStepId";

    //operation scope
    String EXECUTABLE_INPUTS_KEY = "executableInputs";
    String BIND_OUTPUT_FROM_INPUTS_KEY = "fromInputs";
    String USER_INPUTS_KEY = "userInputs";
    String EXECUTABLE_OUTPUTS_KEY = "executableOutputs";
    String EXECUTABLE_RESULTS_KEY = "executableResults";

    //task scope
    String TASK_INPUTS_KEY = "taskInputs";
    String TASK_PUBLISH_KEY = "taskPublishValues";
    String TASK_NAVIGATION_KEY = "taskNavigationValues";
    String REF_ID = "refId";
    String LOOP_KEY = "loop";
    String BREAK_LOOP_KEY = "breakOn";

    // async loop
    String ASYNC_LOOP_KEY = "async_loop";
    String ASYNC_LOOP_STATEMENT_KEY = "asyncLoopStatement";
    String TASK_AGGREGATE_KEY = "taskAggregateValues";
    String BRANCH_BEGIN_STEP_ID_KEY = "branchBeginStep";

    // Events types
    String SLANG_EXECUTION_EXCEPTION = "SLANG_EXECUTION_EXCEPTION";
    String EVENT_ACTION_START = "EVENT_ACTION_START";
    String EVENT_ACTION_END = "EVENT_ACTION_END";
    String EVENT_ACTION_ERROR = "EVENT_ACTION_ERROR";
    String EVENT_INPUT_START = "EVENT_INPUT_START";
    String EVENT_INPUT_END = "EVENT_INPUT_END";
    String EVENT_OUTPUT_START = "EVENT_OUTPUT_START";
    String EVENT_OUTPUT_END = "EVENT_OUTPUT_END";
    String EVENT_EXECUTION_FINISHED = "EVENT_EXECUTION_FINISHED";
    String EVENT_BRANCH_START = "EVENT_BRANCH_START";
    String EVENT_BRANCH_END = "EVENT_BRANCH_END";
    String EVENT_ASYNC_LOOP_EXPRESSION_START = "EVENT_ASYNC_LOOP_EXPRESSION_START";
    String EVENT_ASYNC_LOOP_EXPRESSION_END = "EVENT_ASYNC_LOOP_EXPRESSION_END";
    String EVENT_ASYNC_LOOP_OUTPUT_START = "EVENT_ASYNC_LOOP_OUTPUT_START";
    String EVENT_ASYNC_LOOP_OUTPUT_END = "EVENT_ASYNC_LOOP_OUTPUT_END";

    // results
    String SUCCESS_RESULT = "SUCCESS";
    String FAILURE_RESULT = "FAILURE";
}
| apache-2.0 |
badmusamuda/PettiCash | src/main/java/com/herokuapp/petticash/User.java | 1715 | package com.herokuapp.petticash;
import javax.inject.Named;
import javax.faces.bean.ManagedBean;
import java.io.Serializable;
import javax.enterprise.context.RequestScoped;
@Named
@RequestScoped
@ManagedBean(name="user")
public class User
{
    // Credential properties bound from the view.
    private String name;
    private String password;

    // Validation regexes exposed read-only via their getters.
    private String namePattern = "[a-zA-Z]{5,15}";
    private String passwordPattern = "[a-zA-Z]{2}";

    // Text-file form properties.
    private String textName;
    private String textContent;
    private String textOption;

    public String getName()
    {
        return name;
    }

    public void setName(String value)
    {
        this.name = value;
    }

    public String getPassword()
    {
        return password;
    }

    public void setPassword(String value)
    {
        this.password = value;
    }

    public String getNamePattern()
    {
        return namePattern;
    }

    public String getPasswordPattern()
    {
        return passwordPattern;
    }

    public String getTextName()
    {
        return textName;
    }

    public void setTextName(String value)
    {
        this.textName = value;
    }

    public String getTextContent()
    {
        return textContent;
    }

    public void setTextContent(String value)
    {
        this.textContent = value;
    }

    public String getTextOption()
    {
        return textOption;
    }

    public void setTextOption(String value)
    {
        this.textOption = value;
    }

    /** Renders the file name (upper-cased) and content (lower-cased), or "" when either is unset. */
    @Override
    public String toString()
    {
        if (getTextName() == null || getTextContent() == null) {
            return "";
        }
        return String.format("File Name is : %s%s \n Text Content is : %s",
                textName.toUpperCase(), this.textOption, textContent.toLowerCase());
    }
}
| apache-2.0 |
GovernmentCommunicationsHeadquarters/Gaffer | library/sketches-library/src/test/java/uk/gov/gchq/gaffer/sketches/datasketches/cardinality/serialisation/HllSketchSerialiserTest.java | 2469 | /*
* Copyright 2017-2020 Crown Copyright
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.gchq.gaffer.sketches.datasketches.cardinality.serialisation;
import com.yahoo.sketches.hll.HllSketch;
import org.junit.jupiter.api.Test;
import uk.gov.gchq.gaffer.exception.SerialisationException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
public class HllSketchSerialiserTest {
    private static final HllSketchSerialiser SERIALISER = new HllSketchSerialiser();

    // Tolerance when comparing cardinality estimates across the round trip.
    private static final double DELTA = 0.0000001D;

    @Test
    public void testSerialiseAndDeserialise() {
        final HllSketch sketch = new HllSketch(15);
        sketch.update("A");
        sketch.update("B");
        sketch.update("C");
        testSerialiser(sketch);

        final HllSketch emptySketch = new HllSketch(15);
        testSerialiser(emptySketch);
    }

    /**
     * Round-trips the sketch through the serialiser and asserts the cardinality
     * estimate is preserved.
     */
    private void testSerialiser(final HllSketch sketch) {
        final double cardinality = sketch.getEstimate();

        final byte[] sketchSerialised;
        try {
            sketchSerialised = SERIALISER.serialise(sketch);
        } catch (final SerialisationException exception) {
            // Pass the exception as the failure cause so the report shows the
            // stack trace instead of just "an exception occurred".
            fail("A SerialisationException occurred", exception);
            return;
        }

        final HllSketch sketchDeserialised;
        try {
            sketchDeserialised = SERIALISER.deserialise(sketchSerialised);
        } catch (final SerialisationException exception) {
            fail("A SerialisationException occurred", exception);
            return;
        }

        assertEquals(cardinality, sketchDeserialised.getEstimate(), DELTA);
    }

    @Test
    public void testCanHandleHllSketch() {
        assertTrue(SERIALISER.canHandle(HllSketch.class));
        assertFalse(SERIALISER.canHandle(String.class));
    }
}
| apache-2.0 |
LookThisCode/DeveloperBus | Season 2013/Brazil/Projects/AcesseMe-master/Desktop/acesseme/services/src/br/com/devbus/acesseme/model/Loja.java | 649 | package br.com.devbus.acesseme.model;
import java.io.IOException;
import java.util.List;
import br.com.devbus.acesseme.scraping.Scraping;
public class Loja{

    private final String nome;
    private final Scraping scraping;
    private final int tipo;
    // Filled in by run(); null until the scraping has executed.
    private List<Produto> produtos;

    public Loja(String nome, Scraping scraping, int tipo){
        this.nome = nome;
        this.scraping = scraping;
        this.tipo = tipo;
    }

    /** Executes the configured scraping and caches the products it produced. */
    public void run() throws IOException {
        scraping.execute();
        produtos = scraping.getProdutos();
    }

    public String getNome() {
        return nome;
    }

    public int getTipo() {
        return tipo;
    }

    public List<Produto> getProdutos() {
        return produtos;
    }
}
| apache-2.0 |
GeeQuery/cxf-plus | src/main/java/com/github/cxfplus/com/sun/xml/bind/v2/runtime/unmarshaller/UnmarshallerImpl.java | 20098 | /*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 1997-2007 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package com.github.cxfplus.com.sun.xml.bind.v2.runtime.unmarshaller;
import java.io.IOException;
import java.io.InputStream;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.JAXBException;
import javax.xml.bind.PropertyException;
import javax.xml.bind.UnmarshalException;
import javax.xml.bind.Unmarshaller;
import javax.xml.bind.UnmarshallerHandler;
import javax.xml.bind.ValidationEvent;
import javax.xml.bind.ValidationEventHandler;
import javax.xml.bind.annotation.adapters.XmlAdapter;
import javax.xml.bind.attachment.AttachmentUnmarshaller;
import javax.xml.bind.helpers.AbstractUnmarshallerImpl;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.events.XMLEvent;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Schema;
import com.github.cxfplus.com.sun.xml.bind.IDResolver;
import com.github.cxfplus.com.sun.xml.bind.api.ClassResolver;
import com.github.cxfplus.com.sun.xml.bind.unmarshaller.DOMScanner;
import com.github.cxfplus.com.sun.xml.bind.unmarshaller.InfosetScanner;
import com.github.cxfplus.com.sun.xml.bind.unmarshaller.Messages;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.AssociationMap;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.JAXBContextImpl;
import com.github.cxfplus.com.sun.xml.bind.v2.runtime.JaxBeanInfo;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
/**
* Default Unmarshaller implementation.
*
* <p>
* This class can be extended by the generated code to provide
* type-safe unmarshall methods.
*
* @author
* <a href="mailto:kohsuke.kawaguchi@sun.com">Kohsuke KAWAGUCHI</a>
*/
public final class UnmarshallerImpl extends AbstractUnmarshallerImpl implements ValidationEventHandler
{
    /** Owning {@link JAXBContext} */
    protected final JAXBContextImpl context;

    /**
     * schema which will be used to validate during calls to unmarshal
     */
    private Schema schema;

    /** Stateful unmarshalling context; reset and reused for each unmarshalling run. */
    public final UnmarshallingContext coordinator;

    /** Unmarshaller.Listener */
    private Listener externalListener;

    /**
     * The attachment unmarshaller used to support MTOM and swaRef.
     */
    private AttachmentUnmarshaller attachmentUnmarshaller;

    /** Resolves IDREFs during unmarshalling; defaults to {@link DefaultIDResolver}. */
    private IDResolver idResolver = new DefaultIDResolver();
    /**
     * Creates a new unmarshaller owned by the given context.
     *
     * @param context the owning JAXB context, retained for the unmarshaller's lifetime
     * @param assoc   association map handed to the {@link UnmarshallingContext}
     */
    public UnmarshallerImpl( JAXBContextImpl context, AssociationMap assoc ) {
        this.context = context;
        this.coordinator = new UnmarshallingContext( this, assoc );
        try {
            // This class implements ValidationEventHandler, so it installs itself
            // as its own default event handler.
            setEventHandler(this);
        } catch (JAXBException e) {
            throw new AssertionError(e); // impossible
        }
    }
public UnmarshallerHandler getUnmarshallerHandler() {
return getUnmarshallerHandler(true,null);
}
private SAXConnector getUnmarshallerHandler( boolean intern, JaxBeanInfo expectedType ) {
XmlVisitor h = createUnmarshallerHandler(null,false,expectedType);
if(intern)
h = new InterningXmlVisitor(h);
return new SAXConnector(h,null);
}
/**
* Creates and configures a new unmarshalling pipe line.
* Depending on the setting, we put a validator as a filter.
*
* @return
* A component that implements both {@link UnmarshallerHandler}
* and {@link ValidationEventHandler}. All the parsing errors
* should be reported to this error handler for the unmarshalling
* process to work correctly.
*
* Also, returned handler expects all the XML names to be interned.
*
*/
public final XmlVisitor createUnmarshallerHandler(InfosetScanner scanner, boolean inplace, JaxBeanInfo expectedType ) {
coordinator.reset(scanner,inplace,expectedType,idResolver);
XmlVisitor unmarshaller = coordinator;
// delegate to JAXP 1.3 for validation if the client provided a schema
if (schema != null)
unmarshaller = new ValidatingUnmarshaller(schema,unmarshaller);
if(attachmentUnmarshaller!=null && attachmentUnmarshaller.isXOPPackage())
unmarshaller = new MTOMDecorator(this,unmarshaller,attachmentUnmarshaller);
return unmarshaller;
}
private static final DefaultHandler dummyHandler = new DefaultHandler();
public static boolean needsInterning( XMLReader reader ) {
// attempt to set it to true, which could fail
try {
reader.setFeature("http://xml.org/sax/features/string-interning",true);
} catch (SAXException e) {
// if it fails that's fine. we'll work around on our side
}
try {
if( reader.getFeature("http://xml.org/sax/features/string-interning") )
return false; // no need for intern
} catch (SAXException e) {
// unrecognized/unsupported
}
// otherwise we need intern
return true;
}
protected Object unmarshal( XMLReader reader, InputSource source ) throws JAXBException {
return unmarshal0(reader,source,null);
}
protected <T> JAXBElement<T> unmarshal( XMLReader reader, InputSource source, Class<T> expectedType ) throws JAXBException {
if(expectedType==null)
throw new IllegalArgumentException();
return (JAXBElement)unmarshal0(reader,source,getBeanInfo(expectedType));
}
private Object unmarshal0( XMLReader reader, InputSource source, JaxBeanInfo expectedType ) throws JAXBException {
SAXConnector connector = getUnmarshallerHandler(needsInterning(reader),expectedType);
reader.setContentHandler(connector);
// saxErrorHandler will be set by the getUnmarshallerHandler method.
// configure XMLReader so that the error will be sent to it.
// This is essential for the UnmarshallerHandler to be able to abort
// unmarshalling when an error is found.
//
// Note that when this XMLReader is provided by the client code,
// it might be already configured to call a client error handler.
// This will clobber such handler, if any.
//
// Ryan noted that we might want to report errors to such a client
// error handler as well.
reader.setErrorHandler(coordinator);
try {
reader.parse(source);
} catch( IOException e ) {
throw new UnmarshalException(e);
} catch( SAXException e ) {
throw createUnmarshalException(e);
}
Object result = connector.getResult();
// avoid keeping unnecessary references too long to let the GC
// reclaim more memory.
// setting null upsets some parsers, so use a dummy instance instead.
reader.setContentHandler(dummyHandler);
reader.setErrorHandler(dummyHandler);
return result;
}
@Override
public <T> JAXBElement<T> unmarshal( Source source, Class<T> expectedType ) throws JAXBException {
if(source instanceof SAXSource) {
SAXSource ss = (SAXSource)source;
XMLReader reader = ss.getXMLReader();
if( reader == null )
reader = getXMLReader();
return unmarshal( reader, ss.getInputSource(), expectedType );
}
if(source instanceof StreamSource) {
return unmarshal( getXMLReader(), streamSourceToInputSource((StreamSource)source), expectedType );
}
if(source instanceof DOMSource)
return unmarshal( ((DOMSource)source).getNode(), expectedType );
// we don't handle other types of Source
throw new IllegalArgumentException();
}
public Object unmarshal0( Source source, JaxBeanInfo expectedType ) throws JAXBException {
if(source instanceof SAXSource) {
SAXSource ss = (SAXSource)source;
XMLReader reader = ss.getXMLReader();
if( reader == null )
reader = getXMLReader();
return unmarshal0( reader, ss.getInputSource(), expectedType );
}
if(source instanceof StreamSource) {
return unmarshal0( getXMLReader(), streamSourceToInputSource((StreamSource)source), expectedType );
}
if(source instanceof DOMSource)
return unmarshal0( ((DOMSource)source).getNode(), expectedType );
// we don't handle other types of Source
throw new IllegalArgumentException();
}
public final ValidationEventHandler getEventHandler() {
try {
return super.getEventHandler();
} catch (JAXBException e) {
// impossible
throw new AssertionError();
}
}
/**
* Returns true if an event handler is installed.
* <p>
* The default handler ignores any errors, and for that this method returns false.
*/
public final boolean hasEventHandler() {
return getEventHandler()!=this;
}
@Override
public <T> JAXBElement<T> unmarshal(Node node, Class<T> expectedType) throws JAXBException {
if(expectedType==null)
throw new IllegalArgumentException();
return (JAXBElement)unmarshal0(node,getBeanInfo(expectedType));
}
public final Object unmarshal( Node node ) throws JAXBException {
return unmarshal0(node,null);
}
// just to make the the test harness happy by making this method accessible
@Deprecated
public final Object unmarshal( SAXSource source ) throws JAXBException {
return super.unmarshal(source);
}
public final Object unmarshal0( Node node, JaxBeanInfo expectedType ) throws JAXBException {
try {
final DOMScanner scanner = new DOMScanner();
InterningXmlVisitor handler = new InterningXmlVisitor(createUnmarshallerHandler(null,false,expectedType));
scanner.setContentHandler(new SAXConnector(handler,scanner));
if(node instanceof Element)
scanner.scan((Element)node);
else
if(node instanceof Document)
scanner.scan((Document)node);
else
// no other type of input is supported
throw new IllegalArgumentException("Unexpected node type: "+node);
Object retVal = handler.getContext().getResult();
handler.getContext().clearResult();
return retVal;
} catch( SAXException e ) {
throw createUnmarshalException(e);
}
}
@Override
public Object unmarshal(XMLStreamReader reader) throws JAXBException {
return unmarshal0(reader,null);
}
@Override
public <T> JAXBElement<T> unmarshal(XMLStreamReader reader, Class<T> expectedType) throws JAXBException {
if(expectedType==null)
throw new IllegalArgumentException();
return (JAXBElement)unmarshal0(reader,getBeanInfo(expectedType));
}
public Object unmarshal0(XMLStreamReader reader, JaxBeanInfo expectedType) throws JAXBException {
if (reader == null) {
throw new IllegalArgumentException(
Messages.format(Messages.NULL_READER));
}
int eventType = reader.getEventType();
if (eventType != XMLStreamConstants.START_ELEMENT
&& eventType != XMLStreamConstants.START_DOCUMENT) {
// TODO: convert eventType into event name
throw new IllegalStateException(
Messages.format(Messages.ILLEGAL_READER_STATE,eventType));
}
XmlVisitor h = createUnmarshallerHandler(null,false,expectedType);
StAXConnector connector=StAXStreamConnector.create(reader,h);
try {
connector.bridge();
} catch (XMLStreamException e) {
throw handleStreamException(e);
}
Object retVal = h.getContext().getResult();
h.getContext().clearResult();
return retVal;
}
@Override
public <T> JAXBElement<T> unmarshal(XMLEventReader reader, Class<T> expectedType) throws JAXBException {
if(expectedType==null)
throw new IllegalArgumentException();
return (JAXBElement)unmarshal0(reader,getBeanInfo(expectedType));
}
@Override
public Object unmarshal(XMLEventReader reader) throws JAXBException {
return unmarshal0(reader,null);
}
private Object unmarshal0(XMLEventReader reader,JaxBeanInfo expectedType) throws JAXBException {
if (reader == null) {
throw new IllegalArgumentException(
Messages.format(Messages.NULL_READER));
}
try {
XMLEvent event = reader.peek();
if (!event.isStartElement() && !event.isStartDocument()) {
// TODO: convert event into event name
throw new IllegalStateException(
Messages.format(
Messages.ILLEGAL_READER_STATE,event.getEventType()));
}
// Quick hack until SJSXP fixes 6270116
boolean isZephyr = reader.getClass().getName().equals("com.sun.xml.stream.XMLReaderImpl");
XmlVisitor h = createUnmarshallerHandler(null,false,expectedType);
if(!isZephyr)
h = new InterningXmlVisitor(h);
new StAXEventConnector(reader,h).bridge();
return h.getContext().getResult();
} catch (XMLStreamException e) {
throw handleStreamException(e);
}
}
public Object unmarshal0( InputStream input, JaxBeanInfo expectedType ) throws JAXBException {
return unmarshal0(getXMLReader(),new InputSource(input),expectedType);
}
private static JAXBException handleStreamException(XMLStreamException e) {
// StAXStreamConnector wraps SAXException to XMLStreamException.
// XMLStreamException doesn't print its nested stack trace when it prints
// its stack trace, so if we wrap XMLStreamException in JAXBException,
// it becomes harder to find out the real problem.
// So we unwrap them here. But we don't want to unwrap too eagerly, because
// that could throw away some meaningful exception information.
Throwable ne = e.getNestedException();
if(ne instanceof JAXBException)
return (JAXBException)ne;
if(ne instanceof SAXException)
return new UnmarshalException(ne);
return new UnmarshalException(e);
}
public Object getProperty(String name) throws PropertyException {
if(name.equals(IDResolver.class.getName())) {
return idResolver;
}
return super.getProperty(name);
}
public void setProperty(String name, Object value) throws PropertyException {
if(name.equals(FACTORY)) {
coordinator.setFactories(value);
return;
}
if(name.equals(IDResolver.class.getName())) {
idResolver = (IDResolver)value;
return;
}
if(name.equals(ClassResolver.class.getName())) {
coordinator.classResolver = (ClassResolver)value;
return;
}
if(name.equals(ClassLoader.class.getName())) {
coordinator.classLoader = (ClassLoader)value;
return;
}
super.setProperty(name, value);
}
public static final String FACTORY = "com.sun.xml.bind.ObjectFactory";
@Override
public void setSchema(Schema schema) {
this.schema = schema;
}
@Override
public Schema getSchema() {
return schema;
}
@Override
public AttachmentUnmarshaller getAttachmentUnmarshaller() {
return attachmentUnmarshaller;
}
@Override
public void setAttachmentUnmarshaller(AttachmentUnmarshaller au) {
this.attachmentUnmarshaller = au;
}
/**
* @deprecated since 2.0
*/
@Override
public boolean isValidating() {
throw new UnsupportedOperationException();
}
/**
* @deprecated since 2.0
*/
@Override
public void setValidating(boolean validating) {
throw new UnsupportedOperationException();
}
@Override
public <A extends XmlAdapter> void setAdapter(Class<A> type, A adapter) {
if(type==null)
throw new IllegalArgumentException();
coordinator.putAdapter(type,adapter);
}
@Override
public <A extends XmlAdapter> A getAdapter(Class<A> type) {
if(type==null)
throw new IllegalArgumentException();
if(coordinator.containsAdapter(type))
// so as not to create a new instance when this method is called
return coordinator.getAdapter(type);
else
return null;
}
// opening up for public use
public UnmarshalException createUnmarshalException( SAXException e ) {
return super.createUnmarshalException(e);
}
/**
* Default error handling behavior fot {@link Unmarshaller}.
*/
public boolean handleEvent(ValidationEvent event) {
return event.getSeverity()!=ValidationEvent.FATAL_ERROR;
}
private static InputSource streamSourceToInputSource( StreamSource ss ) {
InputSource is = new InputSource();
is.setSystemId( ss.getSystemId() );
is.setByteStream( ss.getInputStream() );
is.setCharacterStream( ss.getReader() );
return is;
}
public <T> JaxBeanInfo<T> getBeanInfo(Class<T> clazz) throws JAXBException {
return context.getBeanInfo(clazz,true);
}
@Override
public Listener getListener() {
return externalListener;
}
@Override
public void setListener(Listener listener) {
externalListener = listener;
}
public UnmarshallingContext getContext() {
return coordinator;
}
}
| apache-2.0 |
zofuthan/kaa | server/appenders/flume/appender/src/test/java/org/kaaproject/kaa/server/appenders/flume/appender/FlumeLogAppenderTest.java | 7541 | /*
* Copyright 2014 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.appenders.flume.appender;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.kaaproject.kaa.common.avro.AvroByteArrayConverter;
import org.kaaproject.kaa.common.dto.logs.LogAppenderDto;
import org.kaaproject.kaa.server.appenders.flume.appender.client.FlumeClientManager;
import org.kaaproject.kaa.server.appenders.flume.appender.client.async.AppendBatchAsyncResultPojo;
import org.kaaproject.kaa.server.appenders.flume.config.gen.FlumeConfig;
import org.kaaproject.kaa.server.appenders.flume.config.gen.FlumeEventFormat;
import org.kaaproject.kaa.server.appenders.flume.config.gen.FlumeNode;
import org.kaaproject.kaa.server.appenders.flume.config.gen.FlumeNodes;
import org.kaaproject.kaa.server.common.log.shared.appender.LogDeliveryCallback;
import org.kaaproject.kaa.server.common.log.shared.appender.LogEventPack;
import org.kaaproject.kaa.server.common.log.shared.avro.gen.RecordHeader;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.util.ReflectionTestUtils;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
public class FlumeLogAppenderTest {

    private static final Logger LOG = LoggerFactory.getLogger(FlumeLogAppenderTest.class);

    // Fixture identifiers reused across the tests.
    private static final String APPLICATION_ID = "application_id";
    private static final String APPENDER_ID = "appender_id";
    private static final String APPENDER_NAME = "appender_name";

    // Appender under test plus its mocked/injected collaborators.
    private FlumeLogAppender appender;
    private FlumeEventBuilder flumeEventBuilder;
    private FlumeClientManager<?> flumeClientManager;
    private ExecutorService executor;
    private ExecutorService callbackExecutor;

    /**
     * Builds a fresh appender before each test and injects mock collaborators
     * and single-threaded executors into its private fields via reflection.
     */
    @Before
    public void before() throws Exception {
        appender = new FlumeLogAppender();
        appender.setName(APPENDER_NAME);
        appender.setAppenderId(APPENDER_ID);
        flumeEventBuilder = mock(FlumeEventBuilder.class);
        flumeClientManager = mock(FlumeClientManager.class);
        executor = Executors.newFixedThreadPool(1);
        callbackExecutor = Executors.newFixedThreadPool(1);
        // Field names must match the private fields declared in FlumeLogAppender.
        ReflectionTestUtils.setField(appender, "flumeEventBuilder", flumeEventBuilder);
        ReflectionTestUtils.setField(appender, "flumeClientManager", flumeClientManager);
        ReflectionTestUtils.setField(appender, "executor", executor);
        ReflectionTestUtils.setField(appender, "callbackExecutor", callbackExecutor);
    }

    /**
     * Verifies that init() accepts a well-formed appender DTO whose raw
     * configuration is an Avro-serialized FlumeConfig with two flume nodes.
     */
    @Test
    public void initTest() throws IOException {
        LOG.debug("Init test for appender name: {}, id: {}", appender.getName(), appender.getAppenderId());
        LogAppenderDto logAppender = new LogAppenderDto();
        logAppender.setApplicationId(APPLICATION_ID);
        logAppender.setId(APPENDER_ID);
        FlumeNodes nodes = FlumeNodes.newBuilder()
                .setFlumeNodes(Arrays.asList(new FlumeNode("localhost", 12121), new FlumeNode("localhost", 12122)))
                .build();
        FlumeConfig flumeConfig = FlumeConfig.newBuilder().setFlumeEventFormat(FlumeEventFormat.RECORDS_CONTAINER)
                .setHostsBalancing(nodes).setExecutorThreadPoolSize(2).setCallbackThreadPoolSize(2)
                .setClientsThreadPoolSize(2).build();
        AvroByteArrayConverter<FlumeConfig> converter = new AvroByteArrayConverter<>(FlumeConfig.class);
        byte[] rawConfiguration = converter.toByteArray(flumeConfig);
        logAppender.setRawConfiguration(rawConfiguration);
        appender.init(logAppender);
    }

    /**
     * When the client manager throws on delivery, the appender must report a
     * connection error to the callback.
     */
    @Test
    public void appendWithExceptionTest() throws EventDeliveryException, InterruptedException {
        LogEventPack eventPack = new LogEventPack();
        Mockito.when(
                flumeEventBuilder.generateEvents(Mockito.any(LogEventPack.class), Mockito.any(RecordHeader.class),
                        Mockito.anyString())).thenReturn(Collections.singletonList(Mockito.mock(Event.class)));
        doThrow(new EventDeliveryException()).when(flumeClientManager).sendEventsToFlumeAsync(Mockito.anyList());
        TestLogDeliveryCallback callback = new TestLogDeliveryCallback();
        appender.doAppend(eventPack, callback);
        // Crude wait for the asynchronous append to finish.
        // NOTE(review): sleep-based synchronization is timing-dependent and
        // potentially flaky; a latch in the callback would be more robust.
        Thread.sleep(3000);
        Assert.assertTrue(callback.connectionError);
    }

    /**
     * Happy path: a successful asynchronous delivery must invoke onSuccess().
     */
    @Test
    public void appendTest() throws EventDeliveryException, InterruptedException {
        LogEventPack eventPack = new LogEventPack();
        ListeningExecutorService rpcES = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(1));
        Mockito.when(
                flumeEventBuilder.generateEvents(Mockito.any(LogEventPack.class), Mockito.any(RecordHeader.class),
                        Mockito.anyString())).thenReturn(Collections.singletonList(Mockito.mock(Event.class)));
        // Simulate a successful async RPC by returning an already-running future.
        Mockito.when(flumeClientManager.sendEventsToFlumeAsync(Mockito.anyList()))
                .thenReturn(rpcES.submit(new Callable<AppendBatchAsyncResultPojo>() {
                    public AppendBatchAsyncResultPojo call() throws Exception {
                        return new AppendBatchAsyncResultPojo(true,
                                Collections.singletonList(Mockito.mock(Event.class)));
                    }
                }));
        TestLogDeliveryCallback callback = new TestLogDeliveryCallback();
        appender.doAppend(eventPack, callback);
        // Crude wait for the asynchronous append to finish (see note above in this method's sibling test).
        Thread.sleep(3000);
        Assert.assertTrue(callback.success);
    }

    /**
     * Without a client manager the appender cannot deliver and must report an
     * internal error synchronously.
     */
    @Test
    public void appendWithEmptyClientManagerTest() throws EventDeliveryException {
        LogEventPack eventPack = new LogEventPack();
        ReflectionTestUtils.setField(appender, "flumeClientManager", null);
        TestLogDeliveryCallback callback = new TestLogDeliveryCallback();
        appender.doAppend(eventPack, callback);
        Assert.assertTrue(callback.internallError);
    }

    /**
     * Records which callback method was invoked; fields are volatile because
     * the appender invokes the callback from another thread.
     */
    private static class TestLogDeliveryCallback implements LogDeliveryCallback {
        private volatile boolean success;
        private volatile boolean internallError;
        private volatile boolean connectionError;
        private volatile boolean remoteError;
        @Override
        public void onSuccess() {
            success = true;
        }
        @Override
        public void onInternalError() {
            internallError = true;
        }
        @Override
        public void onConnectionError() {
            connectionError = true;
        }
        @Override
        public void onRemoteError() {
            remoteError = true;
        }
    }
}
| apache-2.0 |
xasx/assertj-core | src/test/java/org/assertj/core/api/bigdecimal/BigDecimalAssert_isZero_Test.java | 1213 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2019 the original author or authors.
*/
package org.assertj.core.api.bigdecimal;
import org.assertj.core.api.BigDecimalAssert;
import org.assertj.core.api.BigDecimalAssertBaseTest;
import static org.mockito.Mockito.verify;
/**
 * Tests for <code>{@link BigDecimalAssert#isZero()}</code>.
 *
 * @author Yvonne Wang
 */
public class BigDecimalAssert_isZero_Test extends BigDecimalAssertBaseTest {

  @Override
  protected BigDecimalAssert invoke_api_method() {
    // Exercise the API under test; the base class checks the returned assertion.
    final BigDecimalAssert returned = assertions.isZero();
    return returned;
  }

  @Override
  protected void verify_internal_effects() {
    // isZero() must delegate the comparison to the internal BigDecimals object
    // with the assertion's info and actual value.
    verify(bigDecimals).assertIsZero(getInfo(assertions), getActual(assertions));
  }
}
| apache-2.0 |
johnzeringue/ET_Redux | src/main/java/org/earthtime/UPb_Redux/valueModels/definedValueModels/ConcPb206_ib.java | 2750 | /*
* ConcPb206_ib.java
*
* Created on Feb 28, 2009
*
*
* Copyright 2006-2015 James F. Bowring and www.Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.UPb_Redux.valueModels.definedValueModels;
import java.io.Serializable;
import java.math.BigDecimal;
import java.util.concurrent.ConcurrentMap;
import org.earthtime.UPb_Redux.ReduxConstants;
import org.earthtime.UPb_Redux.expressions.ExpTreeII;
import org.earthtime.UPb_Redux.valueModels.ValueModel;
/**
*
* @author James F. Bowring
*/
public class ConcPb206_ib extends ValueModel implements
        Comparable<ValueModel>,
        Serializable {

    // Class variables
    private static final long serialVersionUID = -3412851419609564860L;
    private final static String NAME = "concPb206_ib";
    private final static String UNCT_TYPE = "ABS";

    // Instance variables: input models cached by calculateValue(...)
    private ValueModel gmol206;
    private ValueModel molPb206b;
    private ValueModel molPb206c;
    private ValueModel fractionMass;

    /** Creates a new instance of ConcPb206_ib */
    public ConcPb206_ib () {
        super( NAME, UNCT_TYPE );
    }

    /**
     * Computes concPb206_ib = gmol206 * (molPb206b + molPb206c) / fractionMass,
     * setting both the numeric value and the expression-tree value. On any
     * arithmetic failure (e.g. division by zero) the corresponding result
     * falls back to zero.
     *
     * @param inputValueModels expected order: [0]=gmol206, [1]=molPb206b,
     *                         [2]=molPb206c, [3]=fractionMass
     * @param parDerivTerms partial-derivative terms (unused here)
     */
    @Override
    public void calculateValue (
            ValueModel[] inputValueModels,
            ConcurrentMap<String, BigDecimal> parDerivTerms ) {

        gmol206 = inputValueModels[0];
        molPb206b = inputValueModels[1];
        molPb206c = inputValueModels[2];
        fractionMass = inputValueModels[3];

        try {
            BigDecimal gramsPerMole = gmol206.getValue();
            BigDecimal totalMolPb206 = molPb206b.getValue().add( molPb206c.getValue() );
            setValue( gramsPerMole.multiply( totalMolPb206 ).divide(
                    fractionMass.getValue(), ReduxConstants.mathContext15 ) );
        } catch (Exception e) {
            // fall back to zero on any arithmetic problem
            setValue( BigDecimal.ZERO );
        }

        try {
            // mirror the numeric computation on the expression tree
            setValueTree(
                    gmol206.getValueTree().
                    multiply( molPb206b.getValueTree().
                    add( molPb206c.getValueTree() ) ).
                    divide( fractionMass.getValueTree() ) );
        } catch (Exception e) {
            setValueTree( ExpTreeII.ZERO );
        }
    }
}
| apache-2.0 |
OnurKirkizoglu/master_thesis | at.jku.sea.cloud/src/main/java/at/jku/sea/cloud/stream/sink/ShortCircuitSink.java | 474 | package at.jku.sea.cloud.stream.sink;
import at.jku.sea.cloud.stream.predicate.Predicate;
public abstract class ShortCircuitSink<T> implements Sink<T> {
  // Becomes true once this sink decides traversal can stop early.
  protected boolean isCancelled = false;
  // Predicate stored for use by concrete subclasses — presumably applied to
  // streamed elements to decide cancellation; confirm against subclasses.
  protected Predicate<T> predicate;
  // Context name supplied at construction; exposed to subclasses.
  protected final String context;

  /**
   * Creates a short-circuiting sink.
   *
   * @param context   context name available to subclasses
   * @param predicate predicate available to subclasses
   */
  public ShortCircuitSink(String context, Predicate<T> predicate) {
    this.context = context;
    this.predicate = predicate;
  }

  /** @return true once this sink has short-circuited. */
  @Override
  public boolean isCancelled() {
    return isCancelled;
  }
} | apache-2.0 |
ict-carch/hadoop-plus | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java | 18943 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.jobhistory;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.Calendar;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class JobHistoryUtils {
  /**
   * Permissions for the history staging dir while JobInProgress.
   */
  public static final FsPermission HISTORY_STAGING_DIR_PERMISSIONS =
    FsPermission.createImmutable( (short) 0700);

  /**
   * Permissions for the user directory under the staging directory.
   */
  public static final FsPermission HISTORY_STAGING_USER_DIR_PERMISSIONS =
    FsPermission.createImmutable((short) 0700);

  /**
   * Permissions for the history done dir and derivatives.
   */
  public static final FsPermission HISTORY_DONE_DIR_PERMISSION =
      FsPermission.createImmutable((short) 0770);

  public static final FsPermission HISTORY_DONE_FILE_PERMISSION =
      FsPermission.createImmutable((short) 0770); // rwxrwx---

  /**
   * Umask for the done dir and derivatives.
   */
  public static final FsPermission HISTORY_DONE_DIR_UMASK = FsPermission
      .createImmutable((short) (0770 ^ 0777));

  /**
   * Permissions for the intermediate done directory.
   */
  public static final FsPermission HISTORY_INTERMEDIATE_DONE_DIR_PERMISSIONS =
    FsPermission.createImmutable((short) 01777);

  /**
   * Permissions for the user directory under the intermediate done directory.
   */
  public static final FsPermission HISTORY_INTERMEDIATE_USER_DIR_PERMISSIONS =
    FsPermission.createImmutable((short) 0770);

  public static final FsPermission HISTORY_INTERMEDIATE_FILE_PERMISSIONS =
    FsPermission.createImmutable((short) 0770); // rwxrwx---

  /**
   * Suffix for configuration files.
   */
  public static final String CONF_FILE_NAME_SUFFIX = "_conf.xml";
  /**
   * Suffix for summary files.
   */
  public static final String SUMMARY_FILE_NAME_SUFFIX = ".summary";
  /**
   * Job History File extension.
   */
  public static final String JOB_HISTORY_FILE_EXTENSION = ".jhist";
  // History file format version.
  public static final int VERSION = 4;
  // Number of leading serial-number digits used to bucket jobs into directories.
  public static final int SERIAL_NUMBER_DIRECTORY_DIGITS = 6;
  // Matches yyyy/MM/dd timestamp directory components.
  public static final String TIMESTAMP_DIR_REGEX = "\\d{4}" + "\\" + Path.SEPARATOR + "\\d{2}" + "\\" + Path.SEPARATOR + "\\d{2}";
  public static final Pattern TIMESTAMP_DIR_PATTERN = Pattern.compile(TIMESTAMP_DIR_REGEX);
  // NOTE(review): built with File.separator while TIMESTAMP_DIR_REGEX uses
  // Path.SEPARATOR — on Windows these disagree; confirm intended separator.
  private static final String TIMESTAMP_DIR_FORMAT = "%04d" + File.separator + "%02d" + File.separator + "%02d";
  private static final Splitter ADDR_SPLITTER = Splitter.on(':').trimResults();
  private static final Joiner JOINER = Joiner.on("");
  // Accepts only *_conf.xml files.
  private static final PathFilter CONF_FILTER = new PathFilter() {
    @Override
    public boolean accept(Path path) {
      return path.getName().endsWith(CONF_FILE_NAME_SUFFIX);
    }
  };
  // Accepts only *.jhist files.
  private static final PathFilter JOB_HISTORY_FILE_FILTER = new PathFilter() {
    @Override
    public boolean accept(Path path) {
      return path.getName().endsWith(JOB_HISTORY_FILE_EXTENSION);
    }
  };
  /**
   * Checks whether the provided path string is a valid job history file.
   * @param pathString the path to be checked; must be non-null.
   * @return true if the path is a valid job history filename else return false
   */
  public static boolean isValidJobHistoryFileName(String pathString) {
    return pathString.endsWith(JOB_HISTORY_FILE_EXTENSION);
  }

  /**
   * Returns the jobId from a job history file name.
   * @param pathString the path string.
   * @return the JobId
   * @throws IOException if the filename format is invalid.
   */
  public static JobID getJobIDFromHistoryFilePath(String pathString) throws IOException {
    // The job id is encoded in the file name itself; drop any directory prefix.
    String [] parts = pathString.split(Path.SEPARATOR);
    String fileNamePart = parts[parts.length -1];
    JobIndexInfo jobIndexInfo = FileNameIndexUtils.getIndexInfo(fileNamePart);
    return TypeConverter.fromYarn(jobIndexInfo.getJobId());
  }

  /**
   * Gets a PathFilter which would match configuration files.
   * @return the path filter {@link PathFilter} for matching conf files.
   */
  public static PathFilter getConfFileFilter() {
    return CONF_FILTER;
  }

  /**
   * Gets a PathFilter which would match job history file names.
   * @return the path filter {@link PathFilter} matching job history files.
   */
  public static PathFilter getHistoryFileFilter() {
    return JOB_HISTORY_FILE_FILTER;
  }
/**
* Gets the configured directory prefix for In Progress history files.
* @param conf the configuration for hte job
* @param jobId the id of the job the history file is for.
* @return A string representation of the prefix.
*/
public static String
getConfiguredHistoryStagingDirPrefix(Configuration conf, String jobId)
throws IOException {
String user = UserGroupInformation.getCurrentUser().getShortUserName();
Path stagingPath = MRApps.getStagingAreaDir(conf, user);
Path path = new Path(stagingPath, jobId);
String logDir = path.toString();
return logDir;
}
/**
* Gets the configured directory prefix for intermediate done history files.
* @param conf
* @return A string representation of the prefix.
*/
public static String getConfiguredHistoryIntermediateDoneDirPrefix(
Configuration conf) {
String doneDirPrefix = conf
.get(JHAdminConfig.MR_HISTORY_INTERMEDIATE_DONE_DIR);
if (doneDirPrefix == null) {
doneDirPrefix = conf.get(MRJobConfig.MR_AM_STAGING_DIR,
MRJobConfig.DEFAULT_MR_AM_STAGING_DIR)
+ "/history/done_intermediate";
}
return doneDirPrefix;
}
/**
* Gets the configured directory prefix for Done history files.
* @param conf the configuration object
* @return the done history directory
*/
public static String getConfiguredHistoryServerDoneDirPrefix(
Configuration conf) {
String doneDirPrefix = conf.get(JHAdminConfig.MR_HISTORY_DONE_DIR);
if (doneDirPrefix == null) {
doneDirPrefix = conf.get(MRJobConfig.MR_AM_STAGING_DIR,
MRJobConfig.DEFAULT_MR_AM_STAGING_DIR)
+ "/history/done";
}
return doneDirPrefix;
}
/**
* Gets the user directory for intermediate done history files.
* @param conf the configuration object
* @return the intermediate done directory for jobhistory files.
*/
public static String getHistoryIntermediateDoneDirForUser(Configuration conf) throws IOException {
return getConfiguredHistoryIntermediateDoneDirPrefix(conf) + File.separator
+ UserGroupInformation.getCurrentUser().getShortUserName();
}
  /**
   * Whether the AM may create the intermediate-done base directory itself.
   * @param conf the configuration object
   * @return the configured flag, defaulting to true.
   */
  public static boolean shouldCreateNonUserDirectory(Configuration conf) {
    // Returning true by default to allow non secure single node clusters to work
    // without any configuration change.
    return conf.getBoolean(MRJobConfig.MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR, true);
  }

  /**
   * Get the job history file path for non Done history files.
   * @param dir the parent directory.
   * @param jobId the job id.
   * @param attempt attempt number for this job.
   * @return the staging history file path.
   */
  public static Path getStagingJobHistoryFile(Path dir, JobId jobId, int attempt) {
    return getStagingJobHistoryFile(dir, TypeConverter.fromYarn(jobId).toString(), attempt);
  }
/**
 * Get the job history file path for non Done history files.
 *
 * @param dir the staging directory
 * @param jobId the classic job id string
 * @param attempt the AM attempt number embedded in the file name
 * @return a path of the form dir/&lt;jobId&gt;_&lt;attempt&gt; plus the
 *         history file extension
 */
public static Path getStagingJobHistoryFile(Path dir, String jobId, int attempt) {
  StringBuilder name = new StringBuilder(jobId);
  name.append('_').append(attempt).append(JOB_HISTORY_FILE_EXTENSION);
  return new Path(dir, name.toString());
}
/**
 * Get the done configuration file name for a job.
 *
 * @param jobId the jobId.
 * @return the conf file name: classic job id string + CONF_FILE_NAME_SUFFIX.
 */
public static String getIntermediateConfFileName(JobId jobId) {
  return TypeConverter.fromYarn(jobId).toString() + CONF_FILE_NAME_SUFFIX;
}
/**
 * Get the done summary file name for a job.
 *
 * @param jobId the jobId.
 * @return the summary file name: classic job id string + SUMMARY_FILE_NAME_SUFFIX.
 */
public static String getIntermediateSummaryFileName(JobId jobId) {
  return TypeConverter.fromYarn(jobId).toString() + SUMMARY_FILE_NAME_SUFFIX;
}
/**
 * Gets the conf file path for jobs in progress.
 *
 * @param logDir the log directory prefix; may be null
 * @param jobId the jobId
 * @param attempt attempt number for this job
 * @return the conf file path, or {@code null} when {@code logDir} is null
 */
public static Path getStagingConfFile(Path logDir, JobId jobId, int attempt) {
  if (logDir == null) {
    return null;
  }
  String fileName =
      TypeConverter.fromYarn(jobId).toString() + "_" + attempt + CONF_FILE_NAME_SUFFIX;
  return new Path(logDir, fileName);
}
/**
 * Gets the serial number part of the path based on the jobId and the serial
 * number format, truncated to SERIAL_NUMBER_DIRECTORY_DIGITS characters.
 *
 * @param id the job id
 * @param serialNumberFormat the format string applied to the serial number
 * @return the directory component derived from the job's serial number
 */
public static String serialNumberDirectoryComponent(JobId id, String serialNumberFormat) {
  String formatted =
      String.format(serialNumberFormat, Integer.valueOf(jobSerialNumber(id)));
  return formatted.substring(0, SERIAL_NUMBER_DIRECTORY_DIGITS);
}
/**
 * Extracts the timestamp component from a history path.
 *
 * @param path the path to scan
 * @return the first substring matching TIMESTAMP_DIR_PATTERN (interned),
 *         or {@code null} when the path contains none
 */
public static String getTimestampPartFromPath(String path) {
  Matcher matcher = TIMESTAMP_DIR_PATTERN.matcher(path);
  if (!matcher.find()) {
    return null;
  }
  // Interned to share the small set of distinct date strings.
  return matcher.group().intern();
}
/**
 * Gets the history subdirectory based on the jobId, timestamp and serial
 * number format: &lt;timestamp&gt;/&lt;serial&gt;/ .
 *
 * @param id the job id
 * @param timestampComponent the date component of the path
 * @param serialNumberFormat the format applied to the job's serial number
 * @return the relative history subdirectory, with a trailing separator
 */
public static String historyLogSubdirectory(JobId id, String timestampComponent, String serialNumberFormat) {
  String serialPart = serialNumberDirectoryComponent(id, serialNumberFormat);
  return timestampComponent + File.separator + serialPart + File.separator;
}
/**
 * Gets the timestamp (date) component based on millisecond time.
 *
 * @param millisecondTime epoch milliseconds
 * @return the interned date string formatted with TIMESTAMP_DIR_FORMAT
 */
public static String timestampDirectoryComponent(long millisecondTime) {
  Calendar cal = Calendar.getInstance();
  cal.setTimeInMillis(millisecondTime);
  // Calendar months are zero-based, but people expect January to be month 1.
  String dateString = String.format(TIMESTAMP_DIR_FORMAT,
      cal.get(Calendar.YEAR),
      cal.get(Calendar.MONTH) + 1,
      cal.get(Calendar.DAY_OF_MONTH));
  // Interned: the same date recurs for every job in a day.
  return dateString.intern();
}
/**
 * @return the glob tail matching the YYYY/MM/DD date layers that precede the
 *         serial-number directory in the done dir layout.
 */
public static String doneSubdirsBeforeSerialTail() {
  return "/*/*/*"; // YYYY/MM/DD
}
/**
 * Computes a serial number used as part of directory naming for the given jobId.
 *
 * @param id the jobId.
 * @return the serial number used as part of directory naming for the given jobid
 */
public static int jobSerialNumber(JobId id) {
  // Currently just the job's numeric id; no bucketing is applied here.
  return id.getId();
}
/**
 * Convenience overload of the five-argument
 * {@code localGlobber} with no path filter and no mismatch flag.
 */
public static List<FileStatus> localGlobber(FileContext fc, Path root, String tail)
    throws IOException {
  return localGlobber(fc, root, tail, null);
}
/**
 * Convenience overload of the five-argument
 * {@code localGlobber} with no mismatch flag.
 */
public static List<FileStatus> localGlobber(FileContext fc, Path root, String tail,
    PathFilter filter) throws IOException {
  return localGlobber(fc, root, tail, filter, null);
}
/**
 * Recursively expands a restricted glob {@code tail} against {@code root}.
 * Supported tail forms: "" (list root), a tail starting with "/*" (descend
 * into every subdirectory), and a tail starting with "/name" (descend into
 * one named segment). Any other tail is rejected with an IOException.
 *
 * @param fc the file context to list through
 * @param root the directory the tail is resolved against
 * @param tail the remaining glob; consumed one leading segment per recursion
 * @param filter optional filter applied to the leaf listings; may be null
 * @param hasFlatFiles out-parameter set to true when a "/*" level also
 *        contained plain files; may be null if the caller does not care
 * @return the matching file statuses (possibly empty, never null)
 */
// hasMismatches is just used to return a second value if you want
// one. I would have used MutableBoxedBoolean if such had been provided.
public static List<FileStatus> localGlobber(FileContext fc, Path root, String tail,
    PathFilter filter, AtomicBoolean hasFlatFiles) throws IOException {
  // Base case: nothing left to expand; list (and filter) the root itself.
  if (tail.equals("")) {
    return (listFilteredStatus(fc, root, filter));
  }
  if (tail.startsWith("/*")) {
    // Wildcard level: recurse into every subdirectory of root.
    // hasFlatFiles is only reported for this top level, hence null below.
    Path[] subdirs = filteredStat2Paths(
        remoteIterToList(fc.listStatus(root)), true, hasFlatFiles);
    List<List<FileStatus>> subsubdirs = new LinkedList<List<FileStatus>>();
    int subsubdirCount = 0;
    if (subdirs.length == 0) {
      return new LinkedList<FileStatus>();
    }
    String newTail = tail.substring(2);
    for (int i = 0; i < subdirs.length; ++i) {
      subsubdirs.add(localGlobber(fc, subdirs[i], newTail, filter, null));
      // subsubdirs.set(i, localGlobber(fc, subdirs[i], newTail, filter,
      // null));
      subsubdirCount += subsubdirs.get(i).size();
    }
    // Flatten the per-subdirectory result lists into one.
    List<FileStatus> result = new LinkedList<FileStatus>();
    for (int i = 0; i < subsubdirs.size(); ++i) {
      result.addAll(subsubdirs.get(i));
    }
    return result;
  }
  if (tail.startsWith("/")) {
    // Literal segment: peel off one path component and recurse.
    int split = tail.indexOf('/', 1);
    if (split < 0) {
      return listFilteredStatus(fc, new Path(root, tail.substring(1)), filter);
    } else {
      String thisSegment = tail.substring(1, split);
      String newTail = tail.substring(split);
      return localGlobber(fc, new Path(root, thisSegment), newTail, filter,
          hasFlatFiles);
    }
  }
  // Tail did not start with "/" -- malformed for this restricted globber.
  IOException e = new IOException("localGlobber: bad tail");
  throw e;
}
/**
 * Lists {@code root} and keeps only the entries whose paths pass
 * {@code filter}; a null filter keeps everything.
 */
private static List<FileStatus> listFilteredStatus(FileContext fc, Path root,
    PathFilter filter) throws IOException {
  List<FileStatus> all = remoteIterToList(fc.listStatus(root));
  if (filter == null) {
    return all;
  }
  List<FileStatus> accepted = new LinkedList<FileStatus>();
  for (FileStatus candidate : all) {
    if (filter.accept(candidate.getPath())) {
      accepted.add(candidate);
    }
  }
  return accepted;
}
/**
 * Drains a remote iterator into a list; a null iterator yields an empty list.
 */
private static List<FileStatus> remoteIterToList(
    RemoteIterator<FileStatus> rIter) throws IOException {
  List<FileStatus> result = new LinkedList<FileStatus>();
  if (rIter != null) {
    while (rIter.hasNext()) {
      result.add(rIter.next());
    }
  }
  return result;
}
/**
 * Extracts the paths of the statuses whose directory-ness matches
 * {@code dirs}, reporting via {@code hasMismatches} whether any entry was
 * rejected.
 *
 * NOTE: this compacts the matching entries to the front of the caller's
 * {@code stats} list in place -- entries beyond the returned count are left
 * in an unspecified state afterwards.
 *
 * @param stats the statuses to sift (mutated in place, see note above)
 * @param dirs true to keep directories, false to keep plain files
 * @param hasMismatches out-parameter set to true when at least one entry was
 *        of the other kind; may be null
 * @return the paths of the matching entries, in their original order
 */
// hasMismatches is just used to return a second value if you want
// one. I would have used MutableBoxedBoolean if such had been provided.
private static Path[] filteredStat2Paths(List<FileStatus> stats, boolean dirs,
    AtomicBoolean hasMismatches) {
  int resultCount = 0;
  if (hasMismatches == null) {
    // Use a throwaway flag so the loop body needs no null checks.
    hasMismatches = new AtomicBoolean(false);
  }
  for (int i = 0; i < stats.size(); ++i) {
    if (stats.get(i).isDirectory() == dirs) {
      // Compact matches toward the front of the list.
      stats.set(resultCount++, stats.get(i));
    } else {
      hasMismatches.set(true);
    }
  }
  Path[] result = new Path[resultCount];
  for (int i = 0; i < resultCount; i++) {
    result[i] = stats.get(i).getPath();
  }
  return result;
}
/**
 * Builds the history-server web URL for a job: takes the port from the
 * webapp address setting and the host from the RPC address setting, then
 * appends the /jobhistory/job/&lt;jobId&gt; path.
 *
 * @param conf the configuration holding the history server addresses
 * @param appId the application whose job page is wanted
 * @return the absolute history URL (without scheme prefix)
 * @throws UnknownHostException if the local hostname cannot be resolved
 */
public static String getHistoryUrl(Configuration conf, ApplicationId appId)
    throws UnknownHostException {
  //construct the history url for job
  String addr = conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
      JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
  Iterator<String> it = ADDR_SPLITTER.split(addr).iterator();
  it.next(); // ignore the bind host
  String port = it.next();
  // Use hs address to figure out the host for webapp
  addr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS,
      JHAdminConfig.DEFAULT_MR_HISTORY_ADDRESS);
  String host = ADDR_SPLITTER.split(addr).iterator().next();
  String hsAddress = JOINER.join(host, ":", port);
  InetSocketAddress address = NetUtils.createSocketAddr(
    hsAddress, JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_PORT,
    JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
  StringBuffer sb = new StringBuffer();
  // Wildcard/loopback bind addresses are not reachable from outside;
  // substitute the canonical local hostname in that case.
  // NOTE(review): assumes the address resolved (getAddress() non-null) --
  // confirm behavior for unresolvable configured hosts.
  if (address.getAddress().isAnyLocalAddress() ||
      address.getAddress().isLoopbackAddress()) {
    sb.append(InetAddress.getLocalHost().getCanonicalHostName());
  } else {
    sb.append(address.getHostName());
  }
  sb.append(":").append(address.getPort());
  sb.append("/jobhistory/job/");
  JobID jobId = TypeConverter.fromYarn(appId);
  sb.append(jobId.toString());
  return sb.toString();
}
/**
 * Computes the staging history file path written by the previous AM attempt
 * of the given application.
 *
 * @param conf the configuration used to locate the staging directory
 * @param applicationAttemptId the current attempt; the file of attempt-1 is returned
 * @return the fully qualified path of the previous attempt's history file
 * @throws IOException if the file context cannot be obtained
 */
public static Path getPreviousJobHistoryPath(
    Configuration conf, ApplicationAttemptId applicationAttemptId)
    throws IOException {
  String jobId =
      TypeConverter.fromYarn(applicationAttemptId.getApplicationId()).toString();
  String stagingDir =
      JobHistoryUtils.getConfiguredHistoryStagingDirPrefix(conf, jobId);
  Path qualifiedDir =
      FileContext.getFileContext(conf).makeQualified(new Path(stagingDir));
  FileContext fc = FileContext.getFileContext(qualifiedDir.toUri(), conf);
  return fc.makeQualified(JobHistoryUtils.getStagingJobHistoryFile(
      qualifiedDir, jobId, applicationAttemptId.getAttemptId() - 1));
}
}
| apache-2.0 |
brendandouglas/intellij | base/src/com/google/idea/blaze/base/sync/GenericSourceFolderProvider.java | 1680 | /*
* Copyright 2016 The Bazel Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.idea.blaze.base.sync;
import com.google.common.collect.ImmutableMap;
import com.google.idea.blaze.base.util.UrlUtil;
import com.intellij.openapi.roots.ContentEntry;
import com.intellij.openapi.roots.SourceFolder;
import java.io.File;
/** An implementation of {@link SourceFolderProvider} with no language-specific settings. */
public class GenericSourceFolderProvider implements SourceFolderProvider {
  // Stateless, so a single shared instance suffices.
  public static final GenericSourceFolderProvider INSTANCE = new GenericSourceFolderProvider();
  // Private: use the INSTANCE singleton.
  private GenericSourceFolderProvider() {}
  @Override
  public ImmutableMap<File, SourceFolder> initializeSourceFolders(ContentEntry contentEntry) {
    // The content root itself becomes the sole source folder (non-test).
    String url = contentEntry.getUrl();
    return ImmutableMap.of(UrlUtil.urlToFile(url), contentEntry.addSourceFolder(url, false));
  }
  @Override
  public SourceFolder setSourceFolderForLocation(
      ContentEntry contentEntry, SourceFolder parentFolder, File file, boolean isTestSource) {
    // parentFolder is ignored: there are no language-specific settings to inherit.
    return contentEntry.addSourceFolder(UrlUtil.fileToIdeaUrl(file), isTestSource);
  }
}
| apache-2.0 |
Hanmourang/Pinot | pinot-transport/src/test/java/com/linkedin/pinot/routing/builder/BalancedRandomRoutingTableBuilderTest.java | 2824 | /**
* Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.routing.builder;
import com.linkedin.pinot.routing.ServerToSegmentSetMap;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.helix.model.ExternalView;
import org.apache.helix.model.InstanceConfig;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
 * Test that random routing tables are really random.
 */
public class BalancedRandomRoutingTableBuilderTest {
  @Test
  public void isRandom() {
    // Build a dummy external view: three segments, each ONLINE on three servers.
    BalancedRandomRoutingTableBuilder routingTableBuilder = new BalancedRandomRoutingTableBuilder();
    List<InstanceConfig> instanceConfigList = new ArrayList<>();
    ExternalView externalView = new ExternalView("dummy");
    String[] segments = {"segment_1", "segment_2", "segment_3"};
    String[] servers = {"Server_1.2.3.4_1234", "Server_1.2.3.5_2345", "Server_1.2.3.6_3456"};
    for (String segment : segments) {
      for (String server : servers) {
        externalView.setState(segment, server, "ONLINE");
      }
    }
    // Compute several routing tables from the same view.
    List<ServerToSegmentSetMap> routingTable =
        routingTableBuilder.computeRoutingTableFromExternalView("dummy", externalView, instanceConfigList);
    // The test passes as soon as any later table differs from the first one.
    Iterator<ServerToSegmentSetMap> tables = routingTable.iterator();
    ServerToSegmentSetMap reference = tables.next();
    while (tables.hasNext()) {
      ServerToSegmentSetMap candidate = tables.next();
      System.out.println("current = " + candidate);
      System.out.println("previous = " + reference);
      if (!candidate.equals(reference)) {
        return;
      }
    }
    Assert.fail("All routing tables are equal!");
  }
}
| apache-2.0 |
twitter/bookkeeper | bookkeeper-server/src/main/java/org/apache/bookkeeper/util/MathUtils.java | 3637 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.bookkeeper.util;
import java.util.concurrent.TimeUnit;
/**
 * Provides misc math functions that don't come standard.
 */
public class MathUtils {

    private static final long NANOSECONDS_PER_MILLISECOND = 1000000;

    /**
     * Computes {@code dividend % divisor} and, when the raw remainder is
     * negative, shifts it by {@code divisor}; for a positive divisor the
     * result is therefore always in {@code [0, divisor)}.
     */
    public static int signSafeMod(long dividend, int divisor) {
        int remainder = (int) (dividend % divisor);
        return (remainder < 0) ? remainder + divisor : remainder;
    }

    /**
     * Current time from some arbitrary time base in the past, counting in
     * milliseconds. Backed by {@link System#nanoTime()}, so it is monotonic
     * and unaffected by settimeofday or similar system clock changes.
     *
     * NOTE: only use it for measuring intervals, never as a wall-clock value.
     *
     * @return current time in milliseconds
     */
    public static long now() {
        return System.nanoTime() / NANOSECONDS_PER_MILLISECOND;
    }

    /**
     * Same monotonic clock as {@link #now()}, without unit conversion.
     *
     * @return current time in nanoseconds
     */
    public static long nowInNano() {
        return System.nanoTime();
    }

    /**
     * Milliseconds elapsed since {@code startNanoTime}, which must be a
     * {@link System#nanoTime()} reading.
     *
     * @param startNanoTime the start of the interval that we are measuring
     * @return elapsed time in milliseconds
     */
    public static long elapsedMSec(long startNanoTime) {
        return (System.nanoTime() - startNanoTime) / NANOSECONDS_PER_MILLISECOND;
    }

    /**
     * Microseconds elapsed since {@code startNanoTime}, which must be a
     * {@link System#nanoTime()} reading.
     *
     * @param startNanoTime the start of the interval that we are measuring
     * @return elapsed time in microseconds
     */
    public static long elapsedMicroSec(long startNanoTime) {
        return TimeUnit.NANOSECONDS.toMicros(System.nanoTime() - startNanoTime);
    }

    /**
     * Nanoseconds elapsed since {@code startNanoTime}, which must be a
     * {@link System#nanoTime()} reading.
     *
     * @param startNanoTime the start of the interval that we are measuring
     * @return elapsed time in nanoseconds
     */
    public static long elapsedNanos(long startNanoTime) {
        return System.nanoTime() - startNanoTime;
    }
}
| apache-2.0 |
wolfboys/opencron-dev | jobx-server/src/main/java/com/jobxhub/server/session/cached/MemcachedManager.java | 6856 | /**
* Copyright (c) 2015 The JobX Project
* <p>
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.jobxhub.server.session.cached;
import java.io.IOException;
import java.io.OutputStream;
import java.net.SocketAddress;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import net.spy.memcached.ConnectionObserver;
import net.spy.memcached.MemcachedClient;
import net.spy.memcached.transcoders.Transcoder;
/**
 * {@link CachedManager} implementation backed by a spymemcached
 * {@link MemcachedClient}. Synchronous calls delegate directly to the
 * client; asynchronous helpers wait up to {@link #DEFAULT_TIMEOUT}
 * {@link #DEFAULT_TIMEUNIT} and cancel the future on failure.
 *
 * @author benjobs
 */
public class MemcachedManager implements CachedManager {

    /** Timeout applied when waiting on asynchronous memcached futures. */
    public static final int DEFAULT_TIMEOUT = 5;
    public static final TimeUnit DEFAULT_TIMEUNIT = TimeUnit.SECONDS;

    private MemcachedClient memcachedClient;

    // Expiration (seconds) applied to entries written via set()/replace().
    private int expire;

    public void addObserver(ConnectionObserver obs) {
        memcachedClient.addObserver(obs);
    }

    public void removeObserver(ConnectionObserver obs) {
        memcachedClient.removeObserver(obs);
    }

    /** Synchronous fetch; {@code clazz} is only used for the unchecked cast. */
    @Override
    public <T> T get(Object key, Class<T> clazz) {
        return (T) memcachedClient.get(key.toString());
    }

    @Override
    public void delete(Object key) {
        memcachedClient.delete(key.toString());
    }

    /** Stores {@code object} under {@code key} with the configured expiration. */
    @Override
    public void set(Object key, Object object) {
        memcachedClient.set(key.toString(), this.expire, object);
    }

    /** Fetches the value, then deletes the key (get-and-remove). */
    @Override
    public <T> T remove(Object key, Class<T> clazz) {
        T t = (T) memcachedClient.get(key.toString());
        this.delete(key);
        return t;
    }

    /**
     * Asynchronous fetch bounded by the default timeout; returns {@code null}
     * when the future times out or fails (the future is cancelled then).
     */
    public Object asyncGet(String key) {
        Object obj = null;
        Future<Object> f = memcachedClient.asyncGet(key);
        try {
            obj = f.get(DEFAULT_TIMEOUT, DEFAULT_TIMEUNIT);
        } catch (Exception e) {
            f.cancel(false);
        }
        return obj;
    }

    /** Replaces an existing entry; false on timeout/failure or missing key. */
    public boolean replace(String key, Object value) {
        Future<Boolean> f = memcachedClient.replace(key, expire, value);
        return getBooleanValue(f);
    }

    /** Flushes all caches; false on timeout/failure. */
    public boolean flush() {
        Future<Boolean> f = memcachedClient.flush();
        return getBooleanValue(f);
    }

    public Map<String, Object> getMulti(Collection<String> keys) {
        return memcachedClient.getBulk(keys);
    }

    public Map<String, Object> getMulti(String[] keys) {
        return memcachedClient.getBulk(keys);
    }

    /** Bulk asynchronous fetch; returns {@code null} on timeout/failure. */
    public Map<String, Object> asyncGetMulti(Collection<String> keys) {
        Map<String, Object> map = null;
        Future<Map<String, Object>> f = memcachedClient.asyncGetBulk(keys);
        try {
            map = f.get(DEFAULT_TIMEOUT, DEFAULT_TIMEUNIT);
        } catch (Exception e) {
            f.cancel(false);
        }
        return map;
    }

    /** Bulk asynchronous fetch; returns {@code null} on timeout/failure. */
    public Map<String, Object> asyncGetMulti(String[] keys) {
        Map<String, Object> map = null;
        Future<Map<String, Object>> f = memcachedClient.asyncGetBulk(keys);
        try {
            map = f.get(DEFAULT_TIMEOUT, DEFAULT_TIMEUNIT);
        } catch (Exception e) {
            f.cancel(false);
        }
        return map;
    }

    // ---- increment & decrement Start ----//
    public long increment(String key, int by, long defaultValue, int expire) {
        return memcachedClient.incr(key, by, defaultValue, expire);
    }

    public long increment(String key, int by) {
        return memcachedClient.incr(key, by);
    }

    public long decrement(String key, int by, long defaultValue, int expire) {
        return memcachedClient.decr(key, by, defaultValue, expire);
    }

    public long decrement(String key, int by) {
        return memcachedClient.decr(key, by);
    }

    /** Asynchronous increment; returns -1 on timeout/failure. */
    public long asyncIncrement(String key, int by) {
        Future<Long> f = memcachedClient.asyncIncr(key, by);
        return getLongValue(f);
    }

    /** Asynchronous decrement; returns -1 on timeout/failure. */
    public long asyncDecrement(String key, int by) {
        Future<Long> f = memcachedClient.asyncDecr(key, by);
        return getLongValue(f);
    }
    // ---- increment & decrement End ----//

    /** Prints per-server statistics to {@code System.out}. */
    public void printStats() throws IOException {
        printStats(null);
    }

    /**
     * Writes per-server statistics to the given stream, one line per stat;
     * a null stream defaults to {@code System.out}.
     *
     * Fix: the original appended the literal two characters "/n" instead of a
     * newline escape, so all output ran together on one line.
     */
    public void printStats(OutputStream stream) throws IOException {
        Map<SocketAddress, Map<String, String>> statMap = memcachedClient.getStats();
        if (stream == null) {
            stream = System.out;
        }
        StringBuilder buf = new StringBuilder();
        for (Map.Entry<SocketAddress, Map<String, String>> server : statMap.entrySet()) {
            buf.append(server.getKey().toString()).append('\n');
            for (Map.Entry<String, String> stat : server.getValue().entrySet()) {
                buf.append(" key=").append(stat.getKey())
                        .append(";value=").append(stat.getValue()).append('\n');
            }
            buf.append('\n');
        }
        stream.write(buf.toString().getBytes());
        stream.flush();
    }

    public Transcoder getTranscoder() {
        return memcachedClient.getTranscoder();
    }

    // Waits for an async numeric result; -1 signals timeout/failure.
    private long getLongValue(Future<Long> f) {
        try {
            Long l = f.get(DEFAULT_TIMEOUT, DEFAULT_TIMEUNIT);
            return l.longValue();
        } catch (Exception e) {
            f.cancel(false);
        }
        return -1;
    }

    // Waits for an async boolean result; false signals timeout/failure.
    private boolean getBooleanValue(Future<Boolean> f) {
        try {
            Boolean bool = f.get(DEFAULT_TIMEOUT, DEFAULT_TIMEUNIT);
            return bool.booleanValue();
        } catch (Exception e) {
            f.cancel(false);
            return false;
        }
    }

    public MemcachedClient getMemcachedClient() {
        return memcachedClient;
    }

    public void setMemcachedClient(MemcachedClient memcachedClient) {
        this.memcachedClient = memcachedClient;
    }

    public int getExpire() {
        return expire;
    }

    public void setExpire(int expire) {
        this.expire = expire;
    }
} | apache-2.0 |
LucidDB/luciddb | farrago/src/org/eigenbase/rex/RexNode.java | 2822 | /*
// Licensed to DynamoBI Corporation (DynamoBI) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. DynamoBI licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
*/
package org.eigenbase.rex;
import org.eigenbase.reltype.*;
/**
* Row expression.
*
* <p>Every row-expression has a type. (Compare with {@link
* org.eigenbase.sql.SqlNode}, which is created before validation, and therefore
* types may not be available.)</p>
*
* <p>Some common row-expressions are: {@link RexLiteral} (constant value),
* {@link RexVariable} (variable), {@link RexCall} (call to operator with
* operands). Expressions are generally created using a {@link RexBuilder}
* factory.</p>
*
* @author jhyde
* @version $Id$
* @since Nov 22, 2003
*/
public abstract class RexNode
{
    //~ Static fields/initializers ---------------------------------------------

    /** Shared empty array, to avoid allocating one per call site. */
    public static final RexNode [] EMPTY_ARRAY = new RexNode[0];

    //~ Instance fields --------------------------------------------------------

    // Textual digest of this expression; it is also the toString() value.
    // Subclasses are expected to assign it during construction.
    protected String digest;

    //~ Methods ----------------------------------------------------------------

    /** Returns the type of this expression. */
    public abstract RelDataType getType();

    /** Returns a copy of this expression. */
    public abstract RexNode clone();

    /**
     * Returns whether this expression always returns true. (Such as if this
     * expression is equal to the literal <code>TRUE</code>.)
     */
    public boolean isAlwaysTrue()
    {
        return false;
    }

    /** Returns whether this node's kind is, or is a subtype of, {@code kind}. */
    public boolean isA(RexKind kind)
    {
        return (getKind() == kind) || kind.includes(getKind());
    }

    /**
     * Returns the kind of node this is.
     *
     * @return A {@link RexKind} value, never null
     *
     * @post return != null
     */
    public RexKind getKind()
    {
        return RexKind.Other;
    }

    // Renders the precomputed digest; subclasses that never set it will
    // print "null".
    public String toString()
    {
        return digest;
    }

    /**
     * Accepts a visitor, dispatching to the right overloaded {@link
     * RexVisitor#visitInputRef visitXxx} method.
     *
     * <p>Also see {@link RexProgram#apply(RexVisitor, RexNode[], RexNode)},
     * which applies a visitor to several expressions simultaneously.
     */
    public abstract <R> R accept(RexVisitor<R> visitor);
}
// End RexNode.java
| apache-2.0 |
ricardocerq/elasticsearch | modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/SimpleNetty3TransportTests.java | 3668 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.transport.netty3;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.AbstractSimpleTransportTestCase;
import org.elasticsearch.transport.ConnectTransportException;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.TransportSettings;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collections;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.hamcrest.Matchers.containsString;
public class SimpleNetty3TransportTests extends AbstractSimpleTransportTestCase {

    /**
     * Builds a {@link MockTransportService} around a Netty3 transport that
     * reports {@code version} as its wire version instead of the build's own.
     */
    public static MockTransportService nettyFromThreadPool(
        Settings settings,
        ThreadPool threadPool, final Version version) {
        NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList());
        Transport transport = new Netty3Transport(settings, threadPool, new NetworkService(settings, Collections.emptyList()),
            BigArrays.NON_RECYCLING_INSTANCE, namedWriteableRegistry, new NoneCircuitBreakerService()) {
            @Override
            protected Version getCurrentVersion() {
                // Pin the advertised version for version-compatibility tests.
                return version;
            }
        };
        return new MockTransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR);
    }

    /** Creates and starts the transport service the base class tests against. */
    @Override
    protected MockTransportService build(Settings settings, Version version) {
        // Port 0 lets the OS pick a free ephemeral port per test run.
        settings = Settings.builder().put(settings).put(TransportSettings.PORT.getKey(), "0").build();
        MockTransportService transportService = nettyFromThreadPool(settings, threadPool, version);
        transportService.start();
        return transportService;
    }

    /** Connecting to a port nothing listens on must fail with a timeout. */
    public void testConnectException() throws UnknownHostException {
        try {
            serviceA.connectToNode(new DiscoveryNode("C", new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9876),
                emptyMap(), emptySet(),Version.CURRENT));
            fail("Expected ConnectTransportException");
        } catch (ConnectTransportException e) {
            assertThat(e.getMessage(), containsString("connect_timeout"));
            assertThat(e.getMessage(), containsString("[127.0.0.1:9876]"));
        }
    }
}
| apache-2.0 |
mbhk/barcode4j-modified | src/xmlgraphics-commons/java/org/krysalis/barcode4j/image/loader/PreloaderBarcode.java | 7440 | /*
* Copyright 2008,2010 Jeremias Maerki.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.krysalis.barcode4j.image.loader;
import java.io.IOException;
import java.io.InputStream;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import org.krysalis.barcode4j.BarcodeConstants;
import org.krysalis.barcode4j.BarcodeDimension;
import org.krysalis.barcode4j.BarcodeException;
import org.krysalis.barcode4j.BarcodeGenerator;
import org.krysalis.barcode4j.BarcodeUtil;
import org.krysalis.barcode4j.tools.ConfigurationUtil;
import org.krysalis.barcode4j.tools.MessageUtil;
import org.krysalis.barcode4j.tools.PageInfo;
import org.krysalis.barcode4j.tools.VariableUtil;
import org.w3c.dom.Document;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.ConfigurationException;
import org.apache.xmlgraphics.image.loader.ImageContext;
import org.apache.xmlgraphics.image.loader.ImageInfo;
import org.apache.xmlgraphics.image.loader.ImageSize;
import org.apache.xmlgraphics.image.loader.impl.AbstractImagePreloader;
import org.apache.xmlgraphics.image.loader.util.ImageUtil;
import org.apache.xmlgraphics.util.UnitConv;
import org.apache.xmlgraphics.util.io.SubInputStream;
/**
* Image preloader for barcodes (barcode XML).
*/
public class PreloaderBarcode extends AbstractImagePreloader {
/** {@inheritDoc} */
public ImageInfo preloadImage(String uri, Source src, ImageContext context)
        throws IOException {
    // Bail out early for sources this preloader cannot inspect at all.
    if (!isSupportedSource(src)) {
        return null;
    }
    ImageInfo info = getImage(uri, src, context);
    if (info != null) {
        // Recognized as barcode XML: the source was fully read, so close it.
        ImageUtil.closeQuietly(src);
    }
    return info;
}
/**
 * Attempts to parse the source as Barcode4J XML and, on success, builds the
 * {@link ImageInfo} (including the precomputed size). Returns {@code null}
 * when the content is not barcode XML; in that case the underlying stream is
 * reset so other preloaders can inspect it.
 */
private ImageInfo getImage(String uri, Source src,
        ImageContext context) throws IOException {
    InputStream in = null;
    try {
        Document doc;
        if (src instanceof DOMSource) {
            // DOM source: the document is already parsed.
            DOMSource domSrc = (DOMSource)src;
            doc = (Document)domSrc.getNode();
        } else {
            in = ImageUtil.needInputStream(src);
            // Mark so the stream can be rewound if this is not barcode XML.
            // NOTE(review): mark limit is based on available(), which is only
            // a hint -- presumably the stream is fully buffered; confirm.
            int length = in.available();
            in.mark(length + 1);
            try {
                // SubInputStream(..., false) keeps the parser from closing 'in'.
                doc = getDocument(new SubInputStream(in, Long.MAX_VALUE, false));
            } catch (IOException ioe) {
                resetInputStream(in);
                return null;
            }
        }
        if (!BarcodeConstants.NAMESPACE.equals(
                doc.getDocumentElement().getNamespaceURI())) {
            // Well-formed XML, but not in the Barcode4J namespace.
            resetInputStream(in);
            return null;
        }
        ImageInfo info;
        try {
            info = createImageInfo(uri, context, doc);
        } catch (ConfigurationException e) {
            resetInputStream(in);
            throw new IOException("Error in Barcode XML: " + e.getLocalizedMessage());
        } catch (BarcodeException e) {
            resetInputStream(in);
            throw new IOException("Error processing Barcode XML: " + e.getLocalizedMessage());
        }
        return info;
    } catch (SAXException se) {
        // Not parseable as XML at all -> not this preloader's format.
        resetInputStream(in);
        return null;
    } catch (ParserConfigurationException pce) {
        //Parser not available, propagate exception
        throw new RuntimeException(pce);
    }
}
private void resetInputStream(InputStream in) {
try {
if (in != null) {
in.reset();
}
} catch (IOException ioe) {
//Ignored. We're more interested in the original exception.
}
}
private ImageInfo createImageInfo(String uri, ImageContext context, Document doc)
throws ConfigurationException, BarcodeException {
Configuration cfg = ConfigurationUtil.buildConfiguration(doc);
String msg = ConfigurationUtil.getMessage(cfg);
msg = MessageUtil.unescapeUnicode(msg);
int orientation = cfg.getAttributeAsInteger("orientation", 0);
orientation = BarcodeDimension.normalizeOrientation(orientation);
BarcodeGenerator bargen = BarcodeUtil.getInstance().
createBarcodeGenerator(cfg);
//Expand with null information and hope the size will match the actual barcode
String expandedMsg = VariableUtil.getExpandedMessage((PageInfo)null, msg);
BarcodeDimension bardim = bargen.calcDimensions(expandedMsg);
int widthMpt = (int)Math.ceil(UnitConv.mm2mpt(bardim.getWidthPlusQuiet(orientation)));
int heightMpt = (int)Math.ceil(UnitConv.mm2mpt(bardim.getHeightPlusQuiet(orientation)));
ImageInfo info = new ImageInfo(uri, ImageLoaderFactoryBarcode.MIME_TYPE);
ImageSize size = new ImageSize();
size.setSizeInMillipoints(widthMpt, heightMpt);
//Set the resolution to that of the FOUserAgent
size.setResolution(context.getSourceResolution());
size.calcPixelsFromSize();
info.setSize(size);
//The whole image had to be loaded to determine the image size, so keep that information
ImageBarcode barcodeImage = new ImageBarcode(info, cfg, bardim);
info.getCustomObjects().put(ImageInfo.ORIGINAL_IMAGE, barcodeImage);
//Add the non-expanded message!
info.getCustomObjects().put(ImageBarcode.MESSAGE, msg);
return info;
}
private boolean isSupportedSource(Source src) {
if (src instanceof DOMSource) {
DOMSource domSrc = (DOMSource)src;
return (domSrc.getNode() instanceof Document);
} else {
return ImageUtil.hasInputStream(src);
}
}
private Document getDocument(InputStream in)
throws IOException, SAXException, ParserConfigurationException {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
dbf.setNamespaceAware(true);
dbf.setValidating(false);
DocumentBuilder db = dbf.newDocumentBuilder();
db.setErrorHandler(new ErrorHandler() {
public void error(SAXParseException exception) throws SAXException {
throw exception;
}
public void fatalError(SAXParseException exception) throws SAXException {
throw exception;
}
public void warning(SAXParseException exception) throws SAXException {
throw exception;
}
});
Document doc = db.parse(in);
return doc;
}
}
| apache-2.0 |
suchinth08/lathaagency | Downloads/Latha NewsAgency/Latha NewsAgency/apps/src/main/java/in/anandm/apps/bo/TicketBO.java | 3578 | package in.anandm.apps.bo;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import in.anandm.apps.dao.TicketDAO;
import in.anandm.apps.domain.Ticket;
import in.anandm.apps.domain.UserData;
@Service
public class TicketBO {

    private static final Logger logger = LoggerFactory.getLogger(TicketBO.class);

    private TicketDAO ticketDAO;

    /** DAO injected by Spring; setter injection kept for backward compatibility. */
    @Autowired
    public void setTicketDAO(TicketDAO ticketDAO) {
        this.ticketDAO = ticketDAO;
    }

    /**
     * Persists a new ticket.
     *
     * @param ticket the ticket to save
     * @return the DAO result (JSON string)
     */
    public String saveTicket(Ticket ticket) {
        logger.info("Start of execution of TicketBO --> saveTicket");
        // Parameterized logging avoids string concatenation when the level is disabled.
        logger.info("the value of date is {}", ticket.getExpectedDeployment());
        String jsonVal = this.ticketDAO.saveTicket(ticket);
        logger.info("End of execution of TicketBO --> saveTicket");
        return jsonVal;
    }

    /**
     * Persists a user record.
     *
     * @param userData the user to save
     * @return the DAO result (JSON string)
     */
    public String saveUser(UserData userData) {
        logger.info("Start of execution of TicketBO --> saveUser");
        String jsonVal = this.ticketDAO.saveUser(userData);
        logger.info("End of execution of TicketBO --> saveUser");
        return jsonVal;
    }

    /**
     * Fetches the user JSON for the given maintenance contact.
     * NOTE(review): delegates to the same DAO method as {@link #getTicketVal(String)};
     * confirm whether a distinct DAO lookup was intended.
     */
    public String getUserVal(String maintContact) {
        // Fixed copy-paste: this method previously logged "getTicketVal".
        logger.info("Start of execution of TicketBO --> getUserVal");
        String jsonVal = this.ticketDAO.getJsonVal(maintContact);
        logger.info("End of execution of TicketBO --> getUserVal");
        return jsonVal;
    }

    /**
     * Fetches the ticket JSON for the given maintenance contact.
     */
    public String getTicketVal(String maintContact) {
        logger.info("Start of execution of TicketBO --> getTicketVal");
        String jsonVal = this.ticketDAO.getJsonVal(maintContact);
        logger.info("End of execution of TicketBO --> getTicketVal");
        return jsonVal;
    }

    /**
     * Updates an existing ticket.
     *
     * @param ticket the ticket carrying updated values
     * @return the DAO result (JSON string)
     */
    public String saveUpdateTicket(Ticket ticket) {
        // Fixed copy-paste: log messages previously referred to "saveTicket".
        logger.info("Start of execution of TicketBO --> saveUpdateTicket");
        logger.info("the value of date is: {}", ticket.getExpectedDeployment());
        String jsonVal = this.ticketDAO.saveUpdateTicket(ticket);
        logger.info("End of execution of TicketBO --> saveUpdateTicket");
        return jsonVal;
    }

    /**
     * @return the next value of the named sequence.
     */
    public int getNextSequence(String key) {
        int jsonVal = this.ticketDAO.getNextSequence(key);
        // Fixed copy-paste: previously logged "saveTicket".
        logger.info("End of execution of TicketBO --> getNextSequence");
        return jsonVal;
    }

    /** Looks up a user by an arbitrary field/value pair. */
    public UserData getDatafromMongo(String pair, String value) {
        return this.ticketDAO.getDatafromMongo(pair, value);
    }

    /** Confirms a registration token/email combination. */
    public boolean confirmDatafromMongo(String token, String email) {
        return this.ticketDAO.confirmDatafromMongo(token, email);
    }

    /** @return true if the credentials match a stored user. */
    public boolean isValidUser(String userName, String passWord) {
        return this.ticketDAO.isValidUser(userName, passWord);
    }

    /** Finds a user (JSON) by credentials. */
    public String findUserbyUserName(String userName, String passWord) {
        return this.ticketDAO.findUserbyUserName(userName, passWord);
    }

    /** Finds a user entity by credentials. */
    public UserData findUserDatabyUserName(String userName, String passWord) {
        return this.ticketDAO.findUserDatabyUserName(userName, passWord);
    }

    /** Finds a user entity by registration number. */
    public UserData findUserDatabyRegID(String regNum) {
        return this.ticketDAO.findUserDatabyRegID(regNum);
    }

    /** Persists user payment data. */
    public boolean saveUserPayData(UserData userData) {
        return this.ticketDAO.saveUserPayData(userData);
    }

    /** Updates a user's approval type by registration number. */
    public boolean updateUserDatabyApprovalType(String regNum, String approvalType) {
        return this.ticketDAO.updateUserDatabyApprovalType(regNum, approvalType);
    }

    /** Looks up a user by token and email. */
    public UserData getDatafromMongobyTokenEmail(String token, String email) {
        return this.ticketDAO.getDatafromMongobyTokenEmail(token, email);
    }

    /** @return all stored users. */
    public List<UserData> getAllUserData() {
        return this.ticketDAO.getAllUserData();
    }
}
| apache-2.0 |
yonglehou/cassandra | src/java/org/apache/cassandra/db/view/MaterializedView.java | 29762 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.view;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import com.google.common.collect.Iterables;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.MaterializedViewDefinition;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.cql3.ColumnIdentifier;
import org.apache.cassandra.cql3.statements.CFProperties;
import org.apache.cassandra.db.AbstractReadCommandBuilder.SinglePartitionSliceBuilder;
import org.apache.cassandra.db.CBuilder;
import org.apache.cassandra.db.Clustering;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.DeletionInfo;
import org.apache.cassandra.db.DeletionTime;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.LivenessInfo;
import org.apache.cassandra.db.Mutation;
import org.apache.cassandra.db.RangeTombstone;
import org.apache.cassandra.db.ReadCommand;
import org.apache.cassandra.db.ReadOrderGroup;
import org.apache.cassandra.db.SinglePartitionReadCommand;
import org.apache.cassandra.db.Slice;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.partitions.AbstractBTreePartition;
import org.apache.cassandra.db.partitions.PartitionIterator;
import org.apache.cassandra.db.partitions.PartitionUpdate;
import org.apache.cassandra.db.rows.BTreeRow;
import org.apache.cassandra.db.rows.Cell;
import org.apache.cassandra.db.rows.ColumnData;
import org.apache.cassandra.db.rows.ComplexColumnData;
import org.apache.cassandra.db.rows.Row;
import org.apache.cassandra.db.rows.RowIterator;
import org.apache.cassandra.schema.KeyspaceMetadata;
import org.apache.cassandra.service.pager.QueryPager;
/**
 * A Materialized View copies data from a base table into a view table which can be queried independently from the
 * base. Every update which targets the base table must be fed through the {@link MaterializedViewManager} to ensure
 * that if a view needs to be updated, the updates are properly created and fed into the view.
 *
 * This class does the job of translating the base row to the view row.
 *
 * It handles reading existing state and figuring out what tombstones need to be generated.
 *
 * createMutations below is the "main method"
 *
 */
public class MaterializedView
{
    /**
     * The columns should all be updated together, so we use this object as group.
     */
    private static class MVColumns
    {
        //These are the base column definitions in terms of the *views* partitioning.
        //Meaning we can see (for example) the partition key of the view contains a clustering key
        //from the base table.
        public final List<ColumnDefinition> partitionDefs;
        public final List<ColumnDefinition> primaryKeyDefs;
        public final List<ColumnDefinition> baseComplexColumns;

        private MVColumns(List<ColumnDefinition> partitionDefs, List<ColumnDefinition> primaryKeyDefs, List<ColumnDefinition> baseComplexColumns)
        {
            this.partitionDefs = partitionDefs;
            this.primaryKeyDefs = primaryKeyDefs;
            this.baseComplexColumns = baseComplexColumns;
        }
    }

    // Name of the view table (same keyspace as the base table).
    public final String name;
    private final ColumnFamilyStore baseCfs;
    // Lazily initialized by getViewCfs() to avoid initialization-order problems.
    private ColumnFamilyStore _viewCfs = null;
    // Rebuilt by updateDefinition() whenever the view definition changes.
    private MVColumns columns;
    // True when every view primary key column is also a base primary key column; this lets
    // createRangeTombstoneForRow() skip shadowable-tombstone generation entirely.
    private final boolean viewHasAllPrimaryKeys;
    // True when the view selects all base columns (no explicit "included" set).
    private final boolean includeAll;
    private MaterializedViewBuilder builder;

    public MaterializedView(MaterializedViewDefinition definition,
                            ColumnFamilyStore baseCfs)
    {
        this.baseCfs = baseCfs;
        name = definition.viewName;
        includeAll = definition.includeAll;
        viewHasAllPrimaryKeys = updateDefinition(definition);
    }

    /**
     * Lazily fetch the CFS instance for the view.
     * We do this lazily to avoid initilization issues.
     *
     * @return The views CFS instance
     */
    public ColumnFamilyStore getViewCfs()
    {
        if (_viewCfs == null)
            _viewCfs = Keyspace.openAndGetStore(Schema.instance.getCFMetaData(baseCfs.keyspace.getName(), name));

        return _viewCfs;
    }

    /**
     * Lookup column definitions in the base table that correspond to the view columns (should be 1:1)
     *
     * Notify caller if all primary keys in the view are ALL primary keys in the base. We do this to simplify
     * tombstone checks.
     *
     * @param columns a list of columns to lookup in the base table
     * @param definitions lists to populate for the base table definitions
     * @return true if all view PKs are also Base PKs
     */
    private boolean resolveAndAddColumns(Iterable<ColumnIdentifier> columns, List<ColumnDefinition>... definitions)
    {
        boolean allArePrimaryKeys = true;
        for (ColumnIdentifier identifier : columns)
        {
            ColumnDefinition cdef = baseCfs.metadata.getColumnDefinition(identifier);
            assert cdef != null : "Could not resolve column " + identifier.toString();

            // Each resolved definition is appended to every supplied output list.
            for (List<ColumnDefinition> list : definitions)
            {
                list.add(cdef);
            }

            allArePrimaryKeys = allArePrimaryKeys && cdef.isPrimaryKeyColumn();
        }

        return allArePrimaryKeys;
    }

    /**
     * This updates the columns stored which are dependent on the base CFMetaData.
     *
     * @return true if the view contains only columns which are part of the base's primary key; false if there is at
     *         least one column which is not.
     */
    public boolean updateDefinition(MaterializedViewDefinition definition)
    {
        List<ColumnDefinition> partitionDefs = new ArrayList<>(definition.partitionColumns.size());
        List<ColumnDefinition> primaryKeyDefs = new ArrayList<>(definition.partitionColumns.size()
                                                                + definition.clusteringColumns.size());
        List<ColumnDefinition> baseComplexColumns = new ArrayList<>();

        // We only add the partition columns to the partitions list, but both partition columns and clustering
        // columns are added to the primary keys list
        boolean partitionAllPrimaryKeyColumns = resolveAndAddColumns(definition.partitionColumns, primaryKeyDefs, partitionDefs);
        boolean clusteringAllPrimaryKeyColumns = resolveAndAddColumns(definition.clusteringColumns, primaryKeyDefs);

        // Complex (collection/UDT) base columns may need their own tombstones in the view.
        for (ColumnDefinition cdef : baseCfs.metadata.allColumns())
        {
            if (cdef.isComplex())
            {
                baseComplexColumns.add(cdef);
            }
        }

        this.columns = new MVColumns(partitionDefs, primaryKeyDefs, baseComplexColumns);

        return partitionAllPrimaryKeyColumns && clusteringAllPrimaryKeyColumns;
    }

    /**
     * Check to see if the update could possibly modify a view. Cases where the view may be updated are:
     * <ul>
     *     <li>View selects all columns</li>
     *     <li>Update contains any range tombstones</li>
     *     <li>Update touches one of the columns included in the view</li>
     * </ul>
     *
     * If the update contains any range tombstones, there is a possibility that it will not touch a range that is
     * currently included in the view.
     *
     * @return true if {@param partition} modifies a column included in the view
     */
    public boolean updateAffectsView(AbstractBTreePartition partition)
    {
        // If we are including all of the columns, then any update will be included
        if (includeAll)
            return true;

        // If there are range tombstones, tombstones will also need to be generated for the materialized view
        // This requires a query of the base rows and generating tombstones for all of those values
        if (!partition.deletionInfo().isLive())
            return true;

        // Check each row for deletion or update
        for (Row row : partition)
        {
            if (row.hasComplexDeletion())
                return true;
            if (!row.deletion().isLive())
                return true;

            // Any cell whose column also exists in the view metadata affects the view.
            for (ColumnData data : row)
            {
                if (getViewCfs().metadata.getColumnDefinition(data.column().name) != null)
                    return true;
            }
        }

        return false;
    }

    /**
     * Creates the clustering columns for the view based on the specified row and resolver policy
     *
     * @param temporalRow The current row
     * @param resolver The policy to use when selecting versions of cells use
     * @return The clustering object to use for the view
     */
    private Clustering viewClustering(TemporalRow temporalRow, TemporalRow.Resolver resolver)
    {
        CFMetaData viewCfm = getViewCfs().metadata;
        int numViewClustering = viewCfm.clusteringColumns().size();
        CBuilder clustering = CBuilder.create(getViewCfs().getComparator());
        for (int i = 0; i < numViewClustering; i++)
        {
            ColumnDefinition definition = viewCfm.clusteringColumns().get(i);
            clustering.add(temporalRow.clusteringValue(definition, resolver));
        }

        return clustering.build();
    }

    /**
     * @return Mutation containing a range tombstone for a base partition key and TemporalRow.
     */
    private PartitionUpdate createTombstone(TemporalRow temporalRow,
                                            DecoratedKey partitionKey,
                                            Row.Deletion deletion,
                                            TemporalRow.Resolver resolver,
                                            int nowInSec)
    {
        CFMetaData viewCfm = getViewCfs().metadata;
        Row.Builder builder = BTreeRow.unsortedBuilder(nowInSec);
        builder.newRow(viewClustering(temporalRow, resolver));
        builder.addRowDeletion(deletion);
        return PartitionUpdate.singleRowUpdate(viewCfm, partitionKey, builder.build());
    }

    /**
     * @return PartitionUpdate containing a complex tombstone for a TemporalRow, and the collection's column identifier.
     */
    private PartitionUpdate createComplexTombstone(TemporalRow temporalRow,
                                                   DecoratedKey partitionKey,
                                                   ColumnDefinition deletedColumn,
                                                   DeletionTime deletionTime,
                                                   TemporalRow.Resolver resolver,
                                                   int nowInSec)
    {
        CFMetaData viewCfm = getViewCfs().metadata;
        Row.Builder builder = BTreeRow.unsortedBuilder(nowInSec);
        builder.newRow(viewClustering(temporalRow, resolver));
        builder.addComplexDeletion(deletedColumn, deletionTime);
        return PartitionUpdate.singleRowUpdate(viewCfm, partitionKey, builder.build());
    }

    /**
     * @return View's DecoratedKey or null, if one of the view's primary key components has an invalid resolution from
     *         the TemporalRow and its Resolver
     */
    private DecoratedKey viewPartitionKey(TemporalRow temporalRow, TemporalRow.Resolver resolver)
    {
        List<ColumnDefinition> partitionDefs = this.columns.partitionDefs;
        Object[] partitionKey = new Object[partitionDefs.size()];

        for (int i = 0; i < partitionKey.length; i++)
        {
            ByteBuffer value = temporalRow.clusteringValue(partitionDefs.get(i), resolver);

            // A null component means the key cannot be built at all for this resolver.
            if (value == null)
                return null;

            partitionKey[i] = value;
        }

        CFMetaData metadata = getViewCfs().metadata;
        return metadata.decorateKey(CFMetaData.serializePartitionKey(metadata
                                                                     .getKeyValidatorAsClusteringComparator()
                                                                     .make(partitionKey)));
    }

    /**
     * @return mutation which contains the tombstone for the referenced TemporalRow, or null if not necessary.
     * TemporalRow's can reference at most one view row; there will be at most one row to be tombstoned, so only one
     * mutation is necessary
     */
    private PartitionUpdate createRangeTombstoneForRow(TemporalRow temporalRow)
    {
        // Primary Key and Clustering columns do not generate tombstones
        if (viewHasAllPrimaryKeys)
            return null;

        // If no regular (non-PK) view key column changed its value in this update, the old view
        // row is unchanged and no shadowable tombstone is needed.
        boolean hasUpdate = false;
        List<ColumnDefinition> primaryKeyDefs = this.columns.primaryKeyDefs;
        for (ColumnDefinition viewPartitionKeys : primaryKeyDefs)
        {
            if (!viewPartitionKeys.isPrimaryKeyColumn() && temporalRow.clusteringValue(viewPartitionKeys, TemporalRow.oldValueIfUpdated) != null)
                hasUpdate = true;
        }

        if (!hasUpdate)
            return null;

        // Use the *earliest* values so the tombstone targets the pre-update view row.
        TemporalRow.Resolver resolver = TemporalRow.earliest;
        return createTombstone(temporalRow,
                               viewPartitionKey(temporalRow, resolver),
                               Row.Deletion.shadowable(new DeletionTime(temporalRow.viewClusteringTimestamp(), temporalRow.nowInSec)),
                               resolver,
                               temporalRow.nowInSec);
    }

    /**
     * @return Mutation which is the transformed base table mutation for the materialized view.
     */
    private PartitionUpdate createUpdatesForInserts(TemporalRow temporalRow)
    {
        // Use the *latest* values so the insert targets the post-update view row.
        TemporalRow.Resolver resolver = TemporalRow.latest;

        DecoratedKey partitionKey = viewPartitionKey(temporalRow, resolver);
        ColumnFamilyStore viewCfs = getViewCfs();

        if (partitionKey == null)
        {
            // Not having a partition key means we aren't updating anything
            return null;
        }

        Row.Builder regularBuilder = BTreeRow.unsortedBuilder(temporalRow.nowInSec);

        CBuilder clustering = CBuilder.create(viewCfs.getComparator());
        for (int i = 0; i < viewCfs.metadata.clusteringColumns().size(); i++)
        {
            clustering.add(temporalRow.clusteringValue(viewCfs.metadata.clusteringColumns().get(i), resolver));
        }
        regularBuilder.newRow(clustering.build());
        regularBuilder.addPrimaryKeyLivenessInfo(LivenessInfo.create(viewCfs.metadata,
                                                                     temporalRow.viewClusteringTimestamp(),
                                                                     temporalRow.viewClusteringTtl(),
                                                                     temporalRow.viewClusteringLocalDeletionTime()));

        // Copy every non-PK column's cells into the view row.
        for (ColumnDefinition columnDefinition : viewCfs.metadata.allColumns())
        {
            if (columnDefinition.isPrimaryKeyColumn())
                continue;

            for (Cell cell : temporalRow.values(columnDefinition, resolver))
            {
                regularBuilder.addCell(cell);
            }
        }

        return PartitionUpdate.singleRowUpdate(viewCfs.metadata, partitionKey, regularBuilder.build());
    }

    /**
     * @param partition Update which possibly contains deletion info for which to generate view tombstones.
     * @return View Tombstones which delete all of the rows which have been removed from the base table with
     *         {@param partition}
     */
    private Collection<Mutation> createForDeletionInfo(TemporalRow.Set rowSet, AbstractBTreePartition partition)
    {
        final TemporalRow.Resolver resolver = TemporalRow.earliest;

        DeletionInfo deletionInfo = partition.deletionInfo();

        List<Mutation> mutations = new ArrayList<>();

        // Check the complex columns to see if there are any which may have tombstones we need to create for the view
        if (!columns.baseComplexColumns.isEmpty())
        {
            for (Row row : partition)
            {
                if (!row.hasComplexDeletion())
                    continue;

                TemporalRow temporalRow = rowSet.getClustering(row.clustering());
                assert temporalRow != null;

                for (ColumnDefinition definition : columns.baseComplexColumns)
                {
                    ComplexColumnData columnData = row.getComplexColumnData(definition);

                    if (columnData != null)
                    {
                        DeletionTime time = columnData.complexDeletion();
                        if (!time.isLive())
                        {
                            DecoratedKey targetKey = viewPartitionKey(temporalRow, resolver);
                            if (targetKey != null)
                                mutations.add(new Mutation(createComplexTombstone(temporalRow, targetKey, definition, time, resolver, temporalRow.nowInSec)));
                        }
                    }
                }
            }
        }

        ReadCommand command = null;

        if (!deletionInfo.isLive())
        {
            // We have to generate tombstones for all of the affected rows, but we don't have the information in order
            // to create them. This requires that we perform a read for the entire range that is being tombstoned, and
            // generate a tombstone for each. This may be slow, because a single range tombstone can cover up to an
            // entire partition of data which is not distributed on a single partition node.
            DecoratedKey dk = rowSet.dk;

            if (deletionInfo.hasRanges())
            {
                // Read exactly the slices covered by the range tombstones.
                SinglePartitionSliceBuilder builder = new SinglePartitionSliceBuilder(baseCfs, dk);
                Iterator<RangeTombstone> tombstones = deletionInfo.rangeIterator(false);
                while (tombstones.hasNext())
                {
                    RangeTombstone tombstone = tombstones.next();

                    builder.addSlice(tombstone.deletedSlice());
                }

                command = builder.build();
            }
            else
            {
                // Partition-level deletion: the whole base partition must be read back.
                command = SinglePartitionReadCommand.fullPartitionRead(baseCfs.metadata, rowSet.nowInSec, dk);
            }
        }

        if (command == null)
        {
            // No partition/range deletion; read back only the rows that carry a row-level deletion.
            SinglePartitionSliceBuilder builder = null;
            for (Row row : partition)
            {
                if (!row.deletion().isLive())
                {
                    if (builder == null)
                        builder = new SinglePartitionSliceBuilder(baseCfs, rowSet.dk);
                    builder.addSlice(Slice.make(row.clustering()));
                }
            }

            if (builder != null)
                command = builder.build();
        }

        if (command != null)
        {
            //We may have already done this work for
            //another MV update so check
            if (!rowSet.hasTombstonedExisting())
            {
                QueryPager pager = command.getPager(null);

                // Add all of the rows which were recovered from the query to the row set
                while (!pager.isExhausted())
                {
                    try (ReadOrderGroup orderGroup = pager.startOrderGroup();
                         PartitionIterator iter = pager.fetchPageInternal(128, orderGroup))
                    {
                        if (!iter.hasNext())
                            break;

                        try (RowIterator rowIterator = iter.next())
                        {
                            while (rowIterator.hasNext())
                            {
                                Row row = rowIterator.next();
                                rowSet.addRow(row, false);
                            }
                        }
                    }
                }

                //In case we fetched nothing, avoid re-checking on another MV update
                rowSet.setTombstonedExisting();
            }

            // If the temporal row has been deleted by the deletion info, we generate the corresponding range tombstone
            // for the view.
            for (TemporalRow temporalRow : rowSet)
            {
                DeletionTime deletionTime = temporalRow.deletionTime(partition);
                if (!deletionTime.isLive())
                {
                    DecoratedKey value = viewPartitionKey(temporalRow, resolver);
                    if (value != null)
                    {
                        PartitionUpdate update = createTombstone(temporalRow, value, Row.Deletion.regular(deletionTime), resolver, temporalRow.nowInSec);
                        if (update != null)
                            mutations.add(new Mutation(update));
                    }
                }
            }
        }

        return !mutations.isEmpty() ? mutations : null;
    }

    /**
     * Read and update temporal rows in the set which have corresponding values stored on the local node
     */
    private void readLocalRows(TemporalRow.Set rowSet)
    {
        SinglePartitionSliceBuilder builder = new SinglePartitionSliceBuilder(baseCfs, rowSet.dk);

        for (TemporalRow temporalRow : rowSet)
            builder.addSlice(temporalRow.baseSlice());

        QueryPager pager = builder.build().getPager(null);

        while (!pager.isExhausted())
        {
            try (ReadOrderGroup orderGroup = pager.startOrderGroup();
                 PartitionIterator iter = pager.fetchPageInternal(128, orderGroup))
            {
                while (iter.hasNext())
                {
                    try (RowIterator rows = iter.next())
                    {
                        while (rows.hasNext())
                        {
                            // false: these are pre-existing values, not new update values.
                            rowSet.addRow(rows.next(), false);
                        }
                    }
                }
            }
        }
    }

    /**
     * @return Set of rows which are contained in the partition update {@param partition}
     */
    private TemporalRow.Set separateRows(AbstractBTreePartition partition, Set<ColumnIdentifier> viewPrimaryKeyCols)
    {
        TemporalRow.Set rowSet = new TemporalRow.Set(baseCfs, viewPrimaryKeyCols, partition.partitionKey().getKey());

        for (Row row : partition)
            rowSet.addRow(row, true);

        return rowSet;
    }

    /**
     * Splits the partition update up and adds the existing state to each row.
     * This data can be reused for multiple MV updates on the same base table
     *
     * @param partition the mutation
     * @param isBuilding If the view is currently being built, we do not query the values which are already stored,
     *                   since all of the update will already be present in the base table.
     * @return The set of temoral rows contained in this update
     */
    public TemporalRow.Set getTemporalRowSet(AbstractBTreePartition partition, TemporalRow.Set existing, boolean isBuilding)
    {
        if (!updateAffectsView(partition))
            return null;

        Set<ColumnIdentifier> columns = new HashSet<>(this.columns.primaryKeyDefs.size());
        for (ColumnDefinition def : this.columns.primaryKeyDefs)
            columns.add(def.name);

        TemporalRow.Set rowSet = null;
        if (existing == null)
        {
            rowSet = separateRows(partition, columns);

            // If we are building the view, we do not want to add old values; they will always be the same
            if (!isBuilding)
                readLocalRows(rowSet);
        }
        else
        {
            // Reuse the work done by a previous view on the same base update.
            rowSet = existing.withNewViewPrimaryKey(columns);
        }

        return rowSet;
    }

    /**
     * @param isBuilding If the view is currently being built, we do not query the values which are already stored,
     *                   since all of the update will already be present in the base table.
     * @return View mutations which represent the changes necessary as long as previously created mutations for the view
     * have been applied successfully. This is based solely on the changes that are necessary given the current
     * state of the base table and the newly applying partition data.
     */
    public Collection<Mutation> createMutations(AbstractBTreePartition partition, TemporalRow.Set rowSet, boolean isBuilding)
    {
        if (!updateAffectsView(partition))
            return null;

        Collection<Mutation> mutations = null;

        for (TemporalRow temporalRow : rowSet)
        {
            // If we are building, there is no need to check for partition tombstones; those values will not be present
            // in the partition data
            if (!isBuilding)
            {
                PartitionUpdate partitionTombstone = createRangeTombstoneForRow(temporalRow);
                if (partitionTombstone != null)
                {
                    if (mutations == null) mutations = new LinkedList<>();
                    mutations.add(new Mutation(partitionTombstone));
                }
            }

            PartitionUpdate insert = createUpdatesForInserts(temporalRow);
            if (insert != null)
            {
                if (mutations == null) mutations = new LinkedList<>();
                mutations.add(new Mutation(insert));
            }
        }

        if (!isBuilding)
        {
            Collection<Mutation> deletion = createForDeletionInfo(rowSet, partition);
            if (deletion != null && !deletion.isEmpty())
            {
                if (mutations == null) mutations = new LinkedList<>();
                mutations.addAll(deletion);
            }
        }

        return mutations;
    }

    // Restarts the view build from scratch, cancelling any build already in progress.
    public synchronized void build()
    {
        if (this.builder != null)
        {
            this.builder.stop();
            this.builder = null;
        }

        this.builder = new MaterializedViewBuilder(baseCfs, this);
        CompactionManager.instance.submitMaterializedViewBuilder(builder);
    }

    // Finds the base table CFMetaData that declares the given view, or null if none does.
    @Nullable
    public static CFMetaData findBaseTable(String keyspace, String view)
    {
        KeyspaceMetadata ksm = Schema.instance.getKSMetaData(keyspace);

        if (ksm == null)
            return null;

        for (CFMetaData cfm : ksm.tables)
            if (cfm.getMaterializedViews().get(view).isPresent())
                return cfm;

        return null;
    }

    /**
     * @return CFMetaData which represents the definition given
     */
    public static CFMetaData getCFMetaData(MaterializedViewDefinition definition,
                                           CFMetaData baseCf,
                                           CFProperties properties)
    {
        CFMetaData.Builder viewBuilder = CFMetaData.Builder
                                         .createView(baseCf.ksName, definition.viewName);

        // Tracks the (at most one, per MV rules — TODO confirm) view partition key column that is
        // not a base partition key; it is excluded from the regular-column copy below.
        ColumnDefinition nonPkTarget = null;

        for (ColumnIdentifier targetIdentifier : definition.partitionColumns)
        {
            ColumnDefinition target = baseCf.getColumnDefinition(targetIdentifier);
            if (!target.isPartitionKey())
                nonPkTarget = target;

            viewBuilder.addPartitionKey(target.name, properties.getReversableType(targetIdentifier, target.type));
        }

        Collection<ColumnDefinition> included = new ArrayList<>();
        for(ColumnIdentifier identifier : definition.included)
        {
            ColumnDefinition cfDef = baseCf.getColumnDefinition(identifier);
            assert cfDef != null;
            included.add(cfDef);
        }

        // An empty "included" set means the view selects every base column.
        boolean includeAll = included.isEmpty();

        for (ColumnIdentifier ident : definition.clusteringColumns)
        {
            ColumnDefinition column = baseCf.getColumnDefinition(ident);
            viewBuilder.addClusteringColumn(ident, properties.getReversableType(ident, column.type));
        }

        for (ColumnDefinition column : baseCf.partitionColumns().regulars)
        {
            if (column != nonPkTarget && (includeAll || included.contains(column)))
            {
                viewBuilder.addRegularColumn(column.name, column.type);
            }
        }

        //Add any extra clustering columns
        for (ColumnDefinition column : Iterables.concat(baseCf.partitionKeyColumns(), baseCf.clusteringColumns()))
        {
            if ( (!definition.partitionColumns.contains(column.name) && !definition.clusteringColumns.contains(column.name)) &&
                 (includeAll || included.contains(column)) )
            {
                viewBuilder.addRegularColumn(column.name, column.type);
            }
        }

        return viewBuilder.build().params(properties.properties.asNewTableParams());
    }
}
| apache-2.0 |
Dreamer012/ExamOnline | src/main/java/cn/examsys/common/model/base/Exam.java | 2355 | package cn.examsys.common.model.base;
import cn.examsys.Constants;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.*;
import java.util.Date;
@Entity
@Table(name = Constants.TABLE_EXAM)
public class Exam {

    // Primary key of this exam attempt.
    private Integer id;
    // Student who took the exam.
    private Student student;
    // Paper the exam was based on.
    private Paper paper;
    // Score from the single-choice section.
    private int singleScore;
    // Score from the multiple-choice section.
    private int multipleScore;
    // Total score across all sections.
    private int sumScore;
    // Date on which the exam was taken.
    private Date examDate;

    @Id
    @GeneratedValue(generator = "_native")
    @GenericGenerator(name = "_native", strategy = "native")
    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    @ManyToOne
    @JoinColumn(name = "studentId")
    public Student getStudent() {
        return student;
    }

    public void setStudent(Student student) {
        this.student = student;
    }

    @ManyToOne
    @JoinColumn(name = "paperId")
    public Paper getPaper() {
        return paper;
    }

    public void setPaper(Paper paper) {
        this.paper = paper;
    }

    public int getSingleScore() {
        return singleScore;
    }

    public void setSingleScore(int singleScore) {
        this.singleScore = singleScore;
    }

    public int getMultipleScore() {
        return multipleScore;
    }

    public void setMultipleScore(int multipleScore) {
        this.multipleScore = multipleScore;
    }

    public int getSumScore() {
        return sumScore;
    }

    public void setSumScore(int sumScore) {
        this.sumScore = sumScore;
    }

    public Date getExamDate() {
        return examDate;
    }

    public void setExamDate(Date examDate) {
        this.examDate = examDate;
    }

    @Override
    public String toString() {
        // Builds the exact same representation as the original concatenation form.
        StringBuilder text = new StringBuilder("Exam{");
        text.append("id=").append(id)
            .append(", student=").append(student)
            .append(", paper=").append(paper)
            .append(", singleScore=").append(singleScore)
            .append(", multipleScore=").append(multipleScore)
            .append(", sumScore=").append(sumScore)
            .append(", examDate=").append(examDate)
            .append('}');
        return text.toString();
    }
}
| apache-2.0 |
wildfly-extras/wildfly-camel | itests/standalone/basic/src/test/java/org/wildfly/camel/test/sql/SQLIntegrationTest.java | 5698 | /*
* #%L
* Wildfly Camel :: Testsuite
* %%
* Copyright (C) 2013 - 2014 RedHat
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.wildfly.camel.test.sql;
import java.util.Map;
import javax.annotation.Resource;
import javax.sql.DataSource;
import org.apache.camel.CamelContext;
import org.apache.camel.PollingConsumer;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.processor.idempotent.jdbc.JdbcMessageIdRepository;
import org.apache.camel.support.jndi.JndiBeanRepository;
import org.jboss.arquillian.container.test.api.Deployer;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.EmptyAsset;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.camel.test.sql.subA.CDIRouteBuilder;
import org.wildfly.extension.camel.CamelAware;
import org.wildfly.extension.camel.CamelContextRegistry;
/**
 * In-container integration tests for the camel-sql component on WildFly, using the
 * container-provided {@code ExampleDS} datasource (H2) for queries and for a JDBC-backed
 * idempotent repository.
 */
@CamelAware
@RunWith(Arquillian.class)
public class SQLIntegrationTest {
    /** Name of the separately-deployed archive holding the CDI-defined camel-sql routes. */
    private static final String CAMEL_SQL_CDI_ROUTES_JAR = "camel-sql-cdi-routes.jar";
    @ArquillianResource
    CamelContextRegistry contextRegistry;
    @ArquillianResource
    Deployer deployer;
    // Injected from the container's default datasource binding.
    @Resource(lookup = "java:jboss/datasources/ExampleDS")
    DataSource dataSource;
    /** Main (managed) test deployment; the empty beans.xml enables CDI. */
    @Deployment
    public static JavaArchive createDeployment() {
        final JavaArchive archive = ShrinkWrap.create(JavaArchive.class, "camel-sql-tests.jar");
        archive.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml");
        return archive;
    }
    /** Unmanaged deployment containing the CDI route builder; deployed on demand by the tests. */
    @Deployment(managed = false, name = CAMEL_SQL_CDI_ROUTES_JAR)
    public static JavaArchive createCDIDeployment() {
        final JavaArchive archive = ShrinkWrap.create(JavaArchive.class, CAMEL_SQL_CDI_ROUTES_JAR);
        archive.addPackage(CDIRouteBuilder.class.getPackage());
        archive.addAsManifestResource(EmptyAsset.INSTANCE, "beans.xml");
        return archive;
    }
    /** Queries H2's information_schema through a sql: endpoint and expects the SA user. */
    @Test
    public void testSQLEndpoint() throws Exception {
        Assert.assertNotNull("DataSource not null", dataSource);
        // JndiBeanRepository lets the sql: endpoint resolve the datasource by its JNDI name.
        CamelContext camelctx = new DefaultCamelContext(new JndiBeanRepository());
        camelctx.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("sql:select name from information_schema.users?dataSource=java:jboss/datasources/ExampleDS")
                .to("seda:end");
            }
        });
        PollingConsumer pollingConsumer = camelctx.getEndpoint("seda:end").createPollingConsumer();
        pollingConsumer.start();
        camelctx.start();
        try {
            // The sql: component delivers each row as a Map keyed by column name; wait up to 3s.
            String result = (String) pollingConsumer.receive(3000).getIn().getBody(Map.class).get("NAME");
            Assert.assertEquals("SA", result);
        } finally {
            camelctx.close();
        }
    }
    /** Same query, but through the CDI-bootstrapped context from the on-demand deployment. */
    @Test
    public void testSQLEndpointWithCDIContext() throws Exception {
        try {
            deployer.deploy(CAMEL_SQL_CDI_ROUTES_JAR);
            CamelContext camelctx = contextRegistry.getCamelContext("camel-sql-cdi-context");
            Assert.assertNotNull("Camel context not null", camelctx);
            PollingConsumer pollingConsumer = camelctx.getEndpoint("seda:end").createPollingConsumer();
            pollingConsumer.start();
            String result = (String) pollingConsumer.receive(3000).getIn().getBody(Map.class).get("NAME");
            Assert.assertEquals("SA", result);
        } finally {
            // Always undeploy the unmanaged archive so later tests start from a clean container.
            deployer.undeploy(CAMEL_SQL_CDI_ROUTES_JAR);
        }
    }
    /** Verifies duplicate suppression backed by a JDBC idempotent repository. */
    @Test
    public void testSqlIdempotentConsumer() throws Exception {
        Assert.assertNotNull("DataSource not null", dataSource);
        final JdbcMessageIdRepository jdbcMessageIdRepository = new JdbcMessageIdRepository(dataSource, "myProcessorName");
        CamelContext camelctx = new DefaultCamelContext();
        camelctx.addRoutes(new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start")
                .idempotentConsumer(simple("${header.messageId}"), jdbcMessageIdRepository)
                .to("mock:result");
            }
        });
        camelctx.start();
        try {
            MockEndpoint mockEndpoint = camelctx.getEndpoint("mock:result", MockEndpoint.class);
            mockEndpoint.expectedMessageCount(1);
            // Send 5 messages with the same messageId header. Only 1 should be forwarded to the mock:result endpoint
            ProducerTemplate template = camelctx.createProducerTemplate();
            for (int i = 0; i < 5; i++) {
                template.requestBodyAndHeader("direct:start", null, "messageId", "12345");
            }
            mockEndpoint.assertIsSatisfied();
        } finally {
            camelctx.close();
        }
    }
}
| apache-2.0 |
Stratio/stratio-cassandra | src/java/com/stratio/cassandra/index/schema/mapping/ColumnMapperDate.java | 4256 | /*
* Copyright 2014, Stratio.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.cassandra.index.schema.mapping;
import com.google.common.base.Objects;
import org.apache.cassandra.db.marshal.*;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongField;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortField.Type;
import org.codehaus.jackson.annotate.JsonCreator;
import org.codehaus.jackson.annotate.JsonProperty;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * A {@link ColumnMapper} to map a date field.
 *
 * <p>Accepts {@link Date} values, numbers (interpreted as epoch milliseconds) and strings
 * (parsed with the configured {@link SimpleDateFormat} pattern). Values are indexed and
 * sorted as Lucene longs.
 *
 * @author Andres de la Pena <adelapena@stratio.com>
 */
public class ColumnMapperDate extends ColumnMapperSingle<Long> {
    /** The default {@link SimpleDateFormat} pattern. */
    public static final String DEFAULT_PATTERN = "yyyy/MM/dd HH:mm:ss.SSS";
    /** The {@link SimpleDateFormat} pattern. */
    private final String pattern;
    /** Per-thread date format, because {@link SimpleDateFormat} is not thread-safe. */
    private final ThreadLocal<DateFormat> concurrentDateFormat;
    /**
     * Builds a new {@link ColumnMapperDate} using the specified pattern.
     *
     * @param pattern The {@link SimpleDateFormat} pattern to be used, or {@code null} for
     *                {@link #DEFAULT_PATTERN}.
     */
    @JsonCreator
    public ColumnMapperDate(@JsonProperty("pattern") String pattern) {
        super(new AbstractType<?>[]{AsciiType.instance,
                                    UTF8Type.instance,
                                    Int32Type.instance,
                                    LongType.instance,
                                    IntegerType.instance,
                                    FloatType.instance,
                                    DoubleType.instance,
                                    DecimalType.instance,
                                    TimestampType.instance},
              new AbstractType[]{LongType.instance, TimestampType.instance});
        this.pattern = pattern == null ? DEFAULT_PATTERN : pattern;
        concurrentDateFormat = new ThreadLocal<DateFormat>() {
            @Override
            protected DateFormat initialValue() {
                return new SimpleDateFormat(ColumnMapperDate.this.pattern);
            }
        };
    }
    /**
     * {@inheritDoc}
     *
     * @throws IllegalArgumentException if {@code value} is a string that does not match the
     *                                  pattern, or is of an unsupported type
     */
    @Override
    public Long indexValue(String name, Object value) {
        if (value == null) {
            return null;
        } else if (value instanceof Date) {
            return ((Date) value).getTime();
        } else if (value instanceof Number) {
            // Numeric values are interpreted as epoch milliseconds.
            return ((Number) value).longValue();
        } else if (value instanceof String) {
            try {
                return concurrentDateFormat.get().parse(value.toString()).getTime();
            } catch (ParseException e) {
                // Report the offending field, value and pattern so the failure is diagnosable.
                throw new IllegalArgumentException(
                        String.format("Field \"%s\": value \"%s\" does not match date pattern \"%s\"",
                                      name, value, pattern), e);
            }
        } else {
            // Previously this threw a message-less exception, which made failures hard to trace.
            throw new IllegalArgumentException(
                    String.format("Field \"%s\": value \"%s\" of type %s cannot be mapped to a date",
                                  name, value, value.getClass().getSimpleName()));
        }
    }
    /** {@inheritDoc} */
    @Override
    public Long queryValue(String name, Object value) {
        // Queries use the same representation as the index: epoch milliseconds.
        return indexValue(name, value);
    }
    /** {@inheritDoc} */
    @Override
    public Field field(String name, Object value) {
        return new LongField(name, indexValue(name, value), STORE);
    }
    /** {@inheritDoc} */
    @Override
    public SortField sortField(String field, boolean reverse) {
        return new SortField(field, Type.LONG, reverse);
    }
    /** {@inheritDoc} */
    @Override
    public Class<Long> baseClass() {
        return Long.class;
    }
    /** {@inheritDoc} */
    @Override
    public String toString() {
        return Objects.toStringHelper(this).add("pattern", pattern).toString();
    }
}
| apache-2.0 |
tgroh/incubator-beam | runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WriteFilesTranslation.java | 13058 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core.construction;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.beam.runners.core.construction.PTransformTranslation.WRITE_FILES_TRANSFORM_URN;
import com.google.auto.service.AutoService;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.beam.model.pipeline.v1.RunnerApi;
import org.apache.beam.model.pipeline.v1.RunnerApi.FunctionSpec;
import org.apache.beam.model.pipeline.v1.RunnerApi.SdkFunctionSpec;
import org.apache.beam.model.pipeline.v1.RunnerApi.SideInput;
import org.apache.beam.model.pipeline.v1.RunnerApi.WriteFilesPayload;
import org.apache.beam.runners.core.construction.PTransformTranslation.TransformPayloadTranslator;
import org.apache.beam.sdk.io.FileBasedSink;
import org.apache.beam.sdk.io.WriteFiles;
import org.apache.beam.sdk.io.WriteFilesResult;
import org.apache.beam.sdk.runners.AppliedPTransform;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.util.SerializableUtils;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.PInput;
import org.apache.beam.sdk.values.POutput;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.TupleTag;
/**
* Utility methods for translating a {@link WriteFiles} to and from {@link RunnerApi}
* representations.
*/
public class WriteFilesTranslation {
/** The URN for an unknown Java {@link FileBasedSink}. */
public static final String CUSTOM_JAVA_FILE_BASED_SINK_URN =
"urn:beam:file_based_sink:javasdk:0.1";
@VisibleForTesting
static WriteFilesPayload payloadForWriteFiles(
final WriteFiles<?, ?, ?> transform, SdkComponents components) throws IOException {
return payloadForWriteFilesLike(
new WriteFilesLike() {
@Override
public SdkFunctionSpec translateSink(SdkComponents newComponents) {
// TODO: register the environment
return toProto(transform.getSink());
}
@Override
public Map<String, SideInput> translateSideInputs(SdkComponents components) {
Map<String, SideInput> sideInputs = new HashMap<>();
for (PCollectionView<?> view :
transform.getSink().getDynamicDestinations().getSideInputs()) {
sideInputs.put(
view.getTagInternal().getId(), ParDoTranslation.translateView(view, components));
}
return sideInputs;
}
@Override
public boolean isWindowedWrites() {
return transform.getWindowedWrites();
}
@Override
public boolean isRunnerDeterminedSharding() {
return transform.getNumShardsProvider() == null
&& transform.getComputeNumShards() == null;
}
},
components);
}
private static SdkFunctionSpec toProto(FileBasedSink<?, ?, ?> sink) {
return toProto(CUSTOM_JAVA_FILE_BASED_SINK_URN, sink);
}
private static SdkFunctionSpec toProto(String urn, Serializable serializable) {
return SdkFunctionSpec.newBuilder()
.setSpec(
FunctionSpec.newBuilder()
.setUrn(urn)
.setPayload(
ByteString.copyFrom(SerializableUtils.serializeToByteArray(serializable)))
.build())
.build();
}
@VisibleForTesting
static FileBasedSink<?, ?, ?> sinkFromProto(SdkFunctionSpec sinkProto) throws IOException {
checkArgument(
sinkProto.getSpec().getUrn().equals(CUSTOM_JAVA_FILE_BASED_SINK_URN),
"Cannot extract %s instance from %s with URN %s",
FileBasedSink.class.getSimpleName(),
FunctionSpec.class.getSimpleName(),
sinkProto.getSpec().getUrn());
byte[] serializedSink = sinkProto.getSpec().getPayload().toByteArray();
return (FileBasedSink<?, ?, ?>)
SerializableUtils.deserializeFromByteArray(
serializedSink, FileBasedSink.class.getSimpleName());
}
public static <UserT, DestinationT, OutputT> FileBasedSink<UserT, DestinationT, OutputT> getSink(
AppliedPTransform<
PCollection<UserT>, WriteFilesResult<DestinationT>,
? extends PTransform<PCollection<UserT>, WriteFilesResult<DestinationT>>>
transform)
throws IOException {
return (FileBasedSink<UserT, DestinationT, OutputT>)
sinkFromProto(getWriteFilesPayload(transform).getSink());
}
public static <UserT, DestinationT> List<PCollectionView<?>> getDynamicDestinationSideInputs(
AppliedPTransform<
PCollection<UserT>, WriteFilesResult<DestinationT>,
? extends PTransform<PCollection<UserT>, WriteFilesResult<DestinationT>>>
transform)
throws IOException {
SdkComponents sdkComponents = SdkComponents.create();
RunnerApi.PTransform transformProto = PTransformTranslation.toProto(transform, sdkComponents);
List<PCollectionView<?>> views = Lists.newArrayList();
Map<String, SideInput> sideInputs = getWriteFilesPayload(transform).getSideInputsMap();
for (Map.Entry<String, SideInput> entry : sideInputs.entrySet()) {
PCollection<?> originalPCollection =
checkNotNull(
(PCollection<?>) transform.getInputs().get(new TupleTag<>(entry.getKey())),
"no input with tag %s",
entry.getKey());
views.add(
PCollectionViewTranslation.viewFromProto(
entry.getValue(),
entry.getKey(),
originalPCollection,
transformProto,
RehydratedComponents.forComponents(sdkComponents.toComponents())));
}
return views;
}
public static <T, DestinationT> boolean isWindowedWrites(
AppliedPTransform<
PCollection<T>, WriteFilesResult<DestinationT>,
? extends PTransform<PCollection<T>, WriteFilesResult<DestinationT>>>
transform)
throws IOException {
return getWriteFilesPayload(transform).getWindowedWrites();
}
public static <T, DestinationT> boolean isRunnerDeterminedSharding(
AppliedPTransform<
PCollection<T>, WriteFilesResult<DestinationT>,
? extends PTransform<PCollection<T>, WriteFilesResult<DestinationT>>>
transform)
throws IOException {
return getWriteFilesPayload(transform).getRunnerDeterminedSharding();
}
private static <T, DestinationT> WriteFilesPayload getWriteFilesPayload(
AppliedPTransform<
PCollection<T>, WriteFilesResult<DestinationT>,
? extends PTransform<PCollection<T>, WriteFilesResult<DestinationT>>>
transform)
throws IOException {
return WriteFilesPayload.parseFrom(
PTransformTranslation.toProto(transform, Collections.emptyList(), SdkComponents.create())
.getSpec()
.getPayload());
}
static class RawWriteFiles extends PTransformTranslation.RawPTransform<PInput, POutput>
implements WriteFilesLike {
private final RunnerApi.PTransform protoTransform;
private final transient RehydratedComponents rehydratedComponents;
// Parsed from protoTransform and cached
private final FunctionSpec spec;
private final RunnerApi.WriteFilesPayload payload;
public RawWriteFiles(
RunnerApi.PTransform protoTransform, RehydratedComponents rehydratedComponents)
throws IOException {
this.rehydratedComponents = rehydratedComponents;
this.protoTransform = protoTransform;
this.spec = protoTransform.getSpec();
this.payload = RunnerApi.WriteFilesPayload.parseFrom(spec.getPayload());
}
@Override
public FunctionSpec getSpec() {
return spec;
}
@Override
public FunctionSpec migrate(SdkComponents components) throws IOException {
return FunctionSpec.newBuilder()
.setUrn(WRITE_FILES_TRANSFORM_URN)
.setPayload(payloadForWriteFilesLike(this, components).toByteString())
.build();
}
@Override
public Map<TupleTag<?>, PValue> getAdditionalInputs() {
Map<TupleTag<?>, PValue> additionalInputs = new HashMap<>();
for (Map.Entry<String, SideInput> sideInputEntry : payload.getSideInputsMap().entrySet()) {
try {
additionalInputs.put(
new TupleTag<>(sideInputEntry.getKey()),
rehydratedComponents.getPCollection(
protoTransform.getInputsOrThrow(sideInputEntry.getKey())));
} catch (IOException exc) {
throw new IllegalStateException(
String.format(
"Could not find input with name %s for %s transform",
sideInputEntry.getKey(), WriteFiles.class.getSimpleName()));
}
}
return additionalInputs;
}
@Override
public SdkFunctionSpec translateSink(SdkComponents newComponents) {
// TODO: re-register the environment with the new components
return payload.getSink();
}
@Override
public Map<String, SideInput> translateSideInputs(SdkComponents components) {
// TODO: re-register the PCollections and UDF environments
return MoreObjects.firstNonNull(
payload.getSideInputsMap(), Collections.<String, SideInput>emptyMap());
}
@Override
public boolean isWindowedWrites() {
return payload.getWindowedWrites();
}
@Override
public boolean isRunnerDeterminedSharding() {
return payload.getRunnerDeterminedSharding();
}
}
static class WriteFilesTranslator implements TransformPayloadTranslator<WriteFiles<?, ?, ?>> {
@Override
public String getUrn(WriteFiles<?, ?, ?> transform) {
return WRITE_FILES_TRANSFORM_URN;
}
@Override
public FunctionSpec translate(
AppliedPTransform<?, ?, WriteFiles<?, ?, ?>> transform, SdkComponents components)
throws IOException {
return FunctionSpec.newBuilder()
.setUrn(getUrn(transform.getTransform()))
.setPayload(payloadForWriteFiles(transform.getTransform(), components).toByteString())
.build();
}
@Override
public PTransformTranslation.RawPTransform<?, ?> rehydrate(
RunnerApi.PTransform protoTransform, RehydratedComponents rehydratedComponents)
throws IOException {
return new RawWriteFiles(protoTransform, rehydratedComponents);
}
}
/** Registers {@link WriteFilesTranslator}. */
@AutoService(TransformPayloadTranslatorRegistrar.class)
public static class Registrar implements TransformPayloadTranslatorRegistrar {
@Override
public Map<Class<? extends PTransform>, TransformPayloadTranslator>
getTransformPayloadTranslators() {
return Collections.singletonMap(WriteFiles.CONCRETE_CLASS, new WriteFilesTranslator());
}
@Override
public Map<String, ? extends TransformPayloadTranslator> getTransformRehydrators() {
return Collections.singletonMap(WRITE_FILES_TRANSFORM_URN, new WriteFilesTranslator());
}
}
/** These methods drive to-proto translation from Java and from rehydrated WriteFiles. */
private interface WriteFilesLike {
SdkFunctionSpec translateSink(SdkComponents newComponents);
Map<String, RunnerApi.SideInput> translateSideInputs(SdkComponents components);
boolean isWindowedWrites();
boolean isRunnerDeterminedSharding();
}
public static WriteFilesPayload payloadForWriteFilesLike(
WriteFilesLike writeFiles, SdkComponents components) throws IOException {
return WriteFilesPayload.newBuilder()
.setSink(writeFiles.translateSink(components))
.putAllSideInputs(writeFiles.translateSideInputs(components))
.setWindowedWrites(writeFiles.isWindowedWrites())
.setRunnerDeterminedSharding(writeFiles.isRunnerDeterminedSharding())
.build();
}
}
| apache-2.0 |
consulo/consulo-spring | aop-common/tests/com/intellij/aop/psi/AopNavigationTestCase.java | 3666 | /*
* Copyright (c) 2000-2005 by JetBrains s.r.o. All Rights Reserved.
* Use is subject to license terms.
*/
package com.intellij.aop.psi;
import com.intellij.aop.AopLiteFixture;
import com.intellij.codeInsight.daemon.LineMarkerInfo;
import com.intellij.codeInsight.navigation.NavigationGutterIconRenderer;
import com.intellij.openapi.editor.markup.GutterIconRenderer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.xml.XmlTag;
import com.intellij.testFramework.UsefulTestCase;
import com.intellij.testFramework.fixtures.JavaCodeInsightFixtureTestCase;
import com.intellij.util.Consumer;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.THashSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * Base test case that checks the navigation targets of AOP gutter icons in a test file.
 *
 * @author peter
 */
public abstract class AopNavigationTestCase extends JavaCodeInsightFixtureTestCase {
  protected void setUp() throws Exception {
    super.setUp();
    // Make the AOP annotation classes available to the in-memory test fixture.
    AopLiteFixture.addAopAnnotations(myFixture);
  }
  /**
   * Collects the navigation targets of every gutter icon in {@code filePath} and checks them
   * against {@code expected}: one string per gutter, with individual targets separated by '\n'.
   * When {@code orderMatters} is false, target order within a gutter is ignored.
   */
  public void checkNavigation(final String filePath, final boolean orderMatters, String... expected) throws Throwable {
    List<Collection<String>> targets = new ArrayList<Collection<String>>();
    for (final GutterIconRenderer renderer : myFixture.findAllGutters(filePath)) {
      processGutterIcon(orderMatters, targets, renderer);
      if (renderer instanceof LineMarkerInfo.LineMarkerGutterIconRenderer) {
        final LineMarkerInfo.LineMarkerGutterIconRenderer iconRenderer = (LineMarkerInfo.LineMarkerGutterIconRenderer)renderer;
        // The line marker's navigation handler may itself be a NavigationGutterIconRenderer.
        processGutterIcon(orderMatters, targets, iconRenderer.getLineMarkerInfo().getNavigationHandler());
      }
    }
    Consumer<Collection<String>>[] checkers = ContainerUtil.map2Array(expected, Consumer.class, new Function<String, Consumer>() {
      public Consumer fun(final String s) {
        return new Consumer<Collection<String>>() {
          public void consume(final Collection<String> o) {
            // An empty expectation string means "no navigation targets".
            final String[] navItems = StringUtil.isEmpty(s) ? new String[0] : s.split("\n");
            if (orderMatters) {
              UsefulTestCase.assertOrderedEquals(o, navItems);
            } else {
              UsefulTestCase.assertSameElements(o, navItems);
            }
          }
        };
      }
    });
    UsefulTestCase.assertOrderedCollection(targets, checkers);
  }
  /**
   * If the renderer navigates somewhere, renders each target PSI element as a comparable string:
   * qualified class name, "Class#method(params)", or XML tag name. Other element kinds are a
   * test-authoring error and fail with an AssertionError.
   */
  private static void processGutterIcon(final boolean orderMatters, final List<Collection<String>> targets, final Object renderer) {
    if (renderer instanceof NavigationGutterIconRenderer) {
      final NavigationGutterIconRenderer navRenderer = (NavigationGutterIconRenderer)renderer;
      final List<PsiElement> elements = navRenderer.getTargetElements();
      Collection<String> toStrings = ContainerUtil.map(elements, new Function<PsiElement, String>() {
        public String fun(final PsiElement element) {
          if (element instanceof PsiClass) {
            return ((PsiClass)element).getQualifiedName();
          }
          if (element instanceof PsiMethod) {
            final PsiMethod method = (PsiMethod)element;
            return method.getContainingClass().getQualifiedName() + "#" + method.getName() + method.getParameterList().getText();
          }
          if (element instanceof XmlTag) {
            return ((XmlTag)element).getName();
          }
          throw new AssertionError(element);
        }
      });
      if (!orderMatters) {
        // Order-insensitive comparison: collapse to a set.
        toStrings = new THashSet<String>(toStrings);
      }
      targets.add(toStrings);
    }
  }
}
| apache-2.0 |
azwickey-pivotal/SpringTrader | springboottrades-quotes/src/main/java/io/pivotal/quotes/controller/QuoteController.java | 3722 | package io.pivotal.quotes.controller;
import java.io.IOException;
import java.util.List;
import javax.servlet.http.HttpServletResponse;
import io.pivotal.quotes.domain.CompanyInfo;
import io.pivotal.quotes.domain.Quote;
import io.pivotal.quotes.exception.SymbolNotFoundException;
import io.pivotal.quotes.service.QuoteService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
/**
* Rest Controller providing the REST API for the Quote Service.
* Provides two calls (both HTTP GET methods):
* - /quote/{symbol} - Retrieves the current quote for a given symbol.
* - /company/{name} - Retrieves a list of company information for companies that match the {name}.
* @author David Ferreira Pinto
*
*/
@RestController
public class QuoteController {

    private static final Logger logger = LoggerFactory.getLogger(QuoteController.class);

    /**
     * The service to delegate calls to.
     */
    @Autowired
    private QuoteService service;

    /**
     * Retrieves the current quote for the given symbol.
     *
     * @param symbol The symbol to retrieve the quote for.
     * @return The Quote
     * @throws SymbolNotFoundException if the symbol is not valid.
     */
    @RequestMapping(value = "/quote/{symbol}", method = RequestMethod.GET)
    public ResponseEntity<Quote> getQuote(@PathVariable("symbol") final String symbol) throws SymbolNotFoundException {
        logger.debug("QuoteController.getQuote: retrieving quote for: {}", symbol);
        Quote quote = service.getQuote(symbol);
        // Parameterized SLF4J logging instead of eager String.format.
        logger.info("Retrieved symbol: {} with quote {}", symbol, quote);
        return new ResponseEntity<Quote>(quote,
                getNoCacheHeaders(), HttpStatus.OK);
    }

    /**
     * Searches for companies that have a name or symbol matching the parameter.
     *
     * @param name The name or symbol to search for.
     * @return The list of companies that match the search parameter.
     */
    @RequestMapping(value = "/company/{name}", method = RequestMethod.GET)
    public ResponseEntity<List<CompanyInfo>> getCompanies(@PathVariable("name") final String name) {
        logger.debug("QuoteController.getCompanies: retrieving companies for: {}", name);
        // Previously mixed String.format with an SLF4J "{}" placeholder, which was never
        // substituted; use a single parameterized message instead.
        logger.info("Retrieved companies with search parameter: {} - list: {}", name, companies());
        List<CompanyInfo> companies = service.getCompanyInfo(name);
        logger.info("Retrieved companies with search parameter: {} - list: {}", name, companies);
        return new ResponseEntity<List<CompanyInfo>>(companies, HttpStatus.OK);
    }

    /**
     * Generates HttpHeaders that have the no-cache set.
     * @return HttpHeaders.
     */
    private HttpHeaders getNoCacheHeaders() {
        HttpHeaders responseHeaders = new HttpHeaders();
        responseHeaders.set("Cache-Control", "no-cache");
        return responseHeaders;
    }

    /**
     * Handles the response to the client if there is any exception during the processing of HTTP requests.
     *
     * @param e The exception thrown during the processing of the request.
     * @param response The HttpResponse object.
     * @throws IOException
     */
    @ExceptionHandler({Exception.class})
    public void handleException(Exception e, HttpServletResponse response) throws IOException {
        // Log the full stack trace through SLF4J rather than printStackTrace(), so it
        // reaches the configured log appenders; then report the failure as a 400.
        logger.warn("Handle Error: " + e.getMessage(), e);
        response.sendError(HttpStatus.BAD_REQUEST.value(), "ERROR: " + e.getMessage());
    }
}
| apache-2.0 |
apache/commons-collections | src/main/java/org/apache/commons/collections4/functors/TruePredicate.java | 1987 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections4.functors;
import java.io.Serializable;
import org.apache.commons.collections4.Predicate;
/**
 * Predicate implementation that always returns true.
 *
 * <p>Stateless, so a single shared instance is used; serialization-safe via
 * {@code readResolve}.</p>
 *
 * @since 3.0
 */
public final class TruePredicate<T> implements Predicate<T>, Serializable {
    /** Serial version UID */
    private static final long serialVersionUID = 3374767158756189740L;
    /** Singleton predicate instance */
    @SuppressWarnings("rawtypes")
    public static final Predicate INSTANCE = new TruePredicate<>();
    /**
     * Factory returning the singleton instance.
     *
     * @param <T> the type that the predicate queries
     * @return the singleton instance
     * @since 3.1
     */
    public static <T> Predicate<T> truePredicate() {
        return INSTANCE;
    }
    /**
     * Restricted constructor.
     */
    private TruePredicate() {
    }
    /**
     * Evaluates the predicate returning true always.
     *
     * @param object the input object
     * @return true always
     */
    @Override
    public boolean evaluate(final T object) {
        return true;
    }
    /**
     * Ensures deserialization yields the shared singleton rather than a new instance.
     *
     * @return {@link #INSTANCE}
     */
    private Object readResolve() {
        return INSTANCE;
    }
}
| apache-2.0 |
rozza/mongo-java-driver | driver-core/src/main/com/mongodb/internal/bulk/DeleteRequest.java | 3540 | /*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mongodb.internal.bulk;
import com.mongodb.client.model.Collation;
import org.bson.BsonDocument;
import org.bson.conversions.Bson;
import static com.mongodb.assertions.Assertions.notNull;
/**
 * A representation of a delete.
 *
 * <p>Only {@code filter} is required; all other options are nullable and default to the
 * server's behavior.</p>
 *
 * @since 3.0
 */
public final class DeleteRequest extends WriteRequest {
    private final BsonDocument filter;     // never null (validated in the constructor)
    private boolean isMulti = true;        // delete all matching documents by default
    private Collation collation;           // nullable: server default collation
    private Bson hint;                     // nullable: no index hint
    private String hintString;             // nullable: no index hint by name
    /**
     * Construct a new instance.
     *
     * @param filter the non-null query filter
     */
    public DeleteRequest(final BsonDocument filter) {
        super();
        this.filter = notNull("filter", filter);
    }
    /**
     * Gets the query Object filter.
     *
     * @return the Object filter
     */
    public BsonDocument getFilter() {
        return filter;
    }
    /**
     * Sets whether all documents matching the query filter will be removed.
     *
     * @param isMulti true if all documents matching the query filter will be removed
     * @return this
     */
    public DeleteRequest multi(final boolean isMulti) {
        this.isMulti = isMulti;
        return this;
    }
    /**
     * Gets whether all documents matching the query filter will be removed. The default is true.
     *
     * @return whether all documents matching the query filter will be removed
     */
    public boolean isMulti() {
        return isMulti;
    }
    /**
     * Returns the collation options
     *
     * @return the collation options
     */
    public Collation getCollation() {
        return collation;
    }
    /**
     * Sets the collation options
     *
     * <p>A null value represents the server default.</p>
     * @param collation the collation options to use
     * @return this
     */
    public DeleteRequest collation(final Collation collation) {
        this.collation = collation;
        return this;
    }
    /**
     * Returns the hint for which index to use. The default is not to set a hint.
     *
     * @return the hint
     */
    public Bson getHint() {
        return hint;
    }
    /**
     * Sets the hint for which index to use. A null value means no hint is set.
     *
     * @param hint the hint
     * @return this
     */
    public DeleteRequest hint(final Bson hint) {
        this.hint = hint;
        return this;
    }
    /**
     * Gets the hint string to apply.
     *
     * @return the hint string, which should be the name of an existing index
     */
    public String getHintString() {
        return hintString;
    }
    /**
     * Sets the hint to apply.
     *
     * @param hint the name of the index which should be used for the operation
     * @return this
     */
    public DeleteRequest hintString(final String hint) {
        this.hintString = hint;
        return this;
    }
    @Override
    public Type getType() {
        return Type.DELETE;
    }
}
| apache-2.0 |
jdeppe-pivotal/geode | geode-core/src/integrationTest/java/org/apache/geode/internal/cache/diskPerf/DiskRegOverflowSyncJUnitPerformanceTest.java | 5029 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.diskPerf;
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import org.junit.Test;
import org.apache.geode.LogWriter;
import org.apache.geode.cache.Region;
import org.apache.geode.internal.cache.DiskRegionHelperFactory;
import org.apache.geode.internal.cache.DiskRegionProperties;
import org.apache.geode.internal.cache.DiskRegionTestingBase;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.eviction.EvictionCounters;
/**
 * Disk region perf test for overflow-only regions with synchronous writes.
 *
 * <p>Measures 1) the throughput of {@code put} operations that cause eviction
 * (overflow to disk), and 2) the throughput of {@code get} operations that
 * fault evicted entries back in from disk.
 */
public class DiskRegOverflowSyncJUnitPerformanceTest extends DiskRegionTestingBase {

  /** Size in bytes of each value written (5 KB). */
  private static final int ENTRY_SIZE = 1024 * 5;

  /**
   * Do not change the value OP_COUNT = 400. The test case is dependent on this value
   * (the hard-coded eviction counts asserted below assume it).
   */
  private static final int OP_COUNT = 400;

  private static final int HALF_OP_COUNT = OP_COUNT / 2;

  private LogWriter log = null;

  private final DiskRegionProperties diskProps = new DiskRegionProperties();

  @Override
  protected final void postSetUp() throws Exception {
    // Configure an overflow-only region that writes to disk synchronously.
    diskProps.setDiskDirs(dirs);
    diskProps.setOverFlowCapacity(1000);
    region = DiskRegionHelperFactory.getSyncOverFlowOnlyRegion(cache, diskProps);
    log = ds.getLogWriter();
  }

  @Override
  protected final void postTearDown() throws Exception {
    if (cache != null) {
      cache.close();
    }
    if (ds != null) {
      ds.disconnect();
    }
  }

  @Test
  public void testPopulatefor5Kbwrites() throws Exception {
    EvictionCounters lruStats = getLRUStats(region);

    // Put in larger stuff until we start evicting.
    int total;
    for (total = 0; lruStats.getEvictions() <= 0; total++) {
      log.info("DEBUG: total " + total + ", evictions " + lruStats.getEvictions());
      int[] array = new int[250];
      array[0] = total;
      region.put(total, array);
    }
    assertEquals(1, lruStats.getEvictions());

    // Put another batch of 5 KB values, which will be evicted to disk.
    final byte[] value = new byte[ENTRY_SIZE];
    Arrays.fill(value, (byte) 77);

    for (int i = 0; i < HALF_OP_COUNT; i++) {
      log.info("DEBUG: total " + total + ", evictions " + lruStats.getEvictions());
      region.put("" + i, value);
    }
    assertEquals(201, lruStats.getEvictions());

    // The next puts will be written to disk; time them.
    long startTime = System.currentTimeMillis();
    for (int i = 201; i < OP_COUNT; i++) {
      region.put("" + i, value);
    }
    long endTime = System.currentTimeMillis();
    System.out.println(" done with putting");

    // Now time gets for the entries which are on disk (fault-in path).
    long startTimeGet = System.currentTimeMillis();
    for (int i = 0; i < HALF_OP_COUNT; i++) {
      region.get("" + i);
    }
    long endTimeGet = System.currentTimeMillis();
    System.out.println(" done with getting");

    region.close(); // closes disk file which will flush all buffers

    // Report put throughput.
    float et = endTime - startTime;
    float etSecs = et / 1000f;
    float opPerSec = etSecs == 0 ? 0 : (OP_COUNT / (et / 1000f));
    float bytesPerSec = etSecs == 0 ? 0 : ((OP_COUNT * ENTRY_SIZE) / (et / 1000f));
    String stats = "et=" + et + "ms writes/sec=" + opPerSec + " bytes/sec=" + bytesPerSec;
    log.info(stats);
    System.out.println("Stats for 5kb writes: Perf of Put which is cauing eviction :" + stats);

    // Report get (fault-in) throughput.
    float etGet = endTimeGet - startTimeGet;
    float etSecsGet = etGet / 1000f;
    float opPerSecGet = etSecsGet == 0 ? 0 : (OP_COUNT / (etGet / 1000f));
    float bytesPerSecGet = etSecsGet == 0 ? 0 : ((OP_COUNT * ENTRY_SIZE) / (etGet / 1000f));
    String statsGet =
        "etGet=" + etGet + "ms gets/sec=" + opPerSecGet + " bytes/sec=" + bytesPerSecGet;
    log.info(statsGet);
    System.out.println("Perf Stats of get which is fauting in :" + statsGet);
  }

  /** Unwraps the region's eviction counters for assertion. */
  private EvictionCounters getLRUStats(Region region) {
    return ((LocalRegion) region).getEvictionController().getCounters();
  }
}
| apache-2.0 |
pryzach/midao | midao-jdbc-core-jdbc4-test/src/test/java/org/midao/jdbc/core/db/mssql/UpdateTest.java | 10116 | /*
* Copyright 2013 Zakhar Prykhoda
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.midao.jdbc.core.db.mssql;
import org.midao.jdbc.core.MjdbcFactory;
import org.midao.jdbc.core.db.DBConstants;
import org.midao.jdbc.core.db.DBUpdate;
import org.midao.jdbc.core.db.DBUpdateQueryStructure;
import org.midao.jdbc.core.db.QueryStructure;
import org.midao.jdbc.core.service.QueryRunnerService;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
/**
 * MS SQL Server integration tests for update operations.
 *
 * <p>Each test wraps a generic update query structure so that the test table is
 * created with the MS SQL specific DDL, then executes the structure twice: once
 * with a DataSource-backed runner and once with a Connection-backed runner.
 * Tests are skipped silently when the database is not reachable.</p>
 */
public class UpdateTest extends BaseMSSQL {

    /**
     * Wraps {@code defaultStructure} so that {@code create} issues the MS SQL
     * specific CREATE TABLE statement while {@code execute} and {@code drop}
     * are delegated unchanged.
     *
     * @param defaultStructure the generic query structure to delegate to
     * @param values shared value map; also receives "createUpdatedCount"
     * @return the wrapping structure
     */
    private QueryStructure wrapWithMssqlCreate(final QueryStructure defaultStructure, Map<String, Object> values) {
        return new QueryStructure(values) {
            @Override
            public void create(QueryRunnerService runner) throws SQLException {
                this.values.put("createUpdatedCount", (Integer) runner.update(DBConstants.CREATE_STUDENT_TABLE_MSSQL));
            }

            @Override
            public void execute(QueryRunnerService runner) throws SQLException {
                defaultStructure.execute(runner);
            }

            @Override
            public void drop(QueryRunnerService runner) throws SQLException {
                defaultStructure.drop(runner);
            }
        };
    }

    public void testGeneratedKeys() throws SQLException {
        if (!this.checkConnected(dbName)) {
            return;
        }
        Map<String, Object> values = new HashMap<String, Object>();
        QueryStructure structure = wrapWithMssqlCreate(DBUpdateQueryStructure.updateGeneratedKeysDS(values), values);

        DBUpdate.updateGeneratedKeysDS(structure, MjdbcFactory.getQueryRunner(this.dataSource));
        DBUpdate.updateGeneratedKeysDS(structure, MjdbcFactory.getQueryRunner(this.conn));
    }

    public void testXmlGeneratedKeys() throws SQLException {
        if (!this.checkConnected(dbName)) {
            return;
        }
        Map<String, Object> values = new HashMap<String, Object>();
        QueryStructure structure = wrapWithMssqlCreate(DBUpdateQueryStructure.updateXmlGeneratedKeysDS(values), values);

        // NOTE(review): this delegates to DBUpdate.updateGeneratedKeysDS even though the
        // structure comes from updateXmlGeneratedKeysDS — looks like a copy-paste;
        // confirm whether DBUpdate.updateXmlGeneratedKeysDS was intended.
        DBUpdate.updateGeneratedKeysDS(structure, MjdbcFactory.getQueryRunner(this.dataSource));
        DBUpdate.updateGeneratedKeysDS(structure, MjdbcFactory.getQueryRunner(this.conn));
    }

    public void testRowCountHandler() throws SQLException {
        if (!this.checkConnected(dbName)) {
            return;
        }
        Map<String, Object> values = new HashMap<String, Object>();
        QueryStructure structure = wrapWithMssqlCreate(DBUpdateQueryStructure.updateRowCountHandlerDS(values), values);

        DBUpdate.updateRowCountHandlerDS(structure, MjdbcFactory.getQueryRunner(this.dataSource));
        DBUpdate.updateRowCountHandlerDS(structure, MjdbcFactory.getQueryRunner(this.conn));
    }

    public void testUpdateWithParams() throws SQLException {
        if (!this.checkConnected(dbName)) {
            return;
        }
        Map<String, Object> values = new HashMap<String, Object>();
        QueryStructure structure = wrapWithMssqlCreate(DBUpdateQueryStructure.updateWParamsDS(values), values);

        DBUpdate.updateWParamsDS(structure, MjdbcFactory.getQueryRunner(this.dataSource));
        DBUpdate.updateWParamsDS(structure, MjdbcFactory.getQueryRunner(this.conn));
    }

    public void testInputHandler1DS() throws SQLException {
        if (!this.checkConnected(dbName)) {
            return;
        }
        Map<String, Object> values = new HashMap<String, Object>();
        QueryStructure structure = wrapWithMssqlCreate(DBUpdateQueryStructure.updateInputHandler1DS(values), values);

        DBUpdate.updateInputHandler1DS(structure, MjdbcFactory.getQueryRunner(this.dataSource));
        DBUpdate.updateInputHandler1DS(structure, MjdbcFactory.getQueryRunner(this.conn));
    }

    public void testInputHandler2DS() throws SQLException {
        if (!this.checkConnected(dbName)) {
            return;
        }
        Map<String, Object> values = new HashMap<String, Object>();
        QueryStructure structure = wrapWithMssqlCreate(DBUpdateQueryStructure.updateInputHandler2DS(values), values);

        DBUpdate.updateInputHandler2DS(structure, MjdbcFactory.getQueryRunner(this.dataSource));
        DBUpdate.updateInputHandler2DS(structure, MjdbcFactory.getQueryRunner(this.conn));
    }

    public void testInputHandler3DS() throws SQLException {
        if (!this.checkConnected(dbName)) {
            return;
        }
        Map<String, Object> values = new HashMap<String, Object>();
        QueryStructure structure = wrapWithMssqlCreate(DBUpdateQueryStructure.updateInputHandler3DS(values), values);

        DBUpdate.updateInputHandler3DS(structure, MjdbcFactory.getQueryRunner(this.dataSource));
        DBUpdate.updateInputHandler3DS(structure, MjdbcFactory.getQueryRunner(this.conn));
    }
}
| apache-2.0 |
lihongjie/spring-tutorial | spring-security-tutorial/SpringSecurityCusotmLoginFormAnnotationExample/src/main/java/com/websystique/springsecurity/configuration/SpringMvcInitializer.java | 568 | package com.websystique.springsecurity.configuration;
import org.springframework.web.servlet.support.AbstractAnnotationConfigDispatcherServletInitializer;
/**
 * Servlet 3.0+ initializer that bootstraps the Spring MVC DispatcherServlet
 * without a web.xml, wiring the annotation-based application configuration.
 */
public class SpringMvcInitializer extends AbstractAnnotationConfigDispatcherServletInitializer {

    /** Root application context configuration classes. */
    @Override
    protected Class<?>[] getRootConfigClasses() {
        // Use a parameterized array creation instead of a raw Class[] to avoid
        // an unchecked-conversion warning; contents are unchanged.
        return new Class<?>[] { HelloWorldConfiguration.class };
    }

    /** No servlet-specific context; everything is registered in the root context. */
    @Override
    protected Class<?>[] getServletConfigClasses() {
        return null;
    }

    /** Maps the dispatcher servlet to the application root. */
    @Override
    protected String[] getServletMappings() {
        return new String[] { "/" };
    }
}
| apache-2.0 |
mesutcelik/hazelcast | hazelcast/src/test/java/com/hazelcast/internal/eviction/CacheExpirationStressTest.java | 5526 | /*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.eviction;
import com.hazelcast.cache.HazelcastExpiryPolicy;
import com.hazelcast.config.CacheConfig;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.test.HazelcastSerialClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.OverridePropertyRule;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.NightlyTest;
import com.hazelcast.test.backup.BackupAccessor;
import com.hazelcast.test.backup.TestBackupUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import javax.cache.Cache;
import javax.cache.configuration.FactoryBuilder;
import javax.cache.event.CacheEntryExpiredListener;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.hazelcast.cache.CacheTestSupport.createServerCachingProvider;
import static com.hazelcast.cache.impl.eviction.CacheClearExpiredRecordsTask.PROP_TASK_PERIOD_SECONDS;
import static com.hazelcast.test.OverridePropertyRule.set;
import static com.hazelcast.test.backup.TestBackupUtils.assertBackupSizeEventually;
/**
 * Stress test that continuously mutates a cache whose entries expire after one
 * second, then verifies that once the load stops every member — primaries and
 * all backups — eventually holds zero entries.
 */
@RunWith(HazelcastSerialClassRunner.class)
@Category(NightlyTest.class)
public class CacheExpirationStressTest extends HazelcastTestSupport {

    /** Run the expired-record cleanup task every 2 seconds so expiration is exercised during the test. */
    @Rule
    public final OverridePropertyRule overrideTaskSecondsRule = set(PROP_TASK_PERIOD_SECONDS, "2");

    protected final String cacheName = "test";
    protected String cacheNameWithPrefix;

    private static final int CLUSTER_SIZE = 5;
    private static final int KEY_RANGE = 100000;
    /** Wall-clock time the mutator threads keep the cache busy. */
    private static final int DURATION_SECONDS = 60;

    private final HazelcastInstance[] instances = new HazelcastInstance[CLUSTER_SIZE];
    private TestHazelcastInstanceFactory factory;
    // java.util.Random is thread-safe (though contended); shared by all mutator threads.
    private final Random random = new Random();
    private final AtomicBoolean done = new AtomicBoolean();

    @Before
    public void setup() {
        factory = createHazelcastInstanceFactory(CLUSTER_SIZE);
        for (int i = 0; i < CLUSTER_SIZE; i++) {
            instances[i] = factory.newHazelcastInstance(getConfig());
        }
    }

    /** Cache config with a 1-second expiry in every dimension and backups on all other members. */
    protected CacheConfig getCacheConfig() {
        CacheConfig cacheConfig = new CacheConfig();
        cacheConfig.setExpiryPolicyFactory(FactoryBuilder.factoryOf(new HazelcastExpiryPolicy(1000, 1000, 1000)));
        cacheConfig.setName(cacheName);
        cacheConfig.setBackupCount(CLUSTER_SIZE - 1);
        return cacheConfig;
    }

    @Test
    public void test() throws InterruptedException {
        assertClusterSize(CLUSTER_SIZE, instances);

        // One mutator thread per member, each hammering the same named cache.
        List<Thread> list = new ArrayList<>();
        for (int i = 0; i < CLUSTER_SIZE; i++) {
            CacheConfig cacheConfig = getCacheConfig();
            Cache cache = createServerCachingProvider(instances[i])
                    .getCacheManager().createCache(cacheName, cacheConfig);
            cacheNameWithPrefix = cache.getName();
            list.add(new Thread(new TestRunner(cache, done)));
        }

        for (Thread thread : list) {
            thread.start();
        }
        sleepAtLeastSeconds(DURATION_SECONDS);

        done.set(true);
        for (Thread thread : list) {
            thread.join();
        }

        assertRecords(instances);
    }

    /** Asserts that every backup replica and every member's cache drain to zero entries. */
    protected void assertRecords(final HazelcastInstance[] instances) {
        for (int i = 1; i < instances.length; i++) {
            BackupAccessor backupAccessor = TestBackupUtils.newCacheAccessor(instances, cacheNameWithPrefix, i);
            assertBackupSizeEventually(0, backupAccessor);
        }
        for (int i = 0; i < instances.length; i++) {
            final int index = i;
            assertEqualsEventually(() -> instances[index].getCacheManager().getCache(cacheName).size(), 0);
        }
        instances[0].getCacheManager().getCache(cacheName).destroy();
    }

    /** Performs one random cache operation: put, remove, or get on a random key. */
    protected void doOp(Cache cache) {
        int op = random.nextInt(3);
        int key = random.nextInt(KEY_RANGE);
        int val = random.nextInt(KEY_RANGE);
        switch (op) {
            case 0:
                cache.put(key, val);
                break;
            case 1:
                cache.remove(key);
                break;
            case 2:
                cache.get(key);
                break;
            default:
                cache.get(key);
                break;
        }
    }

    /** Mutator loop: performs random operations until signalled to stop. */
    class TestRunner implements Runnable {
        private final Cache cache;
        private final AtomicBoolean done;

        TestRunner(Cache cache, AtomicBoolean done) {
            this.cache = cache;
            this.done = done;
        }

        @Override
        public void run() {
            while (!done.get()) {
                doOp(cache);
            }
        }
    }
}
| apache-2.0 |
archord/svom | src/main/java/com/gwac/action/GwacFileReceive.java | 6600 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.gwac.action;
/**
*
* @author xy
*/
import com.gwac.dao.DataProcessMachineDAO;
import com.gwac.dao.SyncFileDao;
import com.gwac.model.SyncFile;
import static com.opensymphony.xwork2.Action.ERROR;
import static com.opensymphony.xwork2.Action.INPUT;
import static com.opensymphony.xwork2.Action.SUCCESS;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionSupport;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.Result;
/*parameter:currentDirectory, configFile, [fileUpload], [fileUpload].*/
/* curl command example: */
/* curl -F currentDirectory=dirName */
/* -F configFile=@configFileName */
/* -F fileUpload=@simulationUI2.tar.gz */
/* -F fileUpload=@simulationUI.tar.gz http://localhost:8080/svom/resultAction.action*/
/**
* @author xy
*/
//@InterceptorRef("jsonValidationWorkflowStack")
// NOTE: enabling the annotation above breaks file upload (files no longer arrive).
/**
 * Struts2 action that receives GWAC monitor-image uploads, stores them under
 * the configured monitor-image directory, and records each file for later
 * synchronization.
 *
 * <p>Example client call:
 * {@code curl -F fileUpload=@c01ccdimg.jpg http://host/svom/gwacFileReceive.action}</p>
 */
public class GwacFileReceive extends ActionSupport {

    private static final Log log = LogFactory.getLog(GwacFileReceive.class);

    private DataProcessMachineDAO dpmDao;
    private SyncFileDao sfDao;

    // The Struts2 fileUpload interceptor populates these three parallel lists.
    private List<File> fileUpload = new ArrayList<File>();
    private List<String> fileUploadContentType = new ArrayList<String>();
    private List<String> fileUploadFileName = new ArrayList<String>();

    // Accumulated response text rendered by manage/result.jsp.
    private String echo = "";

    /**
     * Stores each uploaded file, updates the monitor-image timestamp for CCD
     * images, and records a {@link SyncFile} row per stored file.
     *
     * @return SUCCESS when all files were stored, ERROR on validation failure
     * @throws Exception on I/O failures while moving files
     */
    @Action(value = "gwacFileReceive", results = {
        @Result(location = "manage/result.jsp", name = SUCCESS),
        @Result(location = "manage/result.jsp", name = INPUT),
        @Result(location = "manage/result.jsp", name = ERROR)})
    public String upload() throws Exception {
        boolean flag = true;
        String result = SUCCESS;

        if (fileUpload.isEmpty()) {
            setEcho(echo + "Error, must upload data file(fileUpload).\n");
            flag = false;
        }
        if (fileUpload.size() != fileUploadFileName.size()) {
            setEcho(echo + "Error,please check upload command and retry!\n");
            flag = false;
        }

        if (flag) {
            String destPath = getText("gwac.data.root.directory");
            // Ensure a trailing slash (endsWith is also safe for an empty value,
            // unlike the previous charAt(length - 1) check).
            if (!destPath.endsWith("/")) {
                destPath += "/";
            }

            // Destination directory for monitor images.
            String storeDirName = getText("gwac.monitorimage.directory");
            String otDstPath = destPath + storeDirName + "/";
            File tDir = new File(otDstPath);
            if (!tDir.exists()) {
                tDir.mkdirs();
            }

            // Receive the uploaded files.
            int i = 0;
            for (File file : fileUpload) {
                // SECURITY: strip any directory components from the client-supplied
                // name to prevent path traversal outside otDstPath (e.g. "../../x").
                String tfilename = new File(fileUploadFileName.get(i++).trim()).getName();
                if (tfilename.isEmpty()) {
                    continue;
                }
                File destFile = new File(otDstPath, tfilename);
                log.debug("receive file " + tfilename);

                // An existing file must be deleted first, otherwise
                // FileUtils.moveFile throws FileExistsException.
                if (destFile.exists()) {
                    log.warn(destFile + " already exist, delete it.");
                    FileUtils.forceDelete(destFile);
                }
                if (file != null && file.exists()) {
                    FileUtils.moveFile(file, destFile);
                }

                if (tfilename.endsWith("ccdimg.jpg")) {
                    // Characters at index 1-2 encode the data-process-machine id,
                    // e.g. "c01ccdimg.jpg" -> 1.
                    try {
                        int dpmId = Integer.parseInt(tfilename.substring(1, 3));
                        dpmDao.updateMonitorImageTime(dpmId);
                    } catch (NumberFormatException nfe) {
                        log.error("ccd img name " + tfilename + " wrong formate!");
                    }
                }

                // Record the stored file for the synchronization job.
                SyncFile tsf = new SyncFile();
                tsf.setFileName(tfilename);
                tsf.setIsSync(false);
                tsf.setIsSyncSuccess(false);
                tsf.setPath(storeDirName);
                tsf.setStoreTime(new Date());
                sfDao.save(tsf);
            }
            echo += "success\n";
        } else {
            result = ERROR;
        }
        log.debug(echo);

        /* With Struts2 tags the response would contain two extra blank lines,
         * which looks bad on the command line; jsp's "out" does not. So the
         * result message is stored in the session and read back by the jsp. */
        ActionContext ctx = ActionContext.getContext();
        ctx.getSession().put("echo", getEcho());
        return result;
    }

    public String display() {
        return NONE;
    }

    /** @return the accumulated response text */
    public String getEcho() {
        return echo;
    }

    /** @param echo the response text to set */
    public void setEcho(String echo) {
        this.echo = echo;
    }

    /** @param dpmDao the data-process-machine DAO to inject */
    public void setDpmDao(DataProcessMachineDAO dpmDao) {
        this.dpmDao = dpmDao;
    }

    /** @return the uploaded files */
    public List<File> getFileUpload() {
        return fileUpload;
    }

    /** @param fileUpload the uploaded files to set */
    public void setFileUpload(List<File> fileUpload) {
        this.fileUpload = fileUpload;
    }

    /** @return the content types of the uploaded files */
    public List<String> getFileUploadContentType() {
        return fileUploadContentType;
    }

    /** @param fileUploadContentType the content types to set */
    public void setFileUploadContentType(List<String> fileUploadContentType) {
        this.fileUploadContentType = fileUploadContentType;
    }

    /** @return the original file names of the uploaded files */
    public List<String> getFileUploadFileName() {
        return fileUploadFileName;
    }

    /** @param fileUploadFileName the original file names to set */
    public void setFileUploadFileName(List<String> fileUploadFileName) {
        this.fileUploadFileName = fileUploadFileName;
    }

    /** @return the sync-file DAO */
    public SyncFileDao getSfDao() {
        return sfDao;
    }

    /** @param sfDao the sync-file DAO to inject */
    public void setSfDao(SyncFileDao sfDao) {
        this.sfDao = sfDao;
    }
}
| apache-2.0 |
cts2/cts2-framework | cts2-webapp/src/main/java/edu/mayo/cts2/framework/webapp/rest/controller/ValueSetDefinitionController.java | 18002 | /*
* Copyright: (c) 2004-2011 Mayo Foundation for Medical Education and
* Research (MFMER). All rights reserved. MAYO, MAYO CLINIC, and the
* triple-shield Mayo logo are trademarks and service marks of MFMER.
*
* Except as contained in the copyright notice above, or as used to identify
* MFMER as the author of this software, the trade names, trademarks, service
* marks, or product names of the copyright holder shall not be used in
* advertising, promotion or otherwise in connection with this software without
* prior written authorization of the copyright holder.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.mayo.cts2.framework.webapp.rest.controller;
import edu.mayo.cts2.framework.model.command.Page;
import edu.mayo.cts2.framework.model.core.Message;
import edu.mayo.cts2.framework.model.core.VersionTagReference;
import edu.mayo.cts2.framework.model.extension.LocalIdValueSetDefinition;
import edu.mayo.cts2.framework.model.service.core.Query;
import edu.mayo.cts2.framework.model.service.exception.UnknownValueSetDefinition;
import edu.mayo.cts2.framework.model.util.ModelUtils;
import edu.mayo.cts2.framework.model.valuesetdefinition.ValueSetDefinition;
import edu.mayo.cts2.framework.model.valuesetdefinition.ValueSetDefinitionDirectory;
import edu.mayo.cts2.framework.model.valuesetdefinition.ValueSetDefinitionList;
import edu.mayo.cts2.framework.model.valuesetdefinition.ValueSetDefinitionMsg;
import edu.mayo.cts2.framework.service.command.restriction.ValueSetDefinitionQueryServiceRestrictions;
import edu.mayo.cts2.framework.service.profile.valuesetdefinition.ValueSetDefinitionMaintenanceService;
import edu.mayo.cts2.framework.service.profile.valuesetdefinition.ValueSetDefinitionQuery;
import edu.mayo.cts2.framework.service.profile.valuesetdefinition.ValueSetDefinitionQueryService;
import edu.mayo.cts2.framework.service.profile.valuesetdefinition.ValueSetDefinitionReadService;
import edu.mayo.cts2.framework.service.profile.valuesetdefinition.name.ValueSetDefinitionReadId;
import edu.mayo.cts2.framework.webapp.naming.TagResolver;
import edu.mayo.cts2.framework.webapp.rest.command.QueryControl;
import edu.mayo.cts2.framework.webapp.rest.command.RestFilter;
import edu.mayo.cts2.framework.webapp.rest.command.RestReadContext;
import edu.mayo.cts2.framework.webapp.rest.query.ValueSetDefinitionQueryBuilder;
import org.apache.commons.lang.StringUtils;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.util.UrlPathHelper;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.util.HashMap;
import java.util.Map;
/**
* The Class ValueSetDefinitionController.
*
* @author <a href="mailto:kevin.peterson@mayo.edu">Kevin Peterson</a>
*/
@Controller
public class ValueSetDefinitionController extends AbstractMessageWrappingController {
/** Query (search/enumerate) service profile for value set definitions. */
@Cts2Service
private ValueSetDefinitionQueryService valueSetDefinitionQueryService;

/** Read (by name/URI) service profile for value set definitions. */
@Cts2Service
private ValueSetDefinitionReadService valueSetDefinitionReadService;

/** Maintenance (create/update/delete) service profile for value set definitions. */
@Cts2Service
private ValueSetDefinitionMaintenanceService valueSetDefinitionMaintenanceService;

/** Resolves version tags (e.g. CURRENT) to concrete versions. */
@Resource
private TagResolver tagResolver;
/**
 * Binds a {@link LocalIdValueSetDefinition} to the URL template variables used
 * when rendering links to a definition: the containing value set's name and
 * the definition's local id.
 */
private final static UrlTemplateBinder<LocalIdValueSetDefinition> URL_BINDER =
    new UrlTemplateBinder<LocalIdValueSetDefinition>(){

    @Override
    public Map<String,String> getPathValues(LocalIdValueSetDefinition resource) {
        Map<String,String> returnMap = new HashMap<String,String>();
        returnMap.put(VAR_VALUESETID, resource.getResource().getDefinedValueSet().getContent());
        returnMap.put(VAR_VALUESETDEFINITIONID, resource.getLocalID());
        return returnMap;
    }

};
/**
 * Wraps a {@link LocalIdValueSetDefinition} in the CTS2 message envelope
 * ({@link ValueSetDefinitionMsg}) returned to REST clients.
 */
final static MessageFactory<LocalIdValueSetDefinition> MESSAGE_FACTORY =
    new MessageFactory<LocalIdValueSetDefinition>() {

    @Override
    public Message createMessage(LocalIdValueSetDefinition resource) {
        ValueSetDefinitionMsg msg = new ValueSetDefinitionMsg();
        msg.setValueSetDefinition(resource.getResource());
        return msg;
    }

};
/**
 * Gets the value set definitions of a value set (HTTP GET form).
 *
 * <p>Delegates to the POST form with a null {@link Query}.</p>
 *
 * @param httpServletRequest the http servlet request
 * @param restReadContext the read context
 * @param queryControl the query control
 * @param restrictions the restrictions; the value set is filled in from the URL path
 * @param restFilter the filter
 * @param page the page
 * @param list whether to return a full resource list instead of a directory of summaries
 * @param valueSet the value set name or URI taken from the URL path
 * @return the matching value set definitions
 */
@RequestMapping(value={
    PATH_VALUESETDEFINITIONS_OF_VALUESET}, method=RequestMethod.GET)
public Object getValueSetDefinitionsOfValueSet(
        HttpServletRequest httpServletRequest,
        RestReadContext restReadContext,
        QueryControl queryControl,
        ValueSetDefinitionQueryServiceRestrictions restrictions,
        RestFilter restFilter,
        Page page,
        boolean list,
        @PathVariable(VAR_VALUESETID) String valueSet) {

    return this.getValueSetDefinitionsOfValueSet(
            httpServletRequest,
            restReadContext,
            queryControl,
            null,
            restrictions,
            restFilter,
            page,
            list,
            valueSet);
}
/**
 * Gets the value set definitions of a value set (HTTP POST form, with a
 * {@link Query} in the request body).
 *
 * @param httpServletRequest the http servlet request
 * @param restReadContext the read context
 * @param queryControl the query control
 * @param query the query from the request body (may be null when called from the GET form)
 * @param restrictions the restrictions; the value set is filled in from the URL path
 * @param restFilter the filter
 * @param page the page
 * @param list whether to return a full resource list instead of a directory of summaries
 * @param valueSet the value set name or URI taken from the URL path
 * @return the matching value set definitions
 */
@RequestMapping(value={
    PATH_VALUESETDEFINITIONS_OF_VALUESET}, method=RequestMethod.POST)
public Object getValueSetDefinitionsOfValueSet(
        HttpServletRequest httpServletRequest,
        RestReadContext restReadContext,
        QueryControl queryControl,
        @RequestBody Query query,
        ValueSetDefinitionQueryServiceRestrictions restrictions,
        RestFilter restFilter,
        Page page,
        boolean list,
        @PathVariable(VAR_VALUESETID) String valueSet) {

    // Restrict the generic definitions query to the value set named in the path.
    restrictions.setValueSet(ModelUtils.nameOrUriFromEither(valueSet));

    return this.getValueSetDefinitions(
            httpServletRequest,
            restReadContext,
            queryControl,
            query,
            restrictions,
            restFilter,
            page,
            list);
}
/**
 * HEAD mapping used to check whether a value set definition exists.
 *
 * <p>NOTE(review): the handler body is empty, so this method itself performs
 * no lookup — presumably existence checking/404 handling happens elsewhere in
 * the framework plumbing. Confirm, since as written a HEAD request always
 * reaches this no-op.</p>
 *
 * @param httpServletResponse the http servlet response
 * @param valueSetName the value set name from the URL path
 * @param valueSetDefinitionDocumentUri the value set definition local id from the URL path
 */
@RequestMapping(value=PATH_VALUESETDEFINITION_OF_VALUESET_BYID, method=RequestMethod.HEAD)
@ResponseBody
public void doesValueSetDefinitionExist(
        HttpServletResponse httpServletResponse,
        @PathVariable(VAR_VALUESETID) String valueSetName,
        @PathVariable(VAR_VALUESETDEFINITIONID) String valueSetDefinitionDocumentUri) {

    //
}
/**
 * HEAD mapping that reports (via response headers) the count of value set
 * definitions of a value set.
 *
 * @param httpServletResponse the http servlet response the count is written to
 * @param restReadContext the read context
 * @param restrictions the restrictions; the value set is filled in from the URL path
 * @param restFilter the filter
 * @param valueSet the value set name or URI taken from the URL path
 */
@RequestMapping(value={
    PATH_VALUESETDEFINITIONS_OF_VALUESET}, method=RequestMethod.HEAD)
@ResponseBody
public void getValueSetDefinitionsOfValueSetCount(
        HttpServletResponse httpServletResponse,
        RestReadContext restReadContext,
        ValueSetDefinitionQueryServiceRestrictions restrictions,
        RestFilter restFilter,
        @PathVariable(VAR_VALUESETID) String valueSet) {

    // Restrict the generic count query to the value set named in the path.
    restrictions.setValueSet(ModelUtils.nameOrUriFromEither(valueSet));

    this.getValueSetDefinitionsCount(
            httpServletResponse,
            restReadContext,
            restrictions,
            restFilter);
}
/**
 * Gets all value set definitions (HTTP GET form); delegates to the POST form
 * with a null {@link Query}.
 *
 * @param httpServletRequest the http servlet request
 * @param restReadContext the read context
 * @param queryControl the query control
 * @param restrictions the restrictions
 * @param restFilter the filter
 * @param page the page
 * @param list whether to return a full resource list instead of a directory of summaries
 * @return the matching value set definitions
 */
@RequestMapping(value={
    PATH_VALUESETDEFINITIONS}, method=RequestMethod.GET)
public Object getValueSetDefinitions(
        HttpServletRequest httpServletRequest,
        RestReadContext restReadContext,
        QueryControl queryControl,
        ValueSetDefinitionQueryServiceRestrictions restrictions,
        RestFilter restFilter,
        Page page,
        boolean list) {

    return this.getValueSetDefinitions(
            httpServletRequest,
            restReadContext,
            queryControl,
            null,
            restrictions,
            restFilter,
            page,
            list);
}
/**
 * Gets all value set definitions (HTTP POST form, with a {@link Query} in the
 * request body).
 *
 * @param httpServletRequest the http servlet request
 * @param restReadContext the read context
 * @param queryControl the query control
 * @param query the query from the request body (may be null when called from the GET form)
 * @param restrictions the restrictions
 * @param restFilter the filter
 * @param page the page
 * @param list whether to return a full resource list instead of a directory of summaries
 * @return the matching value set definitions
 */
@RequestMapping(value={
    PATH_VALUESETDEFINITIONS}, method=RequestMethod.POST)
public Object getValueSetDefinitions(
        HttpServletRequest httpServletRequest,
        RestReadContext restReadContext,
        QueryControl queryControl,
        @RequestBody Query query,
        ValueSetDefinitionQueryServiceRestrictions restrictions,
        RestFilter restFilter,
        Page page,
        boolean list) {

    // Assemble the service-level query from the REST-level inputs ...
    ValueSetDefinitionQueryBuilder builder = this.getNewResourceQueryBuilder();

    ValueSetDefinitionQuery resourceQuery = builder.
            addQuery(query).
            addRestFilter(restFilter).
            addRestrictions(restrictions).
            addRestReadContext(restReadContext).
            build();

    // ... and execute it, paged, against the query service.
    return this.doQuery(
            httpServletRequest,
            list,
            this.valueSetDefinitionQueryService,
            resourceQuery,
            page,
            queryControl,
            ValueSetDefinitionDirectory.class,
            ValueSetDefinitionList.class);
}
/**
 * HEAD mapping that reports (via response headers) the total count of value
 * set definitions matching the given restrictions and filter.
 *
 * @param httpServletResponse the http servlet response the count is written to
 * @param restReadContext the read context
 * @param restrictions the restrictions
 * @param restFilter the filter
 */
@RequestMapping(value={
    PATH_VALUESETDEFINITIONS}, method=RequestMethod.HEAD)
@ResponseBody
public void getValueSetDefinitionsCount(
        HttpServletResponse httpServletResponse,
        RestReadContext restReadContext,
        ValueSetDefinitionQueryServiceRestrictions restrictions,
        RestFilter restFilter) {

    ValueSetDefinitionQueryBuilder builder = this.getNewResourceQueryBuilder();

    ValueSetDefinitionQuery resourceQuery = builder.
            addRestFilter(restFilter).
            addRestrictions(restrictions).
            addRestReadContext(restReadContext).
            build();

    int count =
        this.valueSetDefinitionQueryService.count(resourceQuery);

    // The count is returned as a response header, not as a body.
    this.setCount(count, httpServletResponse);
}
/**
 * Gets a value set definition by its document URI.
 *
 * @param httpServletRequest the http servlet request
 * @param restReadContext the read context
 * @param uri the value set definition document URI
 * @param redirect whether to redirect (vs. forward) to the name-based URL
 * @return the value set definition view
 */
@RequestMapping(value={
    PATH_VALUESETDEFINITION_BYURI
    },
  method=RequestMethod.GET)
public ModelAndView getValueSetDefinitionByUri(
    HttpServletRequest httpServletRequest,
    RestReadContext restReadContext,
    @RequestParam(PARAM_URI) String uri,
    @RequestParam(value="redirect", defaultValue=DEFAULT_REDIRECT) boolean redirect) {
  ValueSetDefinitionReadId id = new ValueSetDefinitionReadId(uri);

  return this.doReadByUri(
      httpServletRequest,
      MESSAGE_FACTORY,
      PATH_VALUESETDEFINITION_BYURI,
      PATH_VALUESETDEFINITION_OF_VALUESET_BYID,
      URL_BINDER,
      this.valueSetDefinitionReadService,
      restReadContext,
      UnknownValueSetDefinition.class,
      id,
      redirect);
}
/**
 * Gets a value set definition of a value set by its local (short) id.
 *
 * @param httpServletRequest the http servlet request
 * @param restReadContext the read context
 * @param valueSetName the containing value set name
 * @param definitionLocalId the definition's local id within the value set
 * @return the value set definition, or the unknown-definition error view
 */
@RequestMapping(value={
    PATH_VALUESETDEFINITION_OF_VALUESET_BYID
    },
  method=RequestMethod.GET)
public Object getValueSetDefinitionByLocalId(
    HttpServletRequest httpServletRequest,
    RestReadContext restReadContext,
    @PathVariable(VAR_VALUESETID) String valueSetName,
    @PathVariable(VAR_VALUESETDEFINITIONID) String definitionLocalId) {
  ValueSetDefinitionReadId id =
      new ValueSetDefinitionReadId(
          definitionLocalId,
          ModelUtils.nameOrUriFromName(valueSetName));

  return this.doRead(
      httpServletRequest,
      MESSAGE_FACTORY,
      this.valueSetDefinitionReadService,
      restReadContext,
      UnknownValueSetDefinition.class,
      id);
}
/**
 * Resolves a value set definition by version tag (e.g. {@code CURRENT}) and
 * redirects/forwards to the equivalent local-id-based resolution URL.
 *
 * @param httpServletRequest the http servlet request
 * @param restReadContext the read context
 * @param queryControl time/size limits for the query
 * @param valueSetName the value set name
 * @param tag the version tag to resolve; defaults to {@code DEFAULT_TAG}
 * @param redirect whether to issue an HTTP redirect (true) or a server-side
 *        forward (false)
 * @return the redirect or forward view
 */
@RequestMapping(value={
    PATH_VALUESETBYID + "/" + VALUE_SET_RESOLUTION_SHORT + "/" + ALL_WILDCARD,
    PATH_VALUESETBYID + "/" + VALUE_SET_RESOLUTION_SHORT
    },
  method=RequestMethod.GET)
public ModelAndView getValueSetDefinitionOfValueSetByTagRedirect(
    HttpServletRequest httpServletRequest,
    RestReadContext restReadContext,
    QueryControl queryControl,
    @PathVariable(VAR_VALUESETID) String valueSetName,
    @RequestParam(value=PARAM_TAG, defaultValue=DEFAULT_TAG) String tag,
    @RequestParam(value="redirect", defaultValue=DEFAULT_REDIRECT) boolean redirect) {
  //TODO: Accept tag URIs here
  VersionTagReference tagReference = new VersionTagReference(tag);

  // Translate the tag into the concrete definition local id it points to.
  String valueSetDefinitionLocalId =
      this.tagResolver.
        getVersionNameFromTag(
            this.valueSetDefinitionReadService,
            ModelUtils.nameOrUriFromName(valueSetName),
            tagReference,
            this.resolveRestReadContext(restReadContext));

  // Rebuild the request URI, splicing in the definition segment so the
  // target URL addresses the definition directly.
  String contextPath = this.getUrlPathHelper().getContextPath(httpServletRequest);

  String requestUri = StringUtils.removeStart(this.getUrlPathHelper().getRequestUri(httpServletRequest),contextPath);

  requestUri = StringUtils.removeStart(requestUri, "/");

  requestUri = StringUtils.replaceOnce(
      requestUri,
      VALUESET + "/" + valueSetName + "/",
      VALUESET + "/" + valueSetName + "/" +
      VALUESETDEFINITION_SHORT + "/" + valueSetDefinitionLocalId + "/");

  if(redirect){
    // Preserve the original query parameters, minus the redirect flag itself.
    @SuppressWarnings("unchecked")
    Map<String,Object> parameters =
      new HashMap<String,Object>(httpServletRequest.getParameterMap());

    parameters.remove(PARAM_REDIRECT);

    return new ModelAndView(
        "redirect:"+ "/" + requestUri + this.mapToQueryString(parameters));
  } else {
    return new ModelAndView(
        "forward:"+ "/" + requestUri);
  }
}
/**
 * Updates an existing value set definition (HTTP PUT).
 *
 * @param httpServletRequest the http servlet request
 * @param httpServletResponse the http servlet response
 * @param valueSetDefinition the updated value set definition
 * @param changeseturi the change set URI the update is recorded against
 * @param valueSetName the containing value set name
 * @param valueSetDefinitionLocalId the definition's local id
 * @return the update result
 */
@RequestMapping(value=PATH_VALUESETDEFINITION_OF_VALUESET_BYID, method=RequestMethod.PUT)
public Object updateValueSetDefinition(
    HttpServletRequest httpServletRequest,
    HttpServletResponse httpServletResponse,
    @RequestBody ValueSetDefinition valueSetDefinition,
    @RequestParam(value=PARAM_CHANGESETCONTEXT, required=false) String changeseturi,
    @PathVariable(VAR_VALUESETID) String valueSetName,
    @PathVariable(VAR_VALUESETDEFINITIONID) String valueSetDefinitionLocalId) {
  return this.doUpdate(
      httpServletResponse,
      new LocalIdValueSetDefinition(valueSetDefinitionLocalId, valueSetDefinition),
      changeseturi,
      new ValueSetDefinitionReadId(
          valueSetDefinitionLocalId,
          ModelUtils.nameOrUriFromName(valueSetName)),
      this.valueSetDefinitionMaintenanceService);
}
/**
 * Creates a new value set definition (HTTP POST).
 *
 * @param httpServletRequest the http servlet request
 * @param httpServletResponse the http servlet response
 * @param valueSetDefinition the value set definition to create
 * @param changeseturi the change set URI the creation is recorded against
 * @return the creation result (location of the new resource)
 */
@RequestMapping(value=PATH_VALUESETDEFINITION, method=RequestMethod.POST)
public Object createValueSetDefinition(
    HttpServletRequest httpServletRequest,
    HttpServletResponse httpServletResponse,
    @RequestBody ValueSetDefinition valueSetDefinition,
    @RequestParam(value=PARAM_CHANGESETCONTEXT, required=false) String changeseturi) {
  return this.doCreate(
      httpServletResponse,
      valueSetDefinition,
      changeseturi,
      PATH_VALUESETDEFINITION_OF_VALUESET_BYID,
      URL_BINDER,
      this.valueSetDefinitionMaintenanceService);
}
/**
 * Deletes a value set definition (HTTP DELETE).
 *
 * @param httpServletRequest the http servlet request
 * @param httpServletResponse the http servlet response
 * @param restReadContext the read context
 * @param valueSetName the containing value set name
 * @param valueSetDefinitionLocalId the definition's local id
 * @param changeseturi the change set URI the deletion is recorded against
 * @return the deletion result
 */
@RequestMapping(value=PATH_VALUESETDEFINITION_OF_VALUESET_BYID, method=RequestMethod.DELETE)
public Object deleteValueSetDefinition(
    HttpServletRequest httpServletRequest,
    HttpServletResponse httpServletResponse,
    RestReadContext restReadContext,
    @PathVariable(VAR_VALUESETID) String valueSetName,
    @PathVariable(VAR_VALUESETDEFINITIONID) String valueSetDefinitionLocalId,
    @RequestParam(PARAM_CHANGESETCONTEXT) String changeseturi) {
  ValueSetDefinitionReadId id =
      new ValueSetDefinitionReadId(
          valueSetDefinitionLocalId,
          ModelUtils.nameOrUriFromName(valueSetName));

  return this.doDelete(
      httpServletResponse,
      id,
      changeseturi,
      this.valueSetDefinitionMaintenanceService);
}
/** Creates a fresh query builder wired to this controller's resolvers. */
private ValueSetDefinitionQueryBuilder getNewResourceQueryBuilder(){
  return new ValueSetDefinitionQueryBuilder(
      this.valueSetDefinitionQueryService,
      this.getFilterResolver(),
      this.getReadContextResolver());
}
/** @return the injected query service */
public ValueSetDefinitionQueryService getValueSetDefinitionQueryService() {
  return valueSetDefinitionQueryService;
}

/** Injection point for the query service. */
public void setValueSetDefinitionQueryService(
    ValueSetDefinitionQueryService valueSetDefinitionQueryService) {
  this.valueSetDefinitionQueryService = valueSetDefinitionQueryService;
}
/** @return the injected read service */
public ValueSetDefinitionReadService getValueSetDefinitionReadService() {
  return valueSetDefinitionReadService;
}

/** Injection point for the read service. */
public void setValueSetDefinitionReadService(
    ValueSetDefinitionReadService valueSetDefinitionReadService) {
  this.valueSetDefinitionReadService = valueSetDefinitionReadService;
}
/** @return the injected maintenance service */
public ValueSetDefinitionMaintenanceService getValueSetDefinitionMaintenanceService() {
  return valueSetDefinitionMaintenanceService;
}

/** Injection point for the maintenance service. */
public void setValueSetDefinitionMaintenanceService(
    ValueSetDefinitionMaintenanceService valueSetDefinitionMaintenanceService) {
  this.valueSetDefinitionMaintenanceService = valueSetDefinitionMaintenanceService;
}
} | apache-2.0 |
Pimm/closure-compiler | src/com/google/javascript/rhino/jstype/PrototypeObjectType.java | 18054 | /*
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Bob Jervis
* Google Inc.
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package com.google.javascript.rhino.jstype;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.javascript.rhino.ErrorReporter;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
/**
* The object type represents instances of JavaScript objects such as
* {@code Object}, {@code Date}, {@code Function}.<p>
*
* Objects in JavaScript are unordered collections of properties.
* Each property consists of a name, a value and a set of attributes.<p>
*
* Each instance has an implicit prototype property ({@code [[Prototype]]})
* pointing to an object instance, which itself has an implicit property, thus
* forming a chain.<p>
*
* A class begins life with no name. Later, a name may be provided once it
* can be inferred. Note that the name in this case is strictly for
* debugging purposes. Looking up type name references goes through the
* {@link JSTypeRegistry}.<p>
*/
public class PrototypeObjectType extends ObjectType {
  private static final long serialVersionUID = 1L;

  // Class name, or null for an anonymous type.
  private final String className;
  // Own (non-inherited) properties of this type.
  private final PropertyMap properties;
  // True for built-in object types created by the registry.
  private final boolean nativeType;
  private final boolean anonymousType;

  // NOTE(nicksantos): The implicit prototype can change over time.
  // Modeling this is a bear. Always call getImplicitPrototype(), because
  // some subclasses override this to do special resolution handling.
  private ObjectType implicitPrototypeFallback;

  // If this is a function prototype, then this is the owner.
  // A PrototypeObjectType can only be the prototype of one function. If we try
  // to do this for multiple functions, then we'll have to create a new one.
  private FunctionType ownerFunction = null;

  // Whether the toString representation of this should be pretty-printed,
  // by printing all properties.
  private boolean prettyPrint = false;

  private static final int MAX_PRETTY_PRINTED_PROPERTIES = 10;

  /**
   * Creates an object type.
   *
   * @param className the name of the class.  May be {@code null} to
   *        denote an anonymous class.
   *
   * @param implicitPrototype the implicit prototype
   *        (a.k.a. {@code [[Prototype]]}) as defined by ECMA-262. If the
   *        implicit prototype is {@code null} the implicit prototype will be
   *        set to the {@link JSTypeNative#OBJECT_TYPE}.
   */
  PrototypeObjectType(JSTypeRegistry registry, String className,
      ObjectType implicitPrototype) {
    this(
        registry,
        className,
        implicitPrototype,
        false /* nativeType */,
        null /* templateTypeMap */,
        false /* anonymousType */);
  }

  /**
   * Creates an object type.
   *
   * @param className the name of the class.  May be {@code null} to
   *        denote an anonymous class.
   *
   * @param implicitPrototype the implicit prototype
   *        (a.k.a. {@code [[Prototype]]}) as defined by ECMA-262. If the
   *        implicit prototype is {@code null} the implicit prototype will be
   *        set to the {@link JSTypeNative#OBJECT_TYPE}.
   * @param anonymousType True if the class is intended to be anonymous.
   */
  PrototypeObjectType(JSTypeRegistry registry, String className,
      ObjectType implicitPrototype, boolean anonymousType) {
    this(
        registry,
        className,
        implicitPrototype,
        false /* nativeType */,
        null /* templateTypeMap */,
        anonymousType);
  }

  /**
   * Creates an object type, allowing specification of the implicit prototype,
   * whether the object is native, and any templatized types.
   */
  PrototypeObjectType(JSTypeRegistry registry, String className,
      ObjectType implicitPrototype, boolean nativeType,
      TemplateTypeMap templateTypeMap) {
    this(
        registry,
        className,
        implicitPrototype,
        nativeType,
        templateTypeMap,
        false /* anonymousType */);
  }

  /**
   * Creates an object type, allowing specification of the implicit prototype,
   * whether the object is native, and any templatized types.
   */
  PrototypeObjectType(JSTypeRegistry registry, String className,
      ObjectType implicitPrototype, boolean nativeType,
      TemplateTypeMap templateTypeMap, boolean anonymousType) {
    super(registry, templateTypeMap);
    this.properties = new PropertyMap();
    this.properties.setParentSource(this);

    this.className = className;
    this.nativeType = nativeType;
    this.anonymousType = anonymousType;
    if (nativeType || implicitPrototype != null) {
      setImplicitPrototype(implicitPrototype);
    } else {
      // Default the implicit prototype to Object when none was given.
      setImplicitPrototype(
          registry.getNativeObjectType(JSTypeNative.OBJECT_TYPE));
    }
  }

  /** Returns the map of own properties declared on this type. */
  @Override
  PropertyMap getPropertyMap() {
    return properties;
  }

  /**
   * Defines a property on this type. Returns false (and does nothing) if the
   * property was already explicitly declared on this type.
   */
  @Override
  boolean defineProperty(String name, JSType type, boolean inferred,
      Node propertyNode) {
    if (hasOwnDeclaredProperty(name)) {
      return false;
    }
    Property newProp = new Property(
        name, type, inferred, propertyNode);
    properties.putProperty(name, newProp);
    return true;
  }

  /** Removes an own property; returns whether it was present. */
  @Override
  public boolean removeProperty(String name) {
    return properties.removeProperty(name);
  }

  /** Attaches JSDoc to a property, declaring it if it was only inferred. */
  @Override
  public void setPropertyJSDocInfo(String propertyName, JSDocInfo info) {
    if (info != null) {
      if (properties.getOwnProperty(propertyName) == null) {
        // If docInfo was attached, but the type of the property
        // was not defined anywhere, then we consider this an explicit
        // declaration of the property.
        defineInferredProperty(propertyName, getPropertyType(propertyName),
            null);
      }

      // The prototype property is not represented as a normal Property.
      // We probably don't want to attach any JSDoc to it anyway.
      Property property = properties.getOwnProperty(propertyName);
      if (property != null) {
        property.setJSDocInfo(info);
      }
    }
  }

  /** Records the AST node where the property is defined, if the property exists. */
  @Override
  public void setPropertyNode(String propertyName, Node defSite) {
    Property property = properties.getOwnProperty(propertyName);
    if (property != null) {
      property.setNode(defSite);
    }
  }

  /** Whether this type can appear where a number is expected (ECMA ToNumber). */
  @Override
  public boolean matchesNumberContext() {
    return isNumberObjectType() || isDateType() || isBooleanObjectType() ||
        isStringObjectType() || hasOverridenNativeProperty("valueOf");
  }

  /** Whether this type can appear where a string is expected (ECMA ToString). */
  @Override
  public boolean matchesStringContext() {
    return isTheObjectType() || isStringObjectType() || isDateType() ||
        isRegexpType() || isArrayType() || isNumberObjectType() ||
        isBooleanObjectType() || hasOverridenNativeProperty("toString");
  }

  /**
   * Given the name of a native object property, checks whether the property is
   * present on the object and different from the native one.
   */
  private boolean hasOverridenNativeProperty(String propertyName) {
    if (isNativeObjectType()) {
      return false;
    }

    JSType propertyType = getPropertyType(propertyName);
    ObjectType nativeType =
        isFunctionType() ?
        registry.getNativeObjectType(JSTypeNative.FUNCTION_PROTOTYPE) :
        registry.getNativeObjectType(JSTypeNative.OBJECT_PROTOTYPE);
    JSType nativePropertyType = nativeType.getPropertyType(propertyName);
    return propertyType != nativePropertyType;
  }

  /** Maps wrapper object types (String/Boolean/Number) to their primitives. */
  @Override
  public JSType unboxesTo() {
    if (isStringObjectType()) {
      return getNativeType(JSTypeNative.STRING_TYPE);
    } else if (isBooleanObjectType()) {
      return getNativeType(JSTypeNative.BOOLEAN_TYPE);
    } else if (isNumberObjectType()) {
      return getNativeType(JSTypeNative.NUMBER_TYPE);
    } else {
      return super.unboxesTo();
    }
  }

  /** All prototype object types are valid in an object context. */
  @Override
  public boolean matchesObjectContext() {
    return true;
  }

  /** Only RegExp objects are directly callable among plain object types. */
  @Override
  public boolean canBeCalled() {
    return isRegexpType();
  }

  @Override
  StringBuilder appendTo(StringBuilder sb, boolean forAnnotations) {
    if (hasReferenceName()) {
      return sb.append(getReferenceName());
    }

    if (!prettyPrint) {
      return sb.append(forAnnotations ? "?" : "{...}");
    }

    // Don't pretty print recursively.
    prettyPrint = false;

    // Use a tree set so that the properties are sorted.
    Set<String> propertyNames = new TreeSet<>();
    for (ObjectType current = this;
         current != null && !current.isNativeObjectType() &&
             propertyNames.size() <= MAX_PRETTY_PRINTED_PROPERTIES;
         current = current.getImplicitPrototype()) {
      propertyNames.addAll(current.getOwnPropertyNames());
    }

    sb.append("{");
    boolean useNewlines = !forAnnotations && propertyNames.size() > 2;

    int i = 0;
    for (String property : propertyNames) {
      if (i > 0) {
        sb.append(",");
      }
      if (useNewlines) {
        sb.append("\n  ");
      } else if (i > 0) {
        sb.append(" ");
      }

      sb.append(property).append(": ");
      getPropertyType(property).appendAsNonNull(sb, forAnnotations);

      ++i;
      if (!forAnnotations && i == MAX_PRETTY_PRINTED_PROPERTIES) {
        sb.append(", ...");
        break;
      }
    }

    if (useNewlines) {
      sb.append("\n");
    }

    sb.append("}");

    prettyPrint = true;
    return sb;
  }

  /** Enables/disables property-listing in toString output. */
  void setPrettyPrint(boolean prettyPrint) {
    this.prettyPrint = prettyPrint;
  }

  boolean isPrettyPrint() {
    return prettyPrint;
  }

  /** Plain prototype objects have no constructor; subclasses may override. */
  @Override
  public FunctionType getConstructor() {
    return null;
  }

  @Override
  public ObjectType getImplicitPrototype() {
    return implicitPrototypeFallback;
  }

  /**
   * This should only be reset on the FunctionPrototypeType, only to fix an
   * incorrectly established prototype chain due to the user having a mismatch
   * in super class declaration, and only before properties on that type are
   * processed.
   */
  final void setImplicitPrototype(ObjectType implicitPrototype) {
    checkState(!hasCachedValues());
    this.implicitPrototypeFallback = implicitPrototype;
  }

  /** Returns the class name, the owner's "<name>.prototype", or null. */
  @Override
  public String getReferenceName() {
    if (className != null) {
      return className;
    } else if (ownerFunction != null) {
      return ownerFunction.getReferenceName() + ".prototype";
    } else {
      return null;
    }
  }

  @Override
  public boolean hasReferenceName() {
    return className != null || ownerFunction != null;
  }

  public boolean isAnonymous() {
    return anonymousType;
  }

  @Override
  public boolean isInstanceofObject() {
    return isAnonymous();
  }

  @Override
  public boolean isSubtype(JSType that) {
    return isSubtype(that, ImplCache.create(), SubtypingMode.NORMAL);
  }

  @Override
  protected boolean isSubtype(JSType that,
      ImplCache implicitImplCache, SubtypingMode subtypingMode) {
    if (JSType.isSubtypeHelper(this, that, implicitImplCache, subtypingMode)) {
      return true;
    }

    // Union types
    if (that.isUnionType()) {
      // The static {@code JSType.isSubtype} check already decomposed
      // union types, so we don't need to check those again.
      return false;
    }

    // record types
    if (that.isRecordType()) {
      return PrototypeObjectType.isSubtype(
          this, that.toMaybeRecordType(), implicitImplCache, subtypingMode);
    }

    // Interfaces
    // Find all the interfaces implemented by this class and compare each one
    // to the interface instance.
    ObjectType thatObj = that.toObjectType();
    FunctionType thatCtor = thatObj == null ? null : thatObj.getConstructor();

    if (getConstructor() != null && getConstructor().isInterface()) {
      for (ObjectType thisInterface : getCtorExtendedInterfaces()) {
        if (thisInterface.isSubtype(that, implicitImplCache, subtypingMode)) {
          return true;
        }
      }
    } else if (thatCtor != null && thatCtor.isInterface()) {
      Iterable<ObjectType> thisInterfaces = getCtorImplementedInterfaces();
      for (ObjectType thisInterface : thisInterfaces) {
        if (thisInterface.isSubtype(that, implicitImplCache, subtypingMode)) {
          return true;
        }
      }
    }

    // other prototype based objects
    if (isUnknownType()) {
      // If unsure, say 'yes', to avoid spurious warnings.
      return true;
    }

    return thatObj != null && isImplicitPrototype(thatObj);
  }

  /** Determines if typeA is a subtype of typeB */
  private static boolean isSubtype(ObjectType typeA, RecordType typeB,
      ImplCache implicitImplCache, SubtypingMode subtypingMode) {
    // typeA is a subtype of record type typeB iff:
    // 1) typeA has all the properties declared in typeB.
    // 2) And for each property of typeB, its type must be
    //    a super type of the corresponding property of typeA.
    for (String property : typeB.getOwnPropertyNames()) {
      if (!typeA.hasProperty(property)) {
        return false;
      }

      JSType propA = typeA.getPropertyType(property);
      JSType propB = typeB.getPropertyType(property);
      if (!propA.isSubtype(propB, implicitImplCache, subtypingMode)) {
        return false;
      }
    }

    return true;
  }

  @Override
  public boolean hasCachedValues() {
    return super.hasCachedValues();
  }

  /** Whether this is a built-in object. */
  @Override
  public boolean isNativeObjectType() {
    return nativeType;
  }

  /** Sets (or clears) the owning function; only one owner is allowed at a time. */
  @Override
  void setOwnerFunction(FunctionType type) {
    Preconditions.checkState(ownerFunction == null || type == null);
    ownerFunction = type;
  }

  @Override
  public FunctionType getOwnerFunction() {
    return ownerFunction;
  }

  @Override
  public Iterable<ObjectType> getCtorImplementedInterfaces() {
    return isFunctionPrototypeType()
        ? getOwnerFunction().getImplementedInterfaces()
        : ImmutableList.<ObjectType>of();
  }

  @Override
  public Iterable<ObjectType> getCtorExtendedInterfaces() {
    return isFunctionPrototypeType()
        ? getOwnerFunction().getExtendedInterfaces()
        : ImmutableList.<ObjectType>of();
  }

  @Override
  JSType resolveInternal(ErrorReporter t, StaticTypedScope<JSType> scope) {
    setResolvedTypeInternal(this);

    ObjectType implicitPrototype = getImplicitPrototype();
    if (implicitPrototype != null) {
      implicitPrototypeFallback =
          (ObjectType) implicitPrototype.resolve(t, scope);
      FunctionType ctor = getConstructor();
      if (ctor != null) {
        FunctionType superCtor = ctor.getSuperClassConstructor();
        if (superCtor != null) {
          // If the super ctor of this prototype object was not known before resolution, then the
          // subTypes would not have been set. Update them.
          superCtor.addSubTypeIfNotPresent(ctor);
        }
      }
    }
    for (Property prop : properties.values()) {
      prop.setType(safeResolve(prop.getType(), t, scope));
    }
    return this;
  }

  @Override
  public void matchConstraint(JSType constraint) {
    // We only want to match constraints on anonymous types.
    if (hasReferenceName()) {
      return;
    }

    // Handle the case where the constraint object is a record type.
    //
    // param constraint {{prop: (number|undefined)}}
    // function f(constraint) {}
    // f({});
    //
    // We want to modify the object literal to match the constraint, by
    // taking any each property on the record and trying to match
    // properties on this object.
    if (constraint.isRecordType()) {
      matchRecordTypeConstraint(constraint.toObjectType());
    } else if (constraint.isUnionType()) {
      for (JSType alt : constraint.toMaybeUnionType().getAlternates()) {
        if (alt.isRecordType()) {
          matchRecordTypeConstraint(alt.toObjectType());
        }
      }
    }
  }

  /** Infers missing/undeclared properties from a record-type constraint. */
  public void matchRecordTypeConstraint(ObjectType constraintObj) {
    for (String prop : constraintObj.getOwnPropertyNames()) {
      JSType propType = constraintObj.getPropertyType(prop);
      if (!isPropertyTypeDeclared(prop)) {
        JSType typeToInfer = propType;
        if (!hasProperty(prop)) {
          // Absent properties may be undefined, so widen with void.
          typeToInfer = getNativeType(JSTypeNative.VOID_TYPE)
              .getLeastSupertype(propType);
        }
        defineInferredProperty(prop, typeToInfer, null);
      }
    }
  }

  /**
   * Structural types hash by content (name + properties); nominal types hash
   * by identity.
   */
  @Override
  public int hashCode() {
    if (isStructuralType()) {
      return Objects.hash(className, properties);
    } else {
      return System.identityHashCode(this);
    }
  }
}
| apache-2.0 |
studanshu/datacollector | cli/src/main/java/com/streamsets/datacollector/client/model/SampledRecordJson.java | 2240 | /**
* Copyright 2015 StreamSets Inc.
*
* Licensed under the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.client.model;
import com.streamsets.datacollector.client.StringUtil;
import com.streamsets.datacollector.client.model.RecordJson;
import io.swagger.annotations.*;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Wire model pairing a sampled {@code RecordJson} with a flag indicating
 * whether the record matched the sampling condition.
 * (Swagger-codegen generated class.)
 */
@ApiModel(description = "")
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.JavaClientCodegen", date = "2015-09-11T14:51:29.367-07:00")
public class SampledRecordJson {

  // The sampled record payload.
  private RecordJson record = null;
  // Whether the record matched the sampling condition.
  private Boolean matchedCondition = null;

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("record")
  public RecordJson getRecord() {
    return record;
  }

  public void setRecord(RecordJson record) {
    this.record = record;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("matchedCondition")
  public Boolean getMatchedCondition() {
    return matchedCondition;
  }

  public void setMatchedCondition(Boolean matchedCondition) {
    this.matchedCondition = matchedCondition;
  }

  @Override
  public String toString()  {
    StringBuilder sb = new StringBuilder();
    sb.append("class SampledRecordJson {\n");

    sb.append("  record: ").append(StringUtil.toIndentedString(record)).append("\n");
    sb.append("  matchedCondition: ").append(StringUtil.toIndentedString(matchedCondition)).append("\n");
    sb.append("}");
    return sb.toString();
  }
}
| apache-2.0 |
TelekomAustriaGroup/incubator-zeppelin | zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreter.java | 12083 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.remote;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.thrift.TException;
import org.apache.zeppelin.display.GUI;
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterContextRunner;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterGroup;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.interpreter.InterpreterResult.Type;
import org.apache.zeppelin.interpreter.WrappedInterpreter;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterContext;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterResult;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService.Client;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
/**
*
*/
public class RemoteInterpreter extends Interpreter {
Logger logger = LoggerFactory.getLogger(RemoteInterpreter.class);
Gson gson = new Gson();
private String interpreterRunner;
private String interpreterPath;
private String className;
FormType formType;
boolean initialized;
private Map<String, String> env;
static Map<String, RemoteInterpreterProcess> interpreterGroupReference
= new HashMap<String, RemoteInterpreterProcess>();
private int connectTimeout;
/**
 * Creates a remote interpreter proxy with an empty process environment.
 *
 * @param property interpreter properties forwarded to the remote side
 * @param className fully qualified class name of the remote interpreter
 * @param interpreterRunner path of the script that launches the process
 * @param interpreterPath installation directory of the interpreter
 * @param connectTimeout thrift connect timeout in milliseconds
 */
public RemoteInterpreter(Properties property,
    String className,
    String interpreterRunner,
    String interpreterPath,
    int connectTimeout) {
  // Delegate to the full constructor; an empty env map matches the
  // previous field-by-field initialization ('initialized' defaults to false).
  this(property, className, interpreterRunner, interpreterPath,
      new HashMap<String, String>(), connectTimeout);
}
/**
 * Creates a remote interpreter proxy with an explicit environment for the
 * spawned interpreter process.
 *
 * @param property interpreter properties forwarded to the remote side
 * @param className fully qualified class name of the remote interpreter
 * @param interpreterRunner path of the script that launches the process
 * @param interpreterPath installation directory of the interpreter
 * @param env environment variables for the spawned process
 * @param connectTimeout thrift connect timeout in milliseconds
 */
public RemoteInterpreter(Properties property,
    String className,
    String interpreterRunner,
    String interpreterPath,
    Map<String, String> env,
    int connectTimeout) {
  super(property);
  this.className = className;
  this.interpreterRunner = interpreterRunner;
  this.interpreterPath = interpreterPath;
  this.env = env;
  this.connectTimeout = connectTimeout;
}
/** Returns the class name of the interpreter this proxy represents. */
@Override
public String getClassName() {
  return className;
}
/**
 * Looks up the shared process for this proxy's interpreter group.
 *
 * @return the registered {@link RemoteInterpreterProcess}, or {@code null}
 *         when the group's process is closed or has not been opened yet
 */
public RemoteInterpreterProcess getInterpreterProcess() {
  synchronized (interpreterGroupReference) {
    // Map.get returns null for an absent key, which covers the
    // "closed or not opened yet" case directly. (The previous
    // containsKey + try/catch around a plain return was dead code.)
    return interpreterGroupReference.get(getInterpreterGroupKey(getInterpreterGroup()));
  }
}
/**
 * Lazily references the shared interpreter process for this group and, when
 * this call creates the very first reference, instructs the remote process to
 * instantiate every interpreter class in the group.
 *
 * @throws InterpreterException if the process entry was never registered, the
 *         thrift client cannot be obtained, or remote creation fails
 */
private synchronized void init() {
  if (initialized) {
    return;
  }

  // The shared process entry must have been registered before open() runs.
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  if (interpreterProcess == null) {
    throw new InterpreterException("Unexpected error");
  }

  int rc = interpreterProcess.reference(getInterpreterGroup());

  synchronized (interpreterProcess) {
    // when first process created
    if (rc == 1) {
      // create all interpreter class in this interpreter group
      Client client = null;
      try {
        client = interpreterProcess.getClient();
      } catch (Exception e1) {
        throw new InterpreterException(e1);
      }

      try {
        for (Interpreter intp : this.getInterpreterGroup()) {
          logger.info("Create remote interpreter {}", intp.getClassName());
          // Raw Map cast: the thrift interface takes an untyped map of
          // properties.
          client.createInterpreter(intp.getClassName(), (Map) property);
        }
      } catch (TException e) {
        throw new InterpreterException(e);
      } finally {
        // Always return the client to the pool, even on failure.
        interpreterProcess.releaseClient(client);
      }
    }
  }
  initialized = true;
}
/** Opens the interpreter by performing lazy one-time initialization. */
@Override
public void open() {
  init();
}
/**
 * Closes this interpreter on the remote process and releases one reference
 * to the shared process; when the count drops to zero, the process entry is
 * removed from the group registry.
 */
@Override
public void close() {
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  Client client = null;
  try {
    client = interpreterProcess.getClient();
  } catch (Exception e1) {
    throw new InterpreterException(e1);
  }

  try {
    client.close(className);
  } catch (TException e) {
    // NOTE(review): throwing here skips the dereference() below, which would
    // leak a reference count — confirm whether that is intended.
    throw new InterpreterException(e);
  } finally {
    interpreterProcess.releaseClient(client);
  }

  int r = interpreterProcess.dereference();
  if (r == 0) {
    // Last reference gone: drop the shared process entry for this group.
    synchronized (interpreterGroupReference) {
      InterpreterGroup intpGroup = getInterpreterGroup();
      interpreterGroupReference.remove(getInterpreterGroupKey(intpGroup));
    }
  }
}
/**
 * Sends the paragraph text to the remote interpreter process and converts the
 * thrift result back into an {@link InterpreterResult}.
 *
 * <p>Before the call, this context's paragraph runners are pushed into the
 * process-wide runner pool (keyed by note id); after the call, the config and
 * — for {@code NATIVE} forms — GUI state reported by the remote side are
 * copied back into the local context.
 *
 * @param st the paragraph text to interpret
 * @param context the paragraph context
 * @return the result produced by the remote interpreter
 */
@Override
public InterpreterResult interpret(String st, InterpreterContext context) {
  logger.debug("st: {}", st);

  FormType form = getFormType();
  RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
  Client client = null;
  try {
    client = interpreterProcess.getClient();
  } catch (Exception e1) {
    throw new InterpreterException(e1);
  }

  InterpreterContextRunnerPool interpreterContextRunnerPool = interpreterProcess
      .getInterpreterContextRunnerPool();

  List<InterpreterContextRunner> runners = context.getRunners();
  if (runners != null && runners.size() != 0) {
    // assume all runners in this InterpreterContext have the same note id
    String noteId = runners.get(0).getNoteId();
    interpreterContextRunnerPool.clear(noteId);
    interpreterContextRunnerPool.addAll(noteId, runners);
  }

  try {
    // (Removed an unused local: 'GUI settings = context.getGui();' — it was
    // never read; the NATIVE-form branch below fetches the GUI directly.)
    RemoteInterpreterResult remoteResult = client.interpret(className, st, convert(context));

    Map<String, Object> remoteConfig = (Map<String, Object>) gson.fromJson(
        remoteResult.getConfig(), new TypeToken<Map<String, Object>>() {
        }.getType());
    // Replace the local paragraph config with whatever the remote side reports.
    context.getConfig().clear();
    context.getConfig().putAll(remoteConfig);

    if (form == FormType.NATIVE) {
      // Native forms are created remotely; mirror them into the local GUI.
      GUI remoteGui = gson.fromJson(remoteResult.getGui(), GUI.class);
      context.getGui().clear();
      context.getGui().setParams(remoteGui.getParams());
      context.getGui().setForms(remoteGui.getForms());
    }

    return convert(remoteResult);
  } catch (TException e) {
    throw new InterpreterException(e);
  } finally {
    interpreterProcess.releaseClient(client);
  }
}
  /**
   * Asks the remote interpreter process to cancel the currently running
   * paragraph for this interpreter class.
   */
  @Override
  public void cancel(InterpreterContext context) {
    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
    Client client = null;
    try {
      client = interpreterProcess.getClient();
    } catch (Exception e1) {
      throw new InterpreterException(e1);
    }
    try {
      client.cancel(className, convert(context));
    } catch (TException e) {
      throw new InterpreterException(e);
    } finally {
      // always return the thrift client to the process pool
      interpreterProcess.releaseClient(client);
    }
  }
  /**
   * Returns the form type reported by the remote interpreter, caching the
   * value in {@code formType} after the first successful remote lookup.
   */
  @Override
  public FormType getFormType() {
    init();
    // cached after the first successful remote call
    if (formType != null) {
      return formType;
    }
    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
    Client client = null;
    try {
      client = interpreterProcess.getClient();
    } catch (Exception e1) {
      throw new InterpreterException(e1);
    }
    try {
      formType = FormType.valueOf(client.getFormType(className));
      return formType;
    } catch (TException e) {
      throw new InterpreterException(e);
    } finally {
      interpreterProcess.releaseClient(client);
    }
  }
  /**
   * Fetches the progress (as reported by the remote interpreter) for the
   * paragraph described by the given context.
   */
  @Override
  public int getProgress(InterpreterContext context) {
    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
    Client client = null;
    try {
      client = interpreterProcess.getClient();
    } catch (Exception e1) {
      throw new InterpreterException(e1);
    }
    try {
      return client.getProgress(className, convert(context));
    } catch (TException e) {
      throw new InterpreterException(e);
    } finally {
      interpreterProcess.releaseClient(client);
    }
  }
  /**
   * Delegates code completion to the remote interpreter.
   *
   * @param buf the buffer being edited
   * @param cursor the cursor position within the buffer
   * @return completion candidates supplied by the remote side
   */
  @Override
  public List<String> completion(String buf, int cursor) {
    RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
    Client client = null;
    try {
      client = interpreterProcess.getClient();
    } catch (Exception e1) {
      throw new InterpreterException(e1);
    }
    try {
      return client.completion(className, buf, cursor);
    } catch (TException e) {
      throw new InterpreterException(e);
    } finally {
      interpreterProcess.releaseClient(client);
    }
  }
@Override
public Scheduler getScheduler() {
int maxConcurrency = 10;
RemoteInterpreterProcess interpreterProcess = getInterpreterProcess();
if (interpreterProcess == null) {
return null;
} else {
return SchedulerFactory.singleton().createOrGetRemoteScheduler(
"remoteinterpreter_" + interpreterProcess.hashCode(), interpreterProcess,
maxConcurrency);
}
}
@Override
public void setInterpreterGroup(InterpreterGroup interpreterGroup) {
super.setInterpreterGroup(interpreterGroup);
synchronized (interpreterGroupReference) {
RemoteInterpreterProcess intpProcess = interpreterGroupReference
.get(getInterpreterGroupKey(interpreterGroup));
// when interpreter process is not created or terminated
if (intpProcess == null || (!intpProcess.isRunning() && intpProcess.getPort() > 0)
|| (!intpProcess.isRunning() && intpProcess.getPort() == -1)) {
interpreterGroupReference.put(getInterpreterGroupKey(interpreterGroup),
new RemoteInterpreterProcess(interpreterRunner,
interpreterPath, env, connectTimeout));
logger.info("setInterpreterGroup = "
+ getInterpreterGroupKey(interpreterGroup) + " class=" + className
+ ", path=" + interpreterPath);
}
}
}
  // The group id is the registry key that identifies the shared remote process.
  private String getInterpreterGroupKey(InterpreterGroup interpreterGroup) {
    return interpreterGroup.getId();
  }
  /**
   * Serializes an InterpreterContext into its thrift representation; config,
   * GUI state and runners are transported as JSON strings.
   */
  private RemoteInterpreterContext convert(InterpreterContext ic) {
    return new RemoteInterpreterContext(
        ic.getNoteId(),
        ic.getParagraphId(),
        ic.getParagraphTitle(),
        ic.getParagraphText(),
        gson.toJson(ic.getConfig()),
        gson.toJson(ic.getGui()),
        gson.toJson(ic.getRunners()));
  }
  /**
   * Maps a thrift RemoteInterpreterResult back to the local result type,
   * resolving the code and type enums from their string names.
   */
  private InterpreterResult convert(RemoteInterpreterResult result) {
    return new InterpreterResult(
        InterpreterResult.Code.valueOf(result.getCode()),
        Type.valueOf(result.getType()),
        result.getMsg());
  }
}
| apache-2.0 |
prabushi/devstudio-tooling-esb | plugins/org.wso2.developerstudio.eclipse.gmf.esb/src/org/wso2/developerstudio/eclipse/gmf/esb/ValidateFeature.java | 435 | /**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.gmf.esb;
/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Validate Feature</b></em>'.
 * Marker interface: declares no members of its own and inherits all
 * behavior from {@link AbstractBooleanFeature}.
 * <!-- end-user-doc -->
 *
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage#getValidateFeature()
 * @model
 * @generated
 */
public interface ValidateFeature extends AbstractBooleanFeature {
} // ValidateFeature
| apache-2.0 |
punkhorn/camel-upstream | core/camel-core/src/test/java/org/apache/camel/util/ObjectHelperTest.java | 40713 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Properties;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.component.bean.MyOtherFooBean;
import org.apache.camel.component.bean.MyOtherFooBean.AbstractClassSize;
import org.apache.camel.component.bean.MyOtherFooBean.Clazz;
import org.apache.camel.component.bean.MyOtherFooBean.InterfaceSize;
import org.apache.camel.component.bean.MyStaticClass;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.support.CamelContextHelper;
import org.apache.camel.support.DefaultMessage;
import org.apache.camel.support.ObjectHelper;
import org.junit.Assert;
import org.junit.Test;
public class ObjectHelperTest extends Assert {
@Test
public void testLoadResourceAsStream() {
InputStream res1 = org.apache.camel.util.ObjectHelper.loadResourceAsStream("org/apache/camel/util/ObjectHelperResourceTestFile.properties");
InputStream res2 = org.apache.camel.util.ObjectHelper.loadResourceAsStream("/org/apache/camel/util/ObjectHelperResourceTestFile.properties");
assertNotNull("Cannot load resource without leading \"/\"", res1);
assertNotNull("Cannot load resource with leading \"/\"", res2);
IOHelper.close(res1, res2);
}
@Test
public void testLoadResource() {
URL url1 = org.apache.camel.util.ObjectHelper.loadResourceAsURL("org/apache/camel/util/ObjectHelperResourceTestFile.properties");
URL url2 = org.apache.camel.util.ObjectHelper.loadResourceAsURL("/org/apache/camel/util/ObjectHelperResourceTestFile.properties");
assertNotNull("Cannot load resource without leading \"/\"", url1);
assertNotNull("Cannot load resource with leading \"/\"", url2);
}
@Test
public void testGetPropertyName() throws Exception {
Method method = getClass().getMethod("setCheese", String.class);
assertNotNull("should have found a method!", method);
String name = org.apache.camel.util.ObjectHelper.getPropertyName(method);
assertEquals("Property name", "cheese", name);
}
    // Fixture setter: exists only so testGetPropertyName can look it up via reflection.
    public void setCheese(String cheese) {
        // used in the above unit test
    }
@Test
public void testContains() throws Exception {
String[] array = {"foo", "bar"};
Collection<String> collection = Arrays.asList(array);
assertTrue(ObjectHelper.contains(array, "foo"));
assertTrue(ObjectHelper.contains(collection, "foo"));
assertTrue(ObjectHelper.contains("foo", "foo"));
assertFalse(ObjectHelper.contains(array, "xyz"));
assertFalse(ObjectHelper.contains(collection, "xyz"));
assertFalse(ObjectHelper.contains("foo", "xyz"));
}
@Test
public void testContainsStringBuilder() throws Exception {
StringBuilder sb = new StringBuilder();
sb.append("Hello World");
assertTrue(ObjectHelper.contains(sb, "World"));
assertTrue(ObjectHelper.contains(sb, new StringBuffer("World")));
assertTrue(ObjectHelper.contains(sb, new StringBuilder("World")));
assertFalse(ObjectHelper.contains(sb, "Camel"));
assertFalse(ObjectHelper.contains(sb, new StringBuffer("Camel")));
assertFalse(ObjectHelper.contains(sb, new StringBuilder("Camel")));
}
@Test
public void testContainsStringBuffer() throws Exception {
StringBuffer sb = new StringBuffer();
sb.append("Hello World");
assertTrue(ObjectHelper.contains(sb, "World"));
assertTrue(ObjectHelper.contains(sb, new StringBuffer("World")));
assertTrue(ObjectHelper.contains(sb, new StringBuilder("World")));
assertFalse(ObjectHelper.contains(sb, "Camel"));
assertFalse(ObjectHelper.contains(sb, new StringBuffer("Camel")));
assertFalse(ObjectHelper.contains(sb, new StringBuilder("Camel")));
}
    // equal() must be null-safe, use value equality, and compare byte[] by content.
    @Test
    public void testEqual() {
        assertTrue(org.apache.camel.util.ObjectHelper.equal(null, null));
        assertTrue(org.apache.camel.util.ObjectHelper.equal("", ""));
        assertTrue(org.apache.camel.util.ObjectHelper.equal(" ", " "));
        assertTrue(org.apache.camel.util.ObjectHelper.equal("Hello", "Hello"));
        assertTrue(org.apache.camel.util.ObjectHelper.equal(123, 123));
        assertTrue(org.apache.camel.util.ObjectHelper.equal(true, true));
        assertFalse(org.apache.camel.util.ObjectHelper.equal(null, ""));
        assertFalse(org.apache.camel.util.ObjectHelper.equal("", null));
        assertFalse(org.apache.camel.util.ObjectHelper.equal(" ", " "));
        assertFalse(org.apache.camel.util.ObjectHelper.equal("Hello", "World"));
        assertFalse(org.apache.camel.util.ObjectHelper.equal(true, false));
        assertFalse(org.apache.camel.util.ObjectHelper.equal(new Object(), new Object()));
        // byte arrays are compared element-wise, not by reference
        byte[] a = new byte[] {40, 50, 60};
        byte[] b = new byte[] {40, 50, 60};
        assertTrue(org.apache.camel.util.ObjectHelper.equal(a, b));
        a = new byte[] {40, 50, 60};
        b = new byte[] {40, 50, 60, 70};
        assertFalse(org.apache.camel.util.ObjectHelper.equal(a, b));
    }
    // equalByteArray() must compare content (incl. multi-byte text), be null-safe,
    // and treat arrays of different lengths or a null side as unequal.
    @Test
    public void testEqualByteArray() {
        assertTrue(org.apache.camel.util.ObjectHelper.equalByteArray("Hello".getBytes(), "Hello".getBytes()));
        assertFalse(org.apache.camel.util.ObjectHelper.equalByteArray("Hello".getBytes(), "World".getBytes()));
        // non-ASCII content must compare byte-for-byte as well
        assertTrue(org.apache.camel.util.ObjectHelper.equalByteArray("Hello Thai Elephant \u0E08".getBytes(), "Hello Thai Elephant \u0E08".getBytes()));
        assertTrue(org.apache.camel.util.ObjectHelper.equalByteArray(null, null));
        byte[] empty = new byte[0];
        assertTrue(org.apache.camel.util.ObjectHelper.equalByteArray(empty, empty));
        byte[] a = new byte[] {40, 50, 60};
        byte[] b = new byte[] {40, 50, 60};
        assertTrue(org.apache.camel.util.ObjectHelper.equalByteArray(a, b));
        // different lengths in either direction are unequal
        a = new byte[] {40, 50, 60};
        b = new byte[] {40, 50, 60, 70};
        assertFalse(org.apache.camel.util.ObjectHelper.equalByteArray(a, b));
        a = new byte[] {40, 50, 60, 70};
        b = new byte[] {40, 50, 60};
        assertFalse(org.apache.camel.util.ObjectHelper.equalByteArray(a, b));
        a = new byte[] {40, 50, 60};
        b = new byte[0];
        assertFalse(org.apache.camel.util.ObjectHelper.equalByteArray(a, b));
        a = new byte[0];
        b = new byte[] {40, 50, 60};
        assertFalse(org.apache.camel.util.ObjectHelper.equalByteArray(a, b));
        // a single null side is unequal; two nulls are equal
        a = new byte[] {40, 50, 60};
        b = null;
        assertFalse(org.apache.camel.util.ObjectHelper.equalByteArray(a, b));
        a = null;
        b = new byte[] {40, 50, 60};
        assertFalse(org.apache.camel.util.ObjectHelper.equalByteArray(a, b));
        a = null;
        b = null;
        assertTrue(org.apache.camel.util.ObjectHelper.equalByteArray(a, b));
    }
@Test
public void testCreateIterator() {
List<String> list = new ArrayList<>();
Iterator<String> iterator = list.iterator();
assertSame("Should return the same iterator", iterator, ObjectHelper.createIterator(iterator));
}
@Test
public void testCreateIteratorAllowEmpty() {
String s = "a,b,,c";
Iterator<?> it = ObjectHelper.createIterator(s, ",", true);
assertEquals("a", it.next());
assertEquals("b", it.next());
assertEquals("", it.next());
assertEquals("c", it.next());
}
@Test
public void testCreateIteratorPattern() {
String s = "a\nb\rc";
Iterator<?> it = ObjectHelper.createIterator(s, "\n|\r", false, true);
assertEquals("a", it.next());
assertEquals("b", it.next());
assertEquals("c", it.next());
}
@Test
public void testCreateIteratorWithStringAndCommaSeparator() {
String s = "a,b,c";
Iterator<?> it = ObjectHelper.createIterator(s, ",");
assertEquals("a", it.next());
assertEquals("b", it.next());
assertEquals("c", it.next());
}
    // With allowEmptyValues=true an empty string yields exactly one empty token,
    // and iterating past it raises NoSuchElementException with a descriptive message.
    @Test
    public void testCreateIteratorWithStringAndCommaSeparatorEmptyString() {
        String s = "";
        Iterator<?> it = ObjectHelper.createIterator(s, ",", true);
        assertEquals("", it.next());
        assertFalse(it.hasNext());
        try {
            it.next();
            fail("Should have thrown exception");
        } catch (NoSuchElementException nsee) {
            // expected
            assertEquals("no more element available for '' at the index 1", nsee.getMessage());
        }
    }
@Test
public void testCreateIteratorWithStringAndSemiColonSeparator() {
String s = "a;b;c";
Iterator<?> it = ObjectHelper.createIterator(s, ";");
assertEquals("a", it.next());
assertEquals("b", it.next());
assertEquals("c", it.next());
}
@Test
public void testCreateIteratorWithStringAndCommaInParanthesesSeparator() {
String s = "bean:foo?method=bar('A','B','C')";
Iterator<?> it = ObjectHelper.createIterator(s, ",");
assertEquals("bean:foo?method=bar('A','B','C')", it.next());
}
@Test
public void testCreateIteratorWithStringAndCommaInParanthesesSeparatorTwo() {
String s = "bean:foo?method=bar('A','B','C'),bean:bar?method=cool('A','Hello,World')";
Iterator<?> it = ObjectHelper.createIterator(s, ",");
assertEquals("bean:foo?method=bar('A','B','C')", it.next());
assertEquals("bean:bar?method=cool('A','Hello,World')", it.next());
}
// CHECKSTYLE:OFF
@Test
public void testCreateIteratorWithPrimitiveArrayTypes() {
Iterator<?> it = ObjectHelper.createIterator(new byte[] {13, Byte.MAX_VALUE, 7, Byte.MIN_VALUE}, null);
assertTrue(it.hasNext());
assertEquals(Byte.valueOf((byte) 13), it.next());
assertTrue(it.hasNext());
assertEquals(Byte.MAX_VALUE, it.next());
assertTrue(it.hasNext());
assertEquals(Byte.valueOf((byte) 7), it.next());
assertTrue(it.hasNext());
assertEquals(Byte.MIN_VALUE, it.next());
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[B@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 4"));
}
it = ObjectHelper.createIterator(new byte[] {}, null);
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[B@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 0"));
}
it = ObjectHelper.createIterator(new short[] {13, Short.MAX_VALUE, 7, Short.MIN_VALUE}, null);
assertTrue(it.hasNext());
assertEquals(Short.valueOf((short) 13), it.next());
assertTrue(it.hasNext());
assertEquals(Short.MAX_VALUE, it.next());
assertTrue(it.hasNext());
assertEquals(Short.valueOf((short) 7), it.next());
assertTrue(it.hasNext());
assertEquals(Short.MIN_VALUE, it.next());
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[S@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 4"));
}
it = ObjectHelper.createIterator(new short[] {}, null);
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[S@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 0"));
}
it = ObjectHelper.createIterator(new int[] {13, Integer.MAX_VALUE, 7, Integer.MIN_VALUE}, null);
assertTrue(it.hasNext());
assertEquals(Integer.valueOf(13), it.next());
assertTrue(it.hasNext());
assertEquals(Integer.MAX_VALUE, it.next());
assertTrue(it.hasNext());
assertEquals(Integer.valueOf(7), it.next());
assertTrue(it.hasNext());
assertEquals(Integer.MIN_VALUE, it.next());
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[I@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 4"));
}
it = ObjectHelper.createIterator(new int[] {}, null);
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[I@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 0"));
}
it = ObjectHelper.createIterator(new long[] {13L, Long.MAX_VALUE, 7L, Long.MIN_VALUE}, null);
assertTrue(it.hasNext());
assertEquals(Long.valueOf(13), it.next());
assertTrue(it.hasNext());
assertEquals(Long.MAX_VALUE, it.next());
assertTrue(it.hasNext());
assertEquals(Long.valueOf(7), it.next());
assertTrue(it.hasNext());
assertEquals(Long.MIN_VALUE, it.next());
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[J@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 4"));
}
it = ObjectHelper.createIterator(new long[] {}, null);
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[J@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 0"));
}
it = ObjectHelper.createIterator(new float[] {13.7F, Float.MAX_VALUE, 7.13F, Float.MIN_VALUE}, null);
assertTrue(it.hasNext());
assertEquals(Float.valueOf(13.7F), it.next());
assertTrue(it.hasNext());
assertEquals(Float.MAX_VALUE, it.next());
assertTrue(it.hasNext());
assertEquals(Float.valueOf(7.13F), it.next());
assertTrue(it.hasNext());
assertEquals(Float.MIN_VALUE, it.next());
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[F@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 4"));
}
it = ObjectHelper.createIterator(new float[] {}, null);
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[F@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 0"));
}
it = ObjectHelper.createIterator(new double[] {13.7D, Double.MAX_VALUE, 7.13D, Double.MIN_VALUE}, null);
assertTrue(it.hasNext());
assertEquals(Double.valueOf(13.7D), it.next());
assertTrue(it.hasNext());
assertEquals(Double.MAX_VALUE, it.next());
assertTrue(it.hasNext());
assertEquals(Double.valueOf(7.13D), it.next());
assertTrue(it.hasNext());
assertEquals(Double.MIN_VALUE, it.next());
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[D@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 4"));
}
it = ObjectHelper.createIterator(new double[] {}, null);
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[D@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 0"));
}
it = ObjectHelper.createIterator(new char[] {'C', 'a', 'm', 'e', 'l'}, null);
assertTrue(it.hasNext());
assertEquals(Character.valueOf('C'), it.next());
assertTrue(it.hasNext());
assertEquals(Character.valueOf('a'), it.next());
assertTrue(it.hasNext());
assertEquals(Character.valueOf('m'), it.next());
assertTrue(it.hasNext());
assertEquals(Character.valueOf('e'), it.next());
assertTrue(it.hasNext());
assertEquals(Character.valueOf('l'), it.next());
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[C@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 5"));
}
it = ObjectHelper.createIterator(new char[] {}, null);
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[C@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 0"));
}
it = ObjectHelper.createIterator(new boolean[] {false, true, false, true, true}, null);
assertTrue(it.hasNext());
assertEquals(Boolean.FALSE, it.next());
assertTrue(it.hasNext());
assertEquals(Boolean.TRUE, it.next());
assertTrue(it.hasNext());
assertEquals(Boolean.FALSE, it.next());
assertTrue(it.hasNext());
assertEquals(Boolean.TRUE, it.next());
assertTrue(it.hasNext());
assertEquals(Boolean.TRUE, it.next());
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[Z@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 5"));
}
it = ObjectHelper.createIterator(new boolean[] {}, null);
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for '[Z@"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 0"));
}
}
// CHECKSTYLE:ON
@Test
public void testArrayAsIterator() throws Exception {
String[] data = {"a", "b"};
Iterator<?> iter = ObjectHelper.createIterator(data);
assertTrue("should have next", iter.hasNext());
Object a = iter.next();
assertEquals("a", "a", a);
assertTrue("should have next", iter.hasNext());
Object b = iter.next();
assertEquals("b", "b", b);
assertFalse("should not have a next", iter.hasNext());
}
    // null, empty and whitespace-only strings count as empty; everything else does not.
    @Test
    public void testIsEmpty() {
        assertTrue(org.apache.camel.util.ObjectHelper.isEmpty(null));
        assertTrue(org.apache.camel.util.ObjectHelper.isEmpty(""));
        assertTrue(org.apache.camel.util.ObjectHelper.isEmpty(" "))
@Test
public void testIsNotEmpty() {
assertFalse(org.apache.camel.util.ObjectHelper.isNotEmpty(null));
assertFalse(org.apache.camel.util.ObjectHelper.isNotEmpty(""));
assertFalse(org.apache.camel.util.ObjectHelper.isNotEmpty(" "));
assertTrue(org.apache.camel.util.ObjectHelper.isNotEmpty("A"));
assertTrue(org.apache.camel.util.ObjectHelper.isNotEmpty(" A"));
assertTrue(org.apache.camel.util.ObjectHelper.isNotEmpty(" A "));
assertTrue(org.apache.camel.util.ObjectHelper.isNotEmpty(new Object()));
}
@Test
public void testIteratorWithComma() {
Iterator<?> it = ObjectHelper.createIterator("Claus,Jonathan");
assertEquals("Claus", it.next());
assertEquals("Jonathan", it.next());
assertEquals(false, it.hasNext());
}
@Test
public void testIteratorWithOtherDelimiter() {
Iterator<?> it = ObjectHelper.createIterator("Claus#Jonathan", "#");
assertEquals("Claus", it.next());
assertEquals("Jonathan", it.next());
assertEquals(false, it.hasNext());
}
@Test
public void testIteratorEmpty() {
Iterator<?> it = ObjectHelper.createIterator("");
assertEquals(false, it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertEquals("no more element available for '' at the index 0", nsee.getMessage());
}
it = ObjectHelper.createIterator(" ");
assertEquals(false, it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertEquals("no more element available for ' ' at the index 0", nsee.getMessage());
}
it = ObjectHelper.createIterator(null);
assertEquals(false, it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
}
}
@Test
public void testIteratorIdempotentNext() {
Iterator<?> it = ObjectHelper.createIterator("a");
assertTrue(it.hasNext());
assertTrue(it.hasNext());
it.next();
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertEquals("no more element available for 'a' at the index 1", nsee.getMessage());
}
}
@Test
public void testIteratorIdempotentNextWithNodeList() {
NodeList nodeList = new NodeList() {
public Node item(int index) {
return null;
}
public int getLength() {
return 1;
}
};
Iterator<?> it = ObjectHelper.createIterator(nodeList);
assertTrue(it.hasNext());
assertTrue(it.hasNext());
it.next();
assertFalse(it.hasNext());
try {
it.next();
fail("Should have thrown exception");
} catch (NoSuchElementException nsee) {
// expected
assertTrue(nsee.getMessage(), nsee.getMessage().startsWith("no more element available for 'org.apache.camel.util.ObjectHelperTest$"));
assertTrue(nsee.getMessage(), nsee.getMessage().endsWith("at the index 1"));
}
}
@Test
public void testGetCamelContextPropertiesWithPrefix() {
CamelContext context = new DefaultCamelContext();
Map<String, String> properties = context.getGlobalOptions();
properties.put("camel.object.helper.test1", "test1");
properties.put("camel.object.helper.test2", "test2");
properties.put("camel.object.test", "test");
Properties result = CamelContextHelper.getCamelPropertiesWithPrefix("camel.object.helper.", context);
assertEquals("Get a wrong size properties", 2, result.size());
assertEquals("It should contain the test1", "test1", result.get("test1"));
assertEquals("It should contain the test2", "test2", result.get("test2"));
}
@Test
public void testEvaluateAsPredicate() throws Exception {
assertEquals(false, org.apache.camel.util.ObjectHelper.evaluateValuePredicate(null));
assertEquals(true, org.apache.camel.util.ObjectHelper.evaluateValuePredicate(123));
assertEquals(true, org.apache.camel.util.ObjectHelper.evaluateValuePredicate("true"));
assertEquals(true, org.apache.camel.util.ObjectHelper.evaluateValuePredicate("TRUE"));
assertEquals(false, org.apache.camel.util.ObjectHelper.evaluateValuePredicate("false"));
assertEquals(false, org.apache.camel.util.ObjectHelper.evaluateValuePredicate("FALSE"));
assertEquals(true, org.apache.camel.util.ObjectHelper.evaluateValuePredicate("foobar"));
assertEquals(true, org.apache.camel.util.ObjectHelper.evaluateValuePredicate(""));
assertEquals(true, org.apache.camel.util.ObjectHelper.evaluateValuePredicate(" "));
List<String> list = new ArrayList<>();
assertEquals(false, org.apache.camel.util.ObjectHelper.evaluateValuePredicate(list));
list.add("foo");
assertEquals(true, org.apache.camel.util.ObjectHelper.evaluateValuePredicate(list));
}
@Test
public void testIsPrimitiveArrayType() {
assertTrue(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(byte[].class));
assertTrue(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(short[].class));
assertTrue(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(int[].class));
assertTrue(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(long[].class));
assertTrue(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(float[].class));
assertTrue(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(double[].class));
assertTrue(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(char[].class));
assertTrue(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(boolean[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Object[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Byte[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Short[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Integer[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Long[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Float[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Double[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Character[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Boolean[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(Void[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(CamelContext[].class));
assertFalse(org.apache.camel.util.ObjectHelper.isPrimitiveArrayType(null));
}
@Test
public void testGetDefaultCharSet() {
assertNotNull(org.apache.camel.util.ObjectHelper.getDefaultCharacterSet());
}
@Test
public void testConvertPrimitiveTypeToWrapper() {
assertEquals("java.lang.Integer", org.apache.camel.util.ObjectHelper.convertPrimitiveTypeToWrapperType(int.class).getName());
assertEquals("java.lang.Long", org.apache.camel.util.ObjectHelper.convertPrimitiveTypeToWrapperType(long.class).getName());
assertEquals("java.lang.Double", org.apache.camel.util.ObjectHelper.convertPrimitiveTypeToWrapperType(double.class).getName());
assertEquals("java.lang.Float", org.apache.camel.util.ObjectHelper.convertPrimitiveTypeToWrapperType(float.class).getName());
assertEquals("java.lang.Short", org.apache.camel.util.ObjectHelper.convertPrimitiveTypeToWrapperType(short.class).getName());
assertEquals("java.lang.Byte", org.apache.camel.util.ObjectHelper.convertPrimitiveTypeToWrapperType(byte.class).getName());
assertEquals("java.lang.Boolean", org.apache.camel.util.ObjectHelper.convertPrimitiveTypeToWrapperType(boolean.class).getName());
assertEquals("java.lang.Character", org.apache.camel.util.ObjectHelper.convertPrimitiveTypeToWrapperType(char.class).getName());
// non primitive just fall through
assertEquals("java.lang.Object", org.apache.camel.util.ObjectHelper.convertPrimitiveTypeToWrapperType(Object.class).getName());
}
@Test
public void testAsString() {
String[] args = new String[] {"foo", "bar"};
String out = org.apache.camel.util.ObjectHelper.asString(args);
assertNotNull(out);
assertEquals("{foo, bar}", out);
}
@Test
public void testName() {
    // name() resolves a type to its fully qualified class name; a null type stays null.
    assertEquals("java.lang.Integer", org.apache.camel.util.ObjectHelper.name(Integer.class));
    assertEquals(null, org.apache.camel.util.ObjectHelper.name(null));
}
@Test
public void testClassName() {
    // className() reports the runtime class name of an instance; a null instance stays null.
    assertEquals("java.lang.Integer", org.apache.camel.util.ObjectHelper.className(Integer.valueOf("5")));
    assertEquals(null, org.apache.camel.util.ObjectHelper.className(null));
}
@Test
public void testGetSystemPropertyDefault() {
    // When the system property does not exist, the supplied default is returned.
    assertEquals("foo", org.apache.camel.util.ObjectHelper.getSystemProperty("CamelFooDoesNotExist", "foo"));
}
@Test
public void testGetSystemPropertyBooleanDefault() {
    // Boolean variant: a missing system property falls back to the supplied Boolean default.
    assertEquals(true, org.apache.camel.util.ObjectHelper.getSystemProperty("CamelFooDoesNotExist", Boolean.TRUE));
}
@Test
public void testMatches() {
    // Exercise matches() with a truthy list, a list led by Boolean.FALSE, and an empty list.
    List<Object> candidates = new ArrayList<>();
    candidates.add("foo");
    candidates.add("bar");
    assertEquals(true, org.apache.camel.util.ObjectHelper.matches(candidates));
    // A leading Boolean.FALSE makes the whole list a miss.
    candidates.clear();
    candidates.add(Boolean.FALSE);
    candidates.add("bar");
    assertEquals(false, org.apache.camel.util.ObjectHelper.matches(candidates));
    // An empty candidate list never matches.
    candidates.clear();
    assertEquals(false, org.apache.camel.util.ObjectHelper.matches(candidates));
}
@Test
public void testToBoolean() {
    // Booleans pass through unchanged, the string "true" parses, and integers map 1 -> TRUE, 0 -> FALSE.
    assertEquals(Boolean.TRUE, org.apache.camel.util.ObjectHelper.toBoolean(Boolean.TRUE));
    assertEquals(Boolean.TRUE, org.apache.camel.util.ObjectHelper.toBoolean("true"));
    assertEquals(Boolean.TRUE, org.apache.camel.util.ObjectHelper.toBoolean(Integer.valueOf("1")));
    assertEquals(Boolean.FALSE, org.apache.camel.util.ObjectHelper.toBoolean(Integer.valueOf("0")));
    // An unconvertible type (Date) yields null instead of throwing.
    assertEquals(null, org.apache.camel.util.ObjectHelper.toBoolean(new Date()));
}
@Test
public void testIteratorWithMessage() {
    // A Message with body "a,b,c" is iterated element by element.
    Message message = new DefaultMessage(new DefaultCamelContext());
    message.setBody("a,b,c");
    Iterator<?> parts = ObjectHelper.createIterator(message);
    for (String expected : new String[] {"a", "b", "c"}) {
        assertEquals(expected, parts.next());
    }
    assertFalse(parts.hasNext());
    try {
        parts.next();
        fail("Should have thrown exception");
    } catch (NoSuchElementException nsee) {
        // expected: the iterator is exhausted
    }
}
@Test
public void testIteratorWithEmptyMessage() {
    // An empty-string body yields an iterator with no elements at all.
    Message message = new DefaultMessage(new DefaultCamelContext());
    message.setBody("");
    Iterator<?> parts = ObjectHelper.createIterator(message);
    assertFalse(parts.hasNext());
    try {
        parts.next();
        fail("Should have thrown exception");
    } catch (NoSuchElementException nsee) {
        // expected: the exception message pinpoints the empty payload and the index
        assertEquals("no more element available for '' at the index 0", nsee.getMessage());
    }
}
@Test
public void testIteratorWithNullMessage() {
    // A null body likewise produces an empty iterator.
    Message message = new DefaultMessage(new DefaultCamelContext());
    message.setBody(null);
    Iterator<?> parts = ObjectHelper.createIterator(message);
    assertFalse(parts.hasNext());
    try {
        parts.next();
        fail("Should have thrown exception");
    } catch (NoSuchElementException nsee) {
        // expected: nothing to iterate
    }
}
@Test
public void testIterable() {
    final List<String> source = new ArrayList<>();
    source.add("A");
    source.add("B");
    source.add("C");
    // Wrap the list in a plain Iterable so createIterator exercises the Iterable code path.
    Iterable<String> iterable = new Iterable<String>() {
        public Iterator<String> iterator() {
            return source.iterator();
        }
    };
    Iterator<?> it = ObjectHelper.createIterator(iterable);
    for (String expected : new String[] {"A", "B", "C"}) {
        assertEquals(expected, it.next());
    }
    assertFalse(it.hasNext());
    try {
        it.next();
        fail("Should have thrown exception");
    } catch (NoSuchElementException nsee) {
        // expected: the iterator is exhausted
    }
}
@Test
public void testLookupConstantFieldValue() {
    // Resolves the value of a public constant field by name; unknown field or null class yields null.
    assertEquals("CamelFileName", org.apache.camel.util.ObjectHelper.lookupConstantFieldValue(Exchange.class, "FILE_NAME"));
    assertEquals(null, org.apache.camel.util.ObjectHelper.lookupConstantFieldValue(Exchange.class, "XXX"));
    assertEquals(null, org.apache.camel.util.ObjectHelper.lookupConstantFieldValue(null, "FILE_NAME"));
}
@Test
public void testHasDefaultPublicNoArgConstructor() {
    // This test class has a public no-arg constructor; MyStaticClass (declared elsewhere
    // in this test file) does not.
    assertTrue(org.apache.camel.util.ObjectHelper.hasDefaultPublicNoArgConstructor(ObjectHelperTest.class));
    assertFalse(org.apache.camel.util.ObjectHelper.hasDefaultPublicNoArgConstructor(MyStaticClass.class));
}
@Test
public void testIdentityHashCode() {
    // Two lookups on the same instance must produce the same identity code...
    MyDummyObject first = new MyDummyObject("Camel");
    String codeA = org.apache.camel.util.ObjectHelper.getIdentityHashCode(first);
    String codeB = org.apache.camel.util.ObjectHelper.getIdentityHashCode(first);
    assertEquals(codeA, codeB);
    // ...while a distinct instance built from the same value produces a different one.
    MyDummyObject second = new MyDummyObject("Camel");
    String codeC = org.apache.camel.util.ObjectHelper.getIdentityHashCode(second);
    assertNotSame(codeA, codeC);
}
@Test
public void testIsNaN() throws Exception {
    // Only the float/double NaN sentinels count as NaN; every other value
    // (including null, strings and booleans) must report false.
    assertTrue(org.apache.camel.util.ObjectHelper.isNaN(Float.NaN));
    assertTrue(org.apache.camel.util.ObjectHelper.isNaN(Double.NaN));
    assertFalse(org.apache.camel.util.ObjectHelper.isNaN(null));
    assertFalse(org.apache.camel.util.ObjectHelper.isNaN(""));
    assertFalse(org.apache.camel.util.ObjectHelper.isNaN("1.0"));
    assertFalse(org.apache.camel.util.ObjectHelper.isNaN(1));
    assertFalse(org.apache.camel.util.ObjectHelper.isNaN(1.5f));
    assertFalse(org.apache.camel.util.ObjectHelper.isNaN(1.5d));
    assertFalse(org.apache.camel.util.ObjectHelper.isNaN(false));
    assertFalse(org.apache.camel.util.ObjectHelper.isNaN(true));
}
@Test
public void testNotNull() {
    // notNull passes a non-null argument straight through, by reference.
    Long value = 3L;
    Long returned = org.apache.camel.util.ObjectHelper.notNull(value, "expected");
    assertSame("Didn't get the same object back!", value, returned);
    Long returnedWithHolder = org.apache.camel.util.ObjectHelper.notNull(value, "expected", "holder");
    assertSame("Didn't get the same object back!", value, returnedWithHolder);
    // A null argument must fail, with the parameter name in the message...
    Long absent = null;
    try {
        org.apache.camel.util.ObjectHelper.notNull(absent, "expected2");
        fail("Should have thrown exception");
    } catch (IllegalArgumentException iae) {
        assertEquals("expected2 must be specified", iae.getMessage());
    }
    // ...and with the holder appended when one is supplied.
    try {
        org.apache.camel.util.ObjectHelper.notNull(absent, "expected2", "holder");
        fail("Should have thrown exception");
    } catch (IllegalArgumentException iae) {
        assertEquals("expected2 must be specified on: holder", iae.getMessage());
    }
}
@Test
public void testSameMethodIsOverride() throws Exception {
    // A method trivially counts as overriding itself.
    Method m = MyOtherFooBean.class.getMethod("toString", Object.class);
    assertTrue(org.apache.camel.util.ObjectHelper.isOverridingMethod(m, m, false));
}
@Test
public void testOverloadIsNotOverride() throws Exception {
    // Same method name but a different parameter type is an overload, not an override.
    Method m1 = MyOtherFooBean.class.getMethod("toString", Object.class);
    Method m2 = MyOtherFooBean.class.getMethod("toString", String.class);
    assertFalse(org.apache.camel.util.ObjectHelper.isOverridingMethod(m2, m1, false));
}
@Test
public void testOverrideEquivalentSignatureFromSiblingClassIsNotOverride() throws Exception {
    // Double.intValue and Float.intValue share a signature, but neither class
    // extends the other, so one cannot override the other.
    Method m1 = Double.class.getMethod("intValue");
    Method m2 = Float.class.getMethod("intValue");
    assertFalse(org.apache.camel.util.ObjectHelper.isOverridingMethod(m2, m1, false));
}
@Test
public void testOverrideEquivalentSignatureFromUpperClassIsOverride() throws Exception {
    // Double.intValue overrides Number.intValue from its superclass.
    Method m1 = Double.class.getMethod("intValue");
    Method m2 = Number.class.getMethod("intValue");
    assertTrue(org.apache.camel.util.ObjectHelper.isOverridingMethod(m2, m1, false));
}
@Test
public void testInheritedMethodCanOverrideInterfaceMethod() throws Exception {
    // When checked against Clazz (fixture types declared elsewhere in this test file),
    // the size() inherited from AbstractClassSize counts as overriding InterfaceSize.size().
    Method m1 = AbstractClassSize.class.getMethod("size");
    Method m2 = InterfaceSize.class.getMethod("size");
    assertTrue(org.apache.camel.util.ObjectHelper.isOverridingMethod(Clazz.class, m2, m1, false));
}
@Test
public void testNonInheritedMethodCantOverrideInterfaceMethod() throws Exception {
    // Checked against the interface itself, the abstract class's size() is not
    // inherited there and therefore cannot count as an override.
    Method m1 = AbstractClassSize.class.getMethod("size");
    Method m2 = InterfaceSize.class.getMethod("size");
    assertFalse(org.apache.camel.util.ObjectHelper.isOverridingMethod(InterfaceSize.class, m2, m1, false));
}
}
| apache-2.0 |
shardingjdbc/sharding-jdbc | shardingsphere-underlying/shardingsphere-rewrite/shardingsphere-rewrite-engine/src/test/java/org/apache/shardingsphere/underlying/rewrite/impl/StandardParameterBuilderTest.java | 1764 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.underlying.rewrite.impl;
import org.apache.shardingsphere.underlying.rewrite.parameter.builder.impl.StandardParameterBuilder;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
/**
 * Unit test for {@link StandardParameterBuilder}: a parameter list with one
 * addition and one removal registered should be rebuilt accordingly.
 */
public final class StandardParameterBuilderTest {

    // Raw parameter list the builder starts from.
    private final List<Object> originalParameters = Arrays.asList(1, 2, 1, 5);

    private StandardParameterBuilder builder;

    @Before
    public void setUp() {
        // Register 7 as an added parameter at position 4 and mark position 1 as removed.
        builder = new StandardParameterBuilder(originalParameters);
        builder.addAddedParameters(4, Collections.singleton(7));
        builder.addRemovedParameters(1);
    }

    @Test
    public void assertGetParameters() {
        // Position 1 (value 2) is dropped and 7 is appended: [1, 1, 5, 7].
        assertThat(builder.getParameters(), is(Arrays.<Object>asList(1, 1, 5, 7)));
    }
}
| apache-2.0 |
wso2/carbon-identity-framework | components/identity-mgt/org.wso2.carbon.identity.mgt.endpoint.util/src/main/java/org/wso2/carbon/identity/mgt/endpoint/util/serviceclient/UserIdentityManagementAdminServiceClient.java | 3989 | /*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.mgt.endpoint.util.serviceclient;
import org.apache.axis2.AxisFault;
import org.apache.axis2.client.ServiceClient;
import org.wso2.carbon.identity.mgt.endpoint.util.IdentityManagementEndpointConstants;
import org.wso2.carbon.identity.mgt.endpoint.util.IdentityManagementServiceUtil;
import org.wso2.carbon.identity.mgt.stub.UserIdentityManagementAdminServiceIdentityMgtServiceExceptionException;
import org.wso2.carbon.identity.mgt.stub.UserIdentityManagementAdminServiceStub;
import org.wso2.carbon.identity.mgt.stub.dto.ChallengeQuestionDTO;
import org.wso2.carbon.identity.mgt.stub.dto.UserChallengesDTO;
import java.rmi.RemoteException;
/**
 * This class invokes the client operations of UserIdentityManagementService.
 */
public class UserIdentityManagementAdminServiceClient {

    private final UserIdentityManagementAdminServiceStub stub;

    /**
     * Instantiate UserIdentityManagementAdminServiceClient instance.
     * <p>
     * Builds the service endpoint URL from the configured service context and the
     * endpoint constant, then authenticates the underlying Axis2 service client.
     *
     * @throws AxisFault if the service stub cannot be created or authentication fails
     */
    public UserIdentityManagementAdminServiceClient() throws AxisFault {
        // Concatenate context URL and endpoint path, then collapse accidental double
        // slashes in the path while leaving the protocol separator ("http://", "https://") intact.
        String serviceURL = (IdentityManagementServiceUtil.getInstance().getServiceContextURL()
                + IdentityManagementEndpointConstants.ServiceEndpoints.USER_IDENTITY_MANAGEMENT_SERVICE)
                .replaceAll("(?<!(http:|https:))//", "/");
        stub = new UserIdentityManagementAdminServiceStub(serviceURL);
        ServiceClient client = stub._getServiceClient();
        IdentityManagementServiceUtil.getInstance().authenticate(client);
    }

    /**
     * Returns all challenge questions configured in the IdP.
     *
     * @return an array of ChallengeQuestionDTO instances
     * @throws UserIdentityManagementAdminServiceIdentityMgtServiceExceptionException
     * @throws RemoteException
     */
    public ChallengeQuestionDTO[] getAllChallengeQuestions()
            throws UserIdentityManagementAdminServiceIdentityMgtServiceExceptionException, RemoteException {
        return stub.getAllChallengeQuestions();
    }

    /**
     * Sets challenge questions in the IdP.
     *
     * @param challengeQuestionDTOs an array of ChallengeQuestionDTO instances
     * @throws UserIdentityManagementAdminServiceIdentityMgtServiceExceptionException
     * @throws RemoteException
     */
    public void setChallengeQuestions(ChallengeQuestionDTO[] challengeQuestionDTOs)
            throws UserIdentityManagementAdminServiceIdentityMgtServiceExceptionException, RemoteException {
        stub.setChallengeQuestions(challengeQuestionDTOs);
    }

    /**
     * Sets a chosen set of challenge questions and their answers for the user.
     *
     * @param userName           username of the user
     * @param userChallengesDTOs an array of UserChallengesDTO instances
     * @throws UserIdentityManagementAdminServiceIdentityMgtServiceExceptionException
     * @throws RemoteException
     */
    public void setChallengeQuestionsOfUser(String userName, UserChallengesDTO[] userChallengesDTOs)
            throws UserIdentityManagementAdminServiceIdentityMgtServiceExceptionException, RemoteException {
        stub.setChallengeQuestionsOfUser(userName, userChallengesDTOs);
    }
}
| apache-2.0 |
kaazing/netx | ws/src/main/java/org/kaazing/netx/ws/internal/util/OptimisticReentrantLock.java | 3417 | /**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.netx.ws.internal.util;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
// ### TODO: 1. happens-before relationship should be established between the thread that releases the lock and the next thread
//              that acquires the lock.
//           2. Evaluate with Martin's sun.misc.Unsafe based AtomicSequence implementation.
//           3. Implement fairness? It's expensive. We don't want to create garbage for fairness. Also, it's not clear whether
//              it is needed.
/**
 * A spin-based reentrant {@link Lock} built from two atomics: {@code owner} (the thread
 * currently holding the lock, or null when free) and {@code stamp} (the owner's reentrancy
 * depth). {@link #lock()} busy-spins until the owner CAS succeeds, so this is intended for
 * short critical sections.
 * <p>
 * Unsupported operations: {@link #lockInterruptibly()}, timed {@link #tryLock(long, TimeUnit)}
 * and {@link #newCondition()} all throw {@link UnsupportedOperationException}.
 * <p>
 * NOTE(review): unlike {@code java.util.concurrent.locks.ReentrantLock}, {@link #unlock()}
 * called by a thread that does not own the lock is silently ignored rather than throwing
 * {@code IllegalMonitorStateException}.
 */
public class OptimisticReentrantLock implements Lock {
    // Thread currently holding the lock; null means the lock is free.
    private final AtomicReference<Thread> owner;
    // Reentrancy depth; only mutated by the thread that holds (or is acquiring) the lock.
    private final AtomicInteger stamp;
    public OptimisticReentrantLock() {
        this.owner = new AtomicReference<Thread>(null);
        this.stamp = new AtomicInteger(0);
    }
    /**
     * Acquires the lock, spinning until it becomes free. A reentrant acquisition by the
     * current owner simply increments the depth.
     */
    @Override
    public void lock() {
        Thread currentThread = Thread.currentThread();
        if (currentThread == owner.get()) {
            // Reentrant acquisition: we already own the lock, just deepen the count.
            stamp.incrementAndGet();
        }
        else {
            while (!owner.compareAndSet(null, currentThread)) {
                // Keep spinning till the lock is acquired.
            }
            // Only the new owner reaches this point, so the plain set is race-free.
            stamp.set(1);
        }
    }
    /**
     * Releases one level of the lock. The lock is actually freed only when the depth
     * drops to zero; calls by non-owner threads are no-ops.
     */
    @Override
    public void unlock() {
        Thread currentThread = Thread.currentThread();
        if (currentThread == owner.get() && stamp.decrementAndGet() == 0) {
            // Order in which atomics are updated is important.
            // stamp MUST be zero before nulling the owner.
            // Defeat unlock / lock race by setting owner to null only if not already updated.
            owner.compareAndSet(currentThread, null);
        }
    }
    /** Not supported by this implementation. */
    @Override
    public void lockInterruptibly() throws InterruptedException {
        throw new UnsupportedOperationException("Unsupported operation");
    }
    /**
     * Attempts to acquire the lock without blocking.
     *
     * @return true if the lock was acquired (including reentrantly), false otherwise
     */
    @Override
    public boolean tryLock() {
        Thread currentThread = Thread.currentThread();
        if (currentThread == owner.get()) {
            // Reentrant acquisition always succeeds.
            stamp.incrementAndGet();
            return true;
        }
        boolean locked = owner.compareAndSet(null, currentThread);
        if (locked) {
            stamp.set(1);
        }
        return locked;
    }
    /** Not supported by this implementation. */
    @Override
    public boolean tryLock(long time, TimeUnit unit) throws InterruptedException {
        throw new UnsupportedOperationException("Unsupported operation");
    }
    /** Not supported by this implementation. */
    @Override
    public Condition newCondition() {
        throw new UnsupportedOperationException("Unsupported operation");
    }
    /** @return the thread currently holding the lock, or null if the lock is free */
    public Thread getOwner() {
        return owner.get();
    }
    /** @return the current reentrancy depth (0 when the lock is free) */
    public int getStamp() {
        return stamp.get();
    }
}
| apache-2.0 |
bshp/midPoint | model/model-impl/src/main/java/com/evolveum/midpoint/model/impl/controller/SelfTestData.java | 52401 | /*
* Copyright (c) 2010-2013 Evolveum and contributors
*
* This work is dual-licensed under the Apache License 2.0
* and European Union Public License. See LICENSE file for details.
*/
package com.evolveum.midpoint.model.impl.controller;
/**
* There are som ugly data and long strings. So let's keep this separate to keep the main code readable.
*
* @author Radovan Semancik
*
*/
public class SelfTestData {
/**
* Long text with national characters. This is used to test whether the database can store a long text
* and that it maintains national characters.
*/
public static final String POLICIJA =
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pólicíja, pólicíja, Sálašáry, práva Jova. Z césty príva, z césty práva, sýmpatika, korpora. " +
"Populáry, Karpatula. Juvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. " +
"Pōlicĭja, pōlicĭja, Sãlaŝåry, pràva Jova. Z céßty prïva, z cèßty pråva, sŷmpatika, korpora. " +
"Populáry, Karpatula. Ðuvá svorno policána. Kerléš na strach, policíja. Bumtarára, bumtarára, bum. ";
}
| apache-2.0 |
florindragos/open-Autoscaler | servicebroker/src/main/java/org/cloudfoundry/autoscaler/servicebroker/nls/messages_zh.java | 1366 | /**
* WARNING! THIS FILE IS AUTOMATICALLY GENERATED! DO NOT MODIFY IT!
* Generated on Mon Feb 29 13:37:57 CST 2016
*/
package org.cloudfoundry.autoscaler.servicebroker.nls;
public class messages_zh extends java.util.ListResourceBundle
{
    /**
     * Returns the Chinese (zh) message catalog for the Auto-Scaling service broker.
     * Keys are looked up by message id; values contain the localized text with the
     * CWSCV message code prefix (Unicode-escaped Chinese).
     *
     * NOTE: per the file header this class is auto-generated -- regenerate the file
     * instead of hand-editing the resource entries.
     */
    public Object[][] getContents() {
        return resources;
    }
    // {message-key, localized-message} pairs; message text must stay byte-identical
    // to what the generator produced (it is matched/emitted at runtime).
    private final static Object[][] resources= {
        { "AlreadyBindedAnotherService", "CWSCV2004E: \u5176\u4ed6 Auto-Scaling \u670d\u52a1\u5df2\u4e0e\u5e94\u7528\u7a0b\u5e8f\u7ed1\u5b9a\u3002" },
        { "BindServiceFail", "CWSCV2003E: Auto-Scaling \u670d\u52a1\u4ee3\u7406\u7a0b\u5e8f\u65e0\u6cd5\u7ed1\u5b9a\u670d\u52a1\u5b9e\u4f8b {0}\u3002" },
        { "ConfigurationLoadFail", "CWSCV2001E: \u672a\u6b63\u786e\u914d\u7f6e Auto-Scaling \u670d\u52a1\u4ee3\u7406\u7a0b\u5e8f\u3002" },
        { "DeleteServiceFail", "CWSCV2005E: Auto-Scaling \u670d\u52a1\u4ee3\u7406\u7a0b\u5e8f\u65e0\u6cd5\u5220\u9664\u670d\u52a1\u5b9e\u4f8b {0}\u3002" },
        { "ServerUrlMappingNotFound", "CWSCV2006E: \u6b64 Auto-Scaling \u670d\u52a1\u5df2\u4e2d\u65ad\u3002\u8bf7\u5220\u9664\u8be5\u670d\u52a1\u5e76\u5c06\u8be5\u5e94\u7528\u7a0b\u5e8f\u91cd\u65b0\u7ed1\u5b9a\u81f3\u65b0\u670d\u52a1\u3002" },
        { "UnbindServiceFail", "CWSCV2002E: Auto-Scaling \u670d\u52a1\u4ee3\u7406\u7a0b\u5e8f\u65e0\u6cd5\u53d6\u6d88\u7ed1\u5b9a\u670d\u52a1\u5b9e\u4f8b {0}\u3002" }
    };
}
| apache-2.0 |
monetate/druid | indexing-service/src/main/java/org/apache/druid/indexing/seekablestream/SeekableStreamIndexTaskClient.java | 13540 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.seekablestream;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ListenableFuture;
import org.apache.druid.indexing.common.IndexTaskClient;
import org.apache.druid.indexing.common.RetryPolicy;
import org.apache.druid.indexing.common.TaskInfoProvider;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.RE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.jackson.JacksonUtils;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.response.StringFullResponseHolder;
import org.apache.druid.segment.incremental.ParseExceptionReport;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/**
 * HTTP client used to talk to a running {@code SeekableStreamIndexTask} (e.g. a Kafka/Kinesis
 * indexing task) through its chat-handler endpoints ("stop", "pause", "offsets/current", ...).
 * Each synchronous method submits one request via the {@code IndexTaskClient} helpers; the
 * {@code *Async} variants simply run the synchronous call on this client's executor via
 * {@code doAsync}.
 *
 * <p>Error-handling convention visible in this class: a {@code NoTaskLocationException}
 * (task location unknown/gone) is mapped to a benign default (false / null / empty
 * collection), while {@code IOException} is generally rethrown as a RuntimeException.
 *
 * @param <PartitionIdType>    partition identifier type of the stream
 * @param <SequenceOffsetType> sequence-number/offset type of the stream
 */
public abstract class SeekableStreamIndexTaskClient<PartitionIdType, SequenceOffsetType> extends IndexTaskClient
{
  // Jackson type token for deserializing the task's unparseable-events response.
  private static final TypeReference<List<ParseExceptionReport>> TYPE_REFERENCE_LIST_PARSE_EXCEPTION_REPORT =
      new TypeReference<List<ParseExceptionReport>>()
      {
      };
  private static final EmittingLogger log = new EmittingLogger(SeekableStreamIndexTaskClient.class);

  public SeekableStreamIndexTaskClient(
      HttpClient httpClient,
      ObjectMapper jsonMapper,
      TaskInfoProvider taskInfoProvider,
      String dataSource,
      int numThreads,
      Duration httpTimeout,
      long numRetries
  )
  {
    super(httpClient, jsonMapper, taskInfoProvider, httpTimeout, dataSource, numThreads, numRetries);
  }

  /**
   * Asks the task to stop.
   *
   * @param id      task id
   * @param publish if true, request the task to publish its segments before stopping
   * @return true if the task acknowledged the stop (or is already no longer running);
   *         false if its location is unknown or the request failed
   */
  public boolean stop(final String id, final boolean publish)
  {
    log.debug("Stop task[%s] publish[%s]", id, publish);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(
          id,
          HttpMethod.POST,
          "stop",
          publish ? "publish=true" : null,
          true
      );
      return isSuccess(response);
    }
    catch (NoTaskLocationException e) {
      return false;
    }
    catch (TaskNotRunnableException e) {
      // A task that already finished cannot be stopped; treat as success.
      log.info("Task [%s] couldn't be stopped because it is no longer running", id);
      return true;
    }
    catch (Exception e) {
      log.warn(e, "Exception while stopping task [%s]", id);
      return false;
    }
  }

  /**
   * Asks a paused task to resume reading.
   *
   * @return true on HTTP success, false if the task location is unknown or the call failed
   */
  public boolean resume(final String id)
  {
    log.debug("Resume task[%s]", id);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(id, HttpMethod.POST, "resume", null, true);
      return isSuccess(response);
    }
    catch (NoTaskLocationException | IOException e) {
      log.warn(e, "Exception while stopping task [%s]", id);
      return false;
    }
  }

  /**
   * Asks the task to pause and returns the offsets it paused at.
   *
   * <p>If the task answers 200 OK it is already paused and the body contains the current
   * offsets. If it answers 202 ACCEPTED, the pause was received but not yet effective, so
   * this method polls {@link #getStatus} under a retry policy until the task reports
   * PAUSED, then fetches the offsets.
   *
   * @return partition -> offset map at pause time, or an empty map if the task location is unknown
   * @throws RuntimeException (RE/ISE) on I/O errors, interruption, unexpected response codes,
   *         or if the task never reaches PAUSED before retries are exhausted
   */
  public Map<PartitionIdType, SequenceOffsetType> pause(final String id)
  {
    log.debug("Pause task[%s]", id);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(
          id,
          HttpMethod.POST,
          "pause",
          null,
          true
      );
      final HttpResponseStatus responseStatus = response.getStatus();
      final String responseContent = response.getContent();
      if (responseStatus.equals(HttpResponseStatus.OK)) {
        log.info("Task [%s] paused successfully", id);
        return deserializeMap(responseContent, Map.class, getPartitionType(), getSequenceType());
      } else if (responseStatus.equals(HttpResponseStatus.ACCEPTED)) {
        // The task received the pause request, but its status hasn't been changed yet.
        final RetryPolicy retryPolicy = newRetryPolicy();
        while (true) {
          final SeekableStreamIndexTaskRunner.Status status = getStatus(id);
          if (status == SeekableStreamIndexTaskRunner.Status.PAUSED) {
            return getCurrentOffsets(id, true);
          }

          // Null delay means the retry policy is exhausted.
          final Duration delay = retryPolicy.getAndIncrementRetryDelay();
          if (delay == null) {
            throw new ISE(
                "Task [%s] failed to change its status from [%s] to [%s], aborting",
                id,
                status,
                SeekableStreamIndexTaskRunner.Status.PAUSED
            );
          } else {
            final long sleepTime = delay.getMillis();
            log.info(
                "Still waiting for task [%s] to change its status to [%s]; will try again in [%s]",
                id,
                SeekableStreamIndexTaskRunner.Status.PAUSED,
                new Duration(sleepTime).toString()
            );
            Thread.sleep(sleepTime);
          }
        }
      } else {
        throw new ISE(
            "Pause request for task [%s] failed with response [%s] : [%s]",
            id,
            responseStatus,
            responseContent
        );
      }
    }
    catch (NoTaskLocationException e) {
      log.error("Exception [%s] while pausing Task [%s]", e.getMessage(), id);
      return ImmutableMap.of();
    }
    catch (IOException | InterruptedException e) {
      throw new RE(e, "Exception [%s] while pausing Task [%s]", e.getMessage(), id);
    }
  }

  /**
   * Fetches the task's runner status; returns NOT_STARTED if the task has no known location yet.
   */
  public SeekableStreamIndexTaskRunner.Status getStatus(final String id)
  {
    log.debug("GetStatus task[%s]", id);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(id, HttpMethod.GET, "status", null, true);
      return deserialize(response.getContent(), SeekableStreamIndexTaskRunner.Status.class);
    }
    catch (NoTaskLocationException e) {
      return SeekableStreamIndexTaskRunner.Status.NOT_STARTED;
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Fetches the task's start time, or null if the task hasn't started or its location is unknown.
   */
  @Nullable
  public DateTime getStartTime(final String id)
  {
    log.debug("GetStartTime task[%s]", id);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(id, HttpMethod.GET, "time/start", null, true);
      // An empty body means the task hasn't recorded a start time yet.
      return response.getContent() == null || response.getContent().isEmpty()
             ? null
             : deserialize(response.getContent(), DateTime.class);
    }
    catch (NoTaskLocationException e) {
      return null;
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Fetches the task's row-stats moving averages as a generic map; empty map on missing
   * location or empty response.
   */
  public Map<String, Object> getMovingAverages(final String id)
  {
    log.debug("GetMovingAverages task[%s]", id);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(
          id,
          HttpMethod.GET,
          "rowStats",
          null,
          true
      );

      if (response.getContent() == null || response.getContent().isEmpty()) {
        log.warn("Got empty response when calling getMovingAverages, id[%s]", id);
        return Collections.emptyMap();
      }

      return deserialize(response.getContent(), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT);
    }
    catch (NoTaskLocationException e) {
      log.warn(e, "Got NoTaskLocationException when calling getMovingAverages, id[%s]", id);
      return Collections.emptyMap();
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Fetches the task's recent parse-exception reports; empty list on missing location or
   * empty response.
   */
  public List<ParseExceptionReport> getParseErrors(final String id)
  {
    log.debug("getParseErrors task[%s]", id);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(
          id,
          HttpMethod.GET,
          "unparseableEvents",
          null,
          true
      );

      if (response.getContent() == null || response.getContent().isEmpty()) {
        log.warn("Got empty response when calling getParseErrors, id[%s]", id);
        return Collections.emptyList();
      }

      return deserialize(response.getContent(), TYPE_REFERENCE_LIST_PARSE_EXCEPTION_REPORT);
    }
    catch (NoTaskLocationException e) {
      log.warn(e, "Got NoTaskLocationException when calling getParseErrors, id[%s]", id);
      return Collections.emptyList();
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Fetches the offsets the task is currently reading at.
   *
   * @param retry whether the underlying request should be retried on transient failures
   * @return partition -> offset map; empty map if the task location is unknown
   */
  public Map<PartitionIdType, SequenceOffsetType> getCurrentOffsets(final String id, final boolean retry)
  {
    log.debug("GetCurrentOffsets task[%s] retry[%s]", id, retry);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(
          id,
          HttpMethod.GET,
          "offsets/current",
          null,
          retry
      );
      return deserializeMap(response.getContent(), Map.class, getPartitionType(), getSequenceType());
    }
    catch (NoTaskLocationException e) {
      return ImmutableMap.of();
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Fetches the task's sequence checkpoints: sequence id -> (partition -> offset), ordered
   * by sequence id. Empty map if the task location is unknown.
   */
  public TreeMap<Integer, Map<PartitionIdType, SequenceOffsetType>> getCheckpoints(final String id, final boolean retry)
  {
    log.debug("GetCheckpoints task[%s] retry[%s]", id, retry);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(id, HttpMethod.GET, "checkpoints", null, retry);
      return deserializeNestedValueMap(
          response.getContent(),
          TreeMap.class,
          Integer.class,
          Map.class,
          getPartitionType(),
          getSequenceType()
      );
    }
    catch (NoTaskLocationException e) {
      return new TreeMap<>();
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  public ListenableFuture<TreeMap<Integer, Map<PartitionIdType, SequenceOffsetType>>> getCheckpointsAsync(
      final String id,
      final boolean retry
  )
  {
    return doAsync(() -> getCheckpoints(id, retry));
  }

  /**
   * Fetches the end offsets assigned to the task; empty map if the task location is unknown.
   */
  public Map<PartitionIdType, SequenceOffsetType> getEndOffsets(final String id)
  {
    log.debug("GetEndOffsets task[%s]", id);
    try {
      final StringFullResponseHolder response = submitRequestWithEmptyContent(id, HttpMethod.GET, "offsets/end", null, true);
      return deserializeMap(response.getContent(), Map.class, getPartitionType(), getSequenceType());
    }
    catch (NoTaskLocationException e) {
      return ImmutableMap.of();
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Posts new end offsets to the task.
   *
   * @param endOffsets partition -> offset map at which the task should stop reading
   * @param finalize   if true, the task should publish segments once the offsets are reached
   * @return true on HTTP success, false if the task location is unknown
   * @throws IOException on communication failure
   */
  public boolean setEndOffsets(
      final String id,
      final Map<PartitionIdType, SequenceOffsetType> endOffsets,
      final boolean finalize
  ) throws IOException
  {
    log.debug("SetEndOffsets task[%s] endOffsets[%s] finalize[%s]", id, endOffsets, finalize);
    try {
      final StringFullResponseHolder response = submitJsonRequest(
          id,
          HttpMethod.POST,
          "offsets/end",
          StringUtils.format("finish=%s", finalize),
          serialize(endOffsets),
          true
      );
      return isSuccess(response);
    }
    catch (NoTaskLocationException e) {
      return false;
    }
  }

  // ---- Async variants: each just runs the synchronous method on the client executor. ----

  public ListenableFuture<Boolean> stopAsync(final String id, final boolean publish)
  {
    return doAsync(() -> stop(id, publish));
  }

  public ListenableFuture<Boolean> resumeAsync(final String id)
  {
    return doAsync(() -> resume(id));
  }

  public ListenableFuture<DateTime> getStartTimeAsync(final String id)
  {
    return doAsync(() -> getStartTime(id));
  }

  public ListenableFuture<Map<PartitionIdType, SequenceOffsetType>> pauseAsync(final String id)
  {
    return doAsync(() -> pause(id));
  }

  public ListenableFuture<Boolean> setEndOffsetsAsync(
      final String id,
      final Map<PartitionIdType, SequenceOffsetType> endOffsets,
      final boolean finalize
  )
  {
    return doAsync(() -> setEndOffsets(id, endOffsets, finalize));
  }

  public ListenableFuture<Map<PartitionIdType, SequenceOffsetType>> getCurrentOffsetsAsync(
      final String id,
      final boolean retry
  )
  {
    return doAsync(() -> getCurrentOffsets(id, retry));
  }

  public ListenableFuture<Map<PartitionIdType, SequenceOffsetType>> getEndOffsetsAsync(final String id)
  {
    return doAsync(() -> getEndOffsets(id));
  }

  public ListenableFuture<Map<String, Object>> getMovingAveragesAsync(final String id)
  {
    return doAsync(() -> getMovingAverages(id));
  }

  public ListenableFuture<List<ParseExceptionReport>> getParseErrorsAsync(final String id)
  {
    return doAsync(() -> getParseErrors(id));
  }

  public ListenableFuture<SeekableStreamIndexTaskRunner.Status> getStatusAsync(final String id)
  {
    return doAsync(() -> getStatus(id));
  }

  /** Concrete runtime class of the partition id, used for JSON deserialization of offset maps. */
  protected abstract Class<PartitionIdType> getPartitionType();

  /** Concrete runtime class of the sequence offset, used for JSON deserialization of offset maps. */
  protected abstract Class<SequenceOffsetType> getSequenceType();
}
| apache-2.0 |
Maghoumi/GP-Tracker | lib/ecj/ec/simple/SimpleFitness.java | 4901 | /*
Copyright 2006 by Sean Luke
Licensed under the Academic Free License version 3.0
See the file "LICENSE" for more information
*/
package ec.simple;
import ec.Fitness;
import ec.EvolutionState;
import ec.util.*;
import java.io.*;
/*
* SimpleFitness.java
*
* Created: Tue Aug 10 20:10:42 1999
* By: Sean Luke
*/
/**
* A simple default fitness, consisting of a single floating-point value
* where fitness A is superior to fitness B if and only if A > B.
* Fitness values may range from (-infinity,infinity) exclusive -- that is,
* you may not have infinite fitnesses.
*
* <p>Some kinds of selection methods require a more stringent definition of
* fitness. For example, FitProportionateSelection requires that fitnesses
* be non-negative (since it must place them into a proportionate distribution).
* You may wish to restrict yourself to values in [0,1] or [0,infinity) in
* such cases.
*
<p><b>Default Base</b><br>
simple.fitness
* @author Sean Luke
* @version 1.0
*/
/**
 * A basic scalar fitness: one float value, where fitness A beats fitness B
 * exactly when A's value is larger. Values must be finite; infinities and NaN
 * are rejected (replaced with 0 plus a warning). Some selection operators
 * (e.g. fitness-proportionate selection) additionally require non-negative
 * values, so callers may want to restrict themselves to [0, infinity).
 */
public class SimpleFitness extends Fitness
    {
    protected float fitness;
    protected boolean isIdeal;

    public Parameter defaultBase()
        {
        return SimpleDefaults.base().push(P_FITNESS);
        }

    /**
     * Sets the fitness value and marks it as non-ideal.
     * @deprecated use {@link #setFitness(EvolutionState, float, boolean)} so the
     * ideal flag can be specified explicitly.
     */
    public void setFitness(final EvolutionState state, float _f)
        {
        setFitness(state, _f, false);
        }

    /**
     * Sets the fitness value and the ideal flag. Any value is permitted except
     * NaN and the infinities, which are replaced by 0 with a warning.
     */
    public void setFitness(final EvolutionState state, float _f, boolean _isIdeal)
        {
        // Reject non-finite values; everything else (positive or negative) is fine.
        if (Float.isNaN(_f) || Float.isInfinite(_f))
            {
            state.output.warning("Bad fitness: " + _f + ", setting to 0.");
            fitness = 0;
            }
        else
            {
            fitness = _f;
            }
        isIdeal = _isIdeal;
        }

    /** Returns the raw fitness value. */
    public float fitness()
        {
        return fitness;
        }

    public void setup(final EvolutionState state, Parameter base)
        {
        super.setup(state,base); // unnecessary but what the heck
        }

    /** True if this fitness was flagged as ideal via setFitness(...). */
    public boolean isIdealFitness()
        {
        return isIdeal;
        }

    /** Two SimpleFitnesses are equivalent when their values are equal. */
    public boolean equivalentTo(final Fitness _fitness)
        {
        return fitness() == _fitness.fitness();
        }

    /** This fitness is better exactly when its value is strictly larger. */
    public boolean betterThan(final Fitness _fitness)
        {
        return fitness() > _fitness.fitness();
        }

    public String fitnessToString()
        {
        return FITNESS_PREAMBLE + Code.encode(fitness());
        }

    public String fitnessToStringForHumans()
        {
        return FITNESS_PREAMBLE + fitness();
        }

    /**
     * Reads the fitness value from a line produced by fitnessToString().
     * Presently does not decode the fact that the fitness is ideal or not.
     */
    public void readFitness(final EvolutionState state,
        final LineNumberReader reader)
        throws IOException
        {
        setFitness(state, Code.readFloatWithPreamble(FITNESS_PREAMBLE, state, reader));
        }

    /** Writes value, ideal flag, and trial data in binary form. */
    public void writeFitness(final EvolutionState state,
        final DataOutput dataOutput) throws IOException
        {
        dataOutput.writeFloat(fitness);
        dataOutput.writeBoolean(isIdeal);
        writeTrials(state, dataOutput);
        }

    /** Reads value, ideal flag, and trial data in binary form. */
    public void readFitness(final EvolutionState state,
        final DataInput dataInput) throws IOException
        {
        fitness = dataInput.readFloat();
        isIdeal = dataInput.readBoolean();
        readTrials(state, dataInput);
        }

    /**
     * Sets this fitness to the arithmetic mean of the given fitnesses; the result
     * is ideal only if every input was ideal. The plain running sum is not
     * numerically stable, but this is meant for combining a small number of
     * trials, so the error is negligible.
     */
    public void setToMeanOf(EvolutionState state, Fitness[] fitnesses)
        {
        double total = 0;
        boolean everyoneIdeal = true;
        for (Fitness each : fitnesses)
            {
            SimpleFitness sf = (SimpleFitness) each;
            total += sf.fitness;
            everyoneIdeal = everyoneIdeal && sf.isIdeal;
            }
        fitness = (float)(total / fitnesses.length);
        isIdeal = everyoneIdeal;
        }
    }
| apache-2.0 |
Sumei1009/HubTurbo | src/main/java/filter/lexer/Lexer.java | 2996 | package filter.lexer;
import filter.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * A regex-driven lexer for the filter language. Trailing whitespace is removed
 * from the input up front; interior whitespace between tokens is skipped. Rules
 * are tried in declaration order, so multi-character operators and dates take
 * priority over the more general SYMBOL rule.
 */
public class Lexer {

    private static final boolean SKIP_WHITESPACE = true;
    private static final Pattern NO_WHITESPACE = Pattern.compile("\\S");

    // Token rules, in priority order (earlier entries shadow later ones).
    private List<Rule> rules = Arrays.asList(
        new Rule("AND|&&?", TokenType.AND),
        new Rule("OR|\\|\\|?", TokenType.OR),
        new Rule("NOT|~|!|-", TokenType.NOT),

        // These have higher priority than Symbol
        new Rule("\\d{4}-\\d{1,2}-\\d{1,2}", TokenType.DATE), // YYYY-MM?-DD?
        new Rule("[A-Za-z]+\\s*:", TokenType.QUALIFIER),

        new Rule("[A-Za-z0-9#][/A-Za-z0-9.'-]*", TokenType.SYMBOL),
        new Rule("\\(", TokenType.LBRACKET),
        new Rule("\\)", TokenType.RBRACKET),
        new Rule("\\\"", TokenType.QUOTE),
        new Rule(",", TokenType.COMMA),
        new Rule("\\.\\.", TokenType.DOTDOT),

        // These have higher priority than < and >
        new Rule("<=", TokenType.LTE),
        new Rule(">=", TokenType.GTE),

        new Rule("<", TokenType.LT),
        new Rule(">", TokenType.GT),
        new Rule("\\*", TokenType.STAR)
    );

    private String input;
    private int position;

    public Lexer(String input) {
        this.input = stripTrailingWhitespace(input);
        this.position = 0;
    }

    private Pattern trailingWhitespace = Pattern.compile("\\s+$");

    private String stripTrailingWhitespace(String input) {
        return trailingWhitespace.matcher(input).replaceAll("");
    }

    /**
     * Consumes and returns the next token, advancing {@code position}.
     * Returns an EOF token at (or past) the end of input; throws
     * ParseException on a character no rule accepts.
     */
    private Token nextToken() {
        if (position >= input.length()) {
            return new Token(TokenType.EOF, "");
        }

        if (SKIP_WHITESPACE) {
            // Jump to the next non-whitespace character, if any remains.
            Matcher skipper = NO_WHITESPACE.matcher(input).region(position, input.length());
            if (!skipper.find()) {
                return new Token(TokenType.EOF, "");
            }
            position = skipper.start();
        }

        for (Rule rule : rules) {
            Matcher matcher = rule.getPattern().matcher(input).region(position, input.length());
            if (!matcher.lookingAt()) {
                continue;
            }
            String lexeme = matcher.group();
            position += lexeme.length();
            return new Token(rule.getTokenType(), lexeme);
        }

        throw new ParseException("Unrecognised token " + input.charAt(position) + " at " + position);
    }

    /**
     * Tokenizes the whole input. The returned list always ends with an EOF token.
     */
    public ArrayList<Token> lex() {
        ArrayList<Token> tokens = new ArrayList<>();

        Token last = null;
        while (position < input.length()
                && (last == null || last.getType() != TokenType.EOF)) {
            last = nextToken();
            tokens.add(last);
        }
        tokens.add(nextToken()); // EOF
        return tokens;
    }
}
| apache-2.0 |
drankye/SSM | smart-hadoop-support/smart-hadoop/src/main/java/org/smartdata/hdfs/action/move/MoverExecutor.java | 7601 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.smartdata.hdfs.action.move;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.datatransfer.TrustedChannelResolver;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil;
import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.smartdata.hdfs.CompatibilityHelperLoader;
import org.smartdata.model.action.FileMovePlan;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
* A light-weight executor for Mover.
*/
public class MoverExecutor {
static final Logger LOG = LoggerFactory.getLogger(MoverExecutor.class);
  private Configuration conf;                       // HDFS configuration used to build clients
  private URI namenode;                             // namenode URI (presumably set in init(...) -- not visible here)
  private String fileName;                          // path of the file whose replicas are being moved
  private NameNodeConnector nnc;                    // namenode connector (usage not visible in this chunk)
  private DFSClient dfsClient;                      // client used for getFileInfo/getLocatedBlocks
  private SaslDataTransferClient saslClient;        // SASL client for secured data transfer (usage not visible here)
  private int maxConcurrentMoves;                   // thread-pool size for concurrent replica moves
  private int maxRetryTimes;                        // number of retry passes over failed moves
  private ExecutorService moveExecutor;             // pool running ReplicaMove tasks; created in executeMove
  private List<ReplicaMove> allMoves;               // all scheduled replica moves for this file
  private Map<Long, Block> sourceBlockMap;          // block id -> Block (populated when parsing the plan)
  private Map<String, DatanodeInfo> sourceDatanodeMap; // datanode name -> info (populated when parsing the plan)
  private MoverStatus status;                       // shared status object reported back to the caller
  private List<LocatedBlock> locatedBlocks;         // located blocks of the file, fetched in executeMove
public MoverExecutor(MoverStatus status, Configuration conf,
int maxRetryTimes, int maxConcurrentMoves) {
this.status = status;
this.conf = conf;
this.maxRetryTimes = maxRetryTimes;
this.maxConcurrentMoves = maxConcurrentMoves;
}
/**
* Execute a move action providing the schedule plan
* @param plan the schedule plan of mover
* @return number of failed moves
* @throws Exception
*/
public int executeMove(FileMovePlan plan, PrintStream resultOs, PrintStream logOs) throws Exception {
if (plan == null) {
throw new RuntimeException("Schedule plan for mover is null");
}
init(plan);
HdfsFileStatus fileStatus = dfsClient.getFileInfo(fileName);
if (fileStatus == null) {
throw new RuntimeException("File does not exist.");
}
// TODO: better checks
if (fileStatus.isDir() || fileStatus.getLen() < plan.getFileLength()) {
throw new RuntimeException("File has been changed after this action generated.");
}
locatedBlocks = dfsClient.getLocatedBlocks(fileName, 0, plan.getFileLength()).getLocatedBlocks();
parseSchedulePlan(plan);
moveExecutor = Executors.newFixedThreadPool(maxConcurrentMoves);
return doMove(resultOs, logOs);
}
/**
* Execute a move action providing the schedule plan.
*
* @param resultOs
* @param logOs
* @return
* @throws Exception
*/
public int doMove(PrintStream resultOs, PrintStream logOs) throws Exception {
// TODO: currently just retry failed moves, may need advanced schedule
for (int retryTimes = 0; retryTimes < maxRetryTimes; retryTimes ++) {
for (final ReplicaMove replicaMove : allMoves) {
moveExecutor.execute(new Runnable() {
@Override
public void run() {
replicaMove.run();
}
});
}
int[] stat = new int[2];
while (true) {
ReplicaMove.countStatus(allMoves, stat);
if (stat[0] == allMoves.size()) {
status.increaseMovedBlocks(stat[1]);
break;
}
Thread.sleep(1000);
}
int remaining = ReplicaMove.refreshMoverList(allMoves);
if (allMoves.size() == 0) {
LOG.info("{} succeeded", this);
return 0;
}
if (logOs != null) {
logOs.println("The " + (retryTimes + 1) + "/" + maxRetryTimes + " retry, remaining = " + remaining);
}
LOG.debug("{} : {} moves failed, start a new iteration", this, remaining);
}
int failedMoves = ReplicaMove.failedMoves(allMoves);
LOG.info("{} : failed with {} moves", this, failedMoves);
return failedMoves;
}
@VisibleForTesting
public int executeMove(FileMovePlan plan) throws Exception {
return executeMove(plan, null, null);
}
@Override
public String toString() {
return "MoverExecutor <" + namenode + ":" + fileName + ">";
}
private void init(FileMovePlan plan) throws IOException {
this.namenode = plan.getNamenode();
this.fileName = plan.getFileName();
this.nnc = new NameNodeConnector(namenode, conf);
this.saslClient = new SaslDataTransferClient(conf,
DataTransferSaslUtil.getSaslPropertiesResolver(conf),
TrustedChannelResolver.getInstance(conf), nnc.fallbackToSimpleAuth);
dfsClient = nnc.getDistributedFileSystem().getClient();
allMoves = new ArrayList<>();
}
private void parseSchedulePlan(FileMovePlan plan) throws IOException {
generateSourceMap();
List<String> sourceUuids = plan.getSourceUuids();
List<String> sourceStorageTypes = plan.getSourceStoragetypes();
List<String> targetIpAddrs = plan.getTargetIpAddrs();
List<Integer> targetXferPorts = plan.getTargetXferPorts();
List<String> targetStorageTypes = plan.getTargetStorageTypes();
List<Long> blockIds = plan.getBlockIds();
for (int planIndex = 0; planIndex < blockIds.size(); planIndex ++) {
// build block
Block block = sourceBlockMap.get(blockIds.get(planIndex));
// build source
DatanodeInfo sourceDatanode = sourceDatanodeMap.get(sourceUuids.get(planIndex));
StorageGroup source = new StorageGroup(sourceDatanode, sourceStorageTypes.get(planIndex));
//build target
DatanodeInfo targetDatanode = CompatibilityHelperLoader.getHelper()
.newDatanodeInfo(targetIpAddrs.get(planIndex), targetXferPorts.get(planIndex));
StorageGroup target = new StorageGroup(targetDatanode, targetStorageTypes.get(planIndex));
// generate single move
ReplicaMove replicaMove = new ReplicaMove(block, source, target, nnc, saslClient);
allMoves.add(replicaMove);
}
}
private void generateSourceMap() throws IOException {
sourceBlockMap = new HashMap<>();
sourceDatanodeMap = new HashMap<>();
for (LocatedBlock locatedBlock : locatedBlocks) {
sourceBlockMap.put(locatedBlock.getBlock().getBlockId(), locatedBlock.getBlock().getLocalBlock());
for (DatanodeInfo datanodeInfo : locatedBlock.getLocations()) {
sourceDatanodeMap.put(datanodeInfo.getDatanodeUuid(), datanodeInfo);
}
}
}
}
| apache-2.0 |
kexinrong/macrobase | contrib/src/main/java/macrobase/analysis/contextualoutlier/conf/ContextualConf.java | 1931 | package macrobase.analysis.contextualoutlier.conf;
import macrobase.conf.ConfigurationException;
import macrobase.conf.MacroBaseConf;
/**
 * Configuration keys (and their values) that control MacroBase's contextual
 * outlier analysis, together with the enum of supported entry points.
 */
public class ContextualConf {
    // Which contextual-analysis entry point to run, and the outlier predicates it consumes.
    public static final String CONTEXTUAL_API = "macrobase.analysis.contextual.api";
    public static final String CONTEXTUAL_API_OUTLIER_PREDICATES = "macrobase.analysis.contextual.api.outlierPredicates";
    // Attributes spanning the context space.
    public static final String CONTEXTUAL_DISCRETE_ATTRIBUTES = "macrobase.analysis.contextual.discreteAttributes";
    public static final String CONTEXTUAL_DOUBLE_ATTRIBUTES = "macrobase.analysis.contextual.doubleAttributes";
    // Search parameters: density threshold, discretization granularity, predicate limit, output target.
    public static final String CONTEXTUAL_DENSECONTEXTTAU = "macrobase.analysis.contextual.denseContextTau";
    public static final String CONTEXTUAL_NUMINTERVALS = "macrobase.analysis.contextual.numIntervals";
    public static final String CONTEXTUAL_MAX_PREDICATES = "macrobase.analysis.contextual.maxPredicates";
    public static final String CONTEXTUAL_OUTPUT_FILE = "macrobase.analysis.contextual.outputFile";
    // Pruning switches and distribution-sampling parameters.
    public static final String CONTEXTUAL_PRUNING_DENSITY = "macrobase.analysis.contextual.pruning.density";
    public static final String CONTEXTUAL_PRUNING_DEPENDENCY = "macrobase.analysis.contextual.pruning.dependency";
    public static final String CONTEXTUAL_PRUNING_DISTRIBUTION_FOR_TRAINING = "macrobase.analysis.contextual.pruning.distributionForTraining";
    public static final String CONTEXTUAL_PRUNING_DISTRIBUTION_FOR_SCORING = "macrobase.analysis.contextual.pruning.distributionForScoring";

    /** Supported contextual-analysis entry points. */
    public enum ContextualAPI {
        findAllContextualOutliers,
        findContextsGivenOutlierPredicate,
    }

    /**
     * Reads the configured entry point, falling back to the project default
     * when the key is absent.
     *
     * @param conf the MacroBase configuration to consult.
     * @return the configured {@link ContextualAPI} value.
     * @throws ConfigurationException if reading the configuration fails.
     */
    public static ContextualAPI getContextualAPI(MacroBaseConf conf) throws ConfigurationException {
        return conf.isSet(CONTEXTUAL_API)
                ? ContextualAPI.valueOf(conf.getString(CONTEXTUAL_API))
                : ContextualDefaults.CONTEXTUAL_API;
    }
}
| apache-2.0 |
alibaba/cooma | src/test/java/com/alibaba/cooma/ext1/impl/SimpleExtImpl1.java | 1111 | /*
* Copyright 2012-2013 Cooma Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.cooma.ext1.impl;
import com.alibaba.cooma.ext1.SimpleExt;
import java.util.Map;
/**
 * First test implementation of {@code SimpleExt}: every reply is tagged with
 * this implementation's identifier so callers can tell which impl answered.
 *
 * @author Jerry Lee(oldratlee AT gmail DOT com)
 */
public class SimpleExtImpl1 implements SimpleExt {
    /** Identifier of this implementation; folded into the echo/yell replies at compile time. */
    private static final String IMPL_NAME = "Ext1Impl1";

    public String echo(Map<String, String> config, String s) {
        return IMPL_NAME + "-echo";
    }

    public String yell(Map<String, String> config, String s) {
        return IMPL_NAME + "-yell";
    }

    public String bang(Map<String, String> config, int i) {
        return "bang1";
    }
}
| apache-2.0 |
zhuxiaohao/buck | test/com/facebook/buck/cxx/CxxDependencyFileIntegrationTest.java | 4990 | /*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.junit.Assert.assertThat;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRuleSuccessType;
import com.facebook.buck.testutil.integration.DebuggableTemporaryFolder;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.nio.file.Files;
import java.util.Collection;
@RunWith(Parameterized.class)
public class CxxDependencyFileIntegrationTest {

  /** One run per preprocessing mode: combined, separate, piped. */
  @Parameterized.Parameters(name = "{0}")
  public static Collection<Object[]> data() {
    return ImmutableList.of(
        new Object[]{CxxPreprocessMode.COMBINED},
        new Object[]{CxxPreprocessMode.SEPARATE},
        new Object[]{CxxPreprocessMode.PIPED});
  }

  @Parameterized.Parameter
  public CxxPreprocessMode mode;

  @Rule
  public DebuggableTemporaryFolder tmp = new DebuggableTemporaryFolder();

  private ProjectWorkspace workspace;
  private BuildTarget target;
  private BuildTarget preprocessTarget;

  @Before
  public void setUp() throws IOException {
    workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "depfiles", tmp);
    workspace.setUp();
    workspace.writeContentsToPath(
        "[cxx]\n preprocess_mode = " + mode.toString().toLowerCase() + "\n",
        ".buckconfig");

    // Run a build and make sure it's successful.
    workspace.runBuckBuild("//:test").assertSuccess();

    // Find the target used for preprocessing and verify it ran. In SEPARATE
    // mode preprocessing has its own rule; otherwise it happens in the
    // compile rule.
    target = BuildTargetFactory.newInstance("//:test");
    CxxPlatform platform = DefaultCxxPlatforms.build(new CxxBuckConfig(new FakeBuckConfig()));
    CxxSourceRuleFactory ruleFactory = CxxSourceRuleFactoryHelper.of(target, platform);
    String source = "test.cpp";
    preprocessTarget = (mode == CxxPreprocessMode.SEPARATE)
        ? ruleFactory.createPreprocessBuildTarget(
            source,
            CxxSource.Type.CXX,
            CxxSourceRuleFactory.PicType.PDC)
        : ruleFactory.createCompileBuildTarget(
            source,
            CxxSourceRuleFactory.PicType.PDC);
    workspace.getBuildLog().assertTargetBuiltLocally(preprocessTarget.toString());
  }

  /**
   * Rebuilds //:test and asserts the preprocess rule finished with the given
   * success type.
   */
  private void rebuildAndAssertPreprocessSuccessType(BuildRuleSuccessType expected)
      throws IOException {
    workspace.runBuckBuild(target.toString()).assertSuccess();
    assertThat(
        workspace.getBuildLog().getLogEntry(preprocessTarget).getSuccessType(),
        Matchers.equalTo(Optional.of(expected)));
  }

  @Test
  public void modifyingUsedHeaderCausesRebuild() throws IOException {
    workspace.writeContentsToPath("#define SOMETHING", "used.h");
    rebuildAndAssertPreprocessSuccessType(BuildRuleSuccessType.BUILT_LOCALLY);
  }

  @Test
  public void modifyingUnusedHeaderDoesNotCauseRebuild() throws IOException {
    workspace.writeContentsToPath("#define SOMETHING", "unused.h");
    rebuildAndAssertPreprocessSuccessType(BuildRuleSuccessType.MATCHING_DEP_FILE_RULE_KEY);
  }

  @Test
  public void modifyingOriginalSourceCausesRebuild() throws IOException {
    workspace.writeContentsToPath("int main() { return 1; }", "test.cpp");
    rebuildAndAssertPreprocessSuccessType(BuildRuleSuccessType.BUILT_LOCALLY);
  }

  @Test
  public void removingUsedHeaderAndReferenceToItCausesRebuild() throws IOException {
    workspace.writeContentsToPath("int main() { return 1; }", "test.cpp");
    Files.delete(workspace.getPath("used.h"));
    workspace.replaceFileContents("BUCK", "\'used.h\',", "");
    rebuildAndAssertPreprocessSuccessType(BuildRuleSuccessType.BUILT_LOCALLY);
  }
}
| apache-2.0 |
iafek/kiji-mapreduce | kiji-mapreduce/src/main/java/org/kiji/mapreduce/kvstore/impl/XmlKeyValueStoreParser.java | 16581 | /**
* (c) Copyright 2012 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.mapreduce.kvstore.impl;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.kiji.annotations.ApiAudience;
import org.kiji.mapreduce.kvstore.KeyValueStore;
import org.kiji.mapreduce.kvstore.framework.KeyValueStoreConfiguration;
import org.kiji.mapreduce.kvstore.lib.TextFileKeyValueStore;
/**
 * Utility that parses an XML file that specifies KeyValueStore implementations
 * to bind in an application.
 *
 * <p>The XML documents handled here are supplied by users, so parsing is
 * hardened against XML External Entity (XXE) attacks: DOCTYPE declarations,
 * external entities, and XInclude processing are all disabled.</p>
 */
@ApiAudience.Private
public final class XmlKeyValueStoreParser {
  private static final Logger LOG = LoggerFactory.getLogger(
      XmlKeyValueStoreParser.class.getName());

  /** The Configuration used by ReflectionUtils to instantiate new KeyValueStores. */
  private Configuration mConf;

  /**
   * Creates a new instance.
   *
   * @param conf the Configuration to use to instantiate KeyValueStores.
   */
  private XmlKeyValueStoreParser(Configuration conf) {
    mConf = conf;
  }

  /**
   * This method returns XmlKeyValueStoreParser instance.
   *
   * @param conf the Hadoop Configuration to use to initialize the KeyValueStores.
   * @return an XmlKeyValueStoreParser instance.
   */
  public static XmlKeyValueStoreParser get(Configuration conf) {
    return new XmlKeyValueStoreParser(conf);
  }

  /**
   * Creates a DocumentBuilderFactory hardened against XML External Entity
   * (XXE) attacks. The store-definition documents parsed by this class are
   * user-supplied, so DOCTYPE declarations, external general/parameter
   * entities, XInclude, and entity-reference expansion are disabled.
   *
   * @return a securely configured DocumentBuilderFactory.
   * @throws ParserConfigurationException if the underlying parser does not
   *     support the requested security features.
   */
  private static DocumentBuilderFactory newSecureDocumentBuilderFactory()
      throws ParserConfigurationException {
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    dbf.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
    dbf.setFeature("http://xml.org/sax/features/external-general-entities", false);
    dbf.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
    dbf.setXIncludeAware(false);
    dbf.setExpandEntityReferences(false);
    return dbf;
  }

  /**
   * Given an InputStream pointing to an opened resource that specifies a set of KeyValueStores
   * via XML, return the map of names to configured KeyValueStore instances. The caller is
   * responsible for closing the InputStream.
   *
   * <p>If an XML file tries to bind the same name to multiple stores, this will throw an
   * IOException.</p>
   *
   * @param xmlStream the InputStream pointing to the XML resource to load
   * @return a map from names to configured KeyValueStore instances.
   * @throws IOException if there is an error reading from the input stream or parsing the XML.
   */
  public Map<String, KeyValueStore<?, ?>> loadStoresFromXml(InputStream xmlStream)
      throws IOException {
    Map<String, KeyValueStore<?, ?>> outMap = new HashMap<String, KeyValueStore<?, ?>>();
    try {
      // Parse with an XXE-hardened factory: the XML comes from the user.
      DocumentBuilderFactory dbf = newSecureDocumentBuilderFactory();
      DocumentBuilder db = dbf.newDocumentBuilder();
      Document doc = db.parse(xmlStream);
      Element root = doc.getDocumentElement();
      root.normalize();
      if (!root.getNodeName().equals("stores")) {
        throw new IOException("Expected <stores> as root element.");
      }
      for (int i = 0; i < root.getChildNodes().getLength(); i++) {
        Node node = root.getChildNodes().item(i);
        if (node.getNodeType() != Node.ELEMENT_NODE) {
          continue;
        }
        if (node.getNodeName().equals("store")) {
          NamedNodeMap attrs = node.getAttributes();
          Node nameNode = attrs.getNamedItem("name");
          Node classNode = attrs.getNamedItem("class");
          if (null == nameNode) {
            throw new IOException("Expected 'name' attribute in <store>");
          } else if (null == classNode) {
            throw new IOException("Expected 'class' attribute in <store>");
          }

          String storeName = nameNode.getNodeValue();
          String storeClassStr = classNode.getNodeValue();

          if (storeName.isEmpty()) {
            throw new IOException("Expected non-empty store name");
          } else if (storeClassStr.isEmpty()) {
            throw new IOException("Expected non-empty store class");
          }

          if (outMap.containsKey(storeName)) {
            throw new IOException("Store with name \"" + storeName
                + "\" is defined multiple times");
          }

          // If the store class string does not contain any package specification,
          // auto-append org.kiji.mapreduce.kvstore.
          // TODO(KIJI-364): Make this compatible with user-written kvstores that live
          // in the default package. (Maybe try instantiating them first?)
          if (!storeClassStr.contains(".")) {
            // TODO: Make this a sane integration when these classes move to kiji-mr-lib
            storeClassStr = TextFileKeyValueStore.class.getPackage().getName()
                + "." + storeClassStr;
          }

          try {
            Class<?> userStoreClass = Class.forName(storeClassStr);
            if (!KeyValueStore.class.isAssignableFrom(userStoreClass)) {
              throw new IOException("Class " + userStoreClass.getName()
                  + " does not extend KeyValueStore");
            }
            @SuppressWarnings("rawtypes")
            Class<? extends KeyValueStore> storeClass =
                userStoreClass.asSubclass(KeyValueStore.class);
            LOG.info("Instantiating " + storeClass.getName() + " for store name " + storeName);

            // Create the store instance, and then configure it by
            // parsing the <store> element.
            KeyValueStore<?, ?> store = ReflectionUtils.newInstance(storeClass, mConf);
            configureFromXml(store, storeName, node);
            outMap.put(storeName, store);
          } catch (ClassNotFoundException cnfe) {
            throw new IOException("No such class: " + storeClassStr, cnfe);
          }
        } else {
          // We only expect <store> blocks in here.
          throw new IOException("Unexpected first-level element: " + node.getNodeName());
        }
      }
    } catch (ParserConfigurationException pce) {
      throw new IOException(pce);
    } catch (SAXException se) {
      throw new IOException(se);
    }
    return outMap;
  }

  /**
   * Allows a store to define a mechanism for reading arbitrary serialized data from an
   * XML file specifying KeyValueStore definitions.
   *
   * <p>KeyValueStore definitions may be specified in an XML file applied by the user.
   * Each store is defined in a <store> element. KeyValueStore implementations should
   * expect the store element to contain a single child element called <configuration>.
   * This method reads this child element
   * as a KeyValueStoreConfiguration object, then initializes the argument KeyValueStore by passing
   * this to initFromConf().</p>
   *
   * <p>If no <configuration> element is present, the KeyValueStore is initialized
   * with an empty KeyValueStoreConfiguration.</p>
   *
   * @param store the store instance being configured.
   * @param storeName the name being bound to this store instance.
   * @param xmlNode the w3c DOM node representing the <store> element in the document.
   * @throws IOException if there is an error parsing the XML document node.
   */
  private void configureFromXml(KeyValueStore<?, ?> store, String storeName, Node xmlNode)
      throws IOException {
    NodeList storeChildren = xmlNode.getChildNodes();
    Node configurationNode = null;
    int numRealChildren = 0;
    for (int j = 0; j < storeChildren.getLength(); j++) {
      Node storeChild = storeChildren.item(j);
      if (storeChild.getNodeType() != Node.ELEMENT_NODE) {
        continue;
      } else {
        numRealChildren++;
        if (storeChild.getNodeName().equals("configuration")) {
          configurationNode = storeChild;
        }
      }
    }

    if (numRealChildren > 1) {
      // Don't recognize the XML schema here.
      throw new IOException("Unrecognized XML schema for store " + storeName
          + "; expected <configuration> element.");
    } else if (numRealChildren == 0) {
      assert null == configurationNode;
      LOG.warn("No <configuration> supplied for store " + storeName);
      store.initFromConf(
          KeyValueStoreConfiguration.createInConfiguration(new Configuration(mConf), 0));
    } else if (null == configurationNode) {
      // Got a single child element, but it wasn't a <configuration>.
      throw new IOException("Unrecognized XML schema for store " + storeName
          + "; expected <configuration> element.");
    } else {
      assert numRealChildren == 1;
      // Configure the store by parsing the <configuration> element.
      // The keys in this returned storeConf are all wrapped in a per-kvstore namespace,
      // and do not contain any default keys.
      Configuration storeConf = parseConfiguration(configurationNode);

      // Create a "real" configuration with defaults, and add the elements of storeConf on top.
      Configuration conf = new Configuration(mConf);
      for (Map.Entry<String, String> entry : storeConf) {
        conf.set(entry.getKey(), entry.getValue());
      }

      store.initFromConf(KeyValueStoreConfiguration.createInConfiguration(conf, 0));
    }
  }

  /**
   * Given a DOM Node object that represents a <configuration> block
   * within a <store> object, reformat this as an xml document that can be parsed
   * by {@link org.apache.hadoop.conf.Configuration}, and then return a
   * Configuration instance to pass into a KeyValueStore object to instantiate.
   *
   * @param configNode a node representing a <configuration> element
   *     in the DOM that is the root of the KeyValueStore's configuration.
   * @return a new Configuration containing the key-value pairs associated
   *     with this node.
   * @throws IOException if there's an error processing the XML data.
   */
  private Configuration parseConfiguration(Node configNode) throws IOException {
    if (null == configNode) {
      return null;
    } else if (!configNode.getNodeName().equals("configuration")) {
      throw new IOException("Expected <configuration> node, got " + configNode.getNodeName());
    }

    try {
      // The factory only builds a fresh in-memory document here, but use the
      // hardened factory anyway for consistency.
      DocumentBuilderFactory factory = newSecureDocumentBuilderFactory();
      DocumentBuilder builder = factory.newDocumentBuilder();
      Document document = builder.newDocument();
      Element root = document.createElement("configuration");
      document.appendChild(root);
      copyConfigNodes(root, configNode, document);

      TransformerFactory tf = TransformerFactory.newInstance();
      Transformer transformer = tf.newTransformer();
      transformer.setOutputProperty(OutputKeys.METHOD, "xml");
      transformer.setOutputProperty(OutputKeys.INDENT, "yes");
      transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
      transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
      ByteArrayOutputStream outStream = new ByteArrayOutputStream();
      transformer.transform(new DOMSource(document), new StreamResult(outStream));
      String confXmlText = outStream.toString("UTF-8");

      // This only contains entries from the XML file component for this store; no defaults.
      Configuration conf = new Configuration(false);
      conf.addResource(new ByteArrayInputStream(confXmlText.getBytes("UTF-8")));

      // Use KeyValueStoreConfiguration.fromConf() to remap these nodes into a namespace
      // for this individual key-value store, but return the underlying Configuration object.
      return KeyValueStoreConfiguration.fromConf(conf).getDelegate();
    } catch (TransformerConfigurationException e) {
      throw new RuntimeException(e);
    } catch (TransformerException e) {
      throw new RuntimeException(e);
    } catch (ParserConfigurationException e) {
      throw new IOException(e);
    }
  }

  /**
   * Given a src and dest node that both represent <configuration>
   * elements, copy the <property> objects from src to dest.
   *
   * <p>The dest element is structurally modified by this operation. The
   * src argument is not modified.</p>
   *
   * <p>The <name> and <value> elements within each property
   * are copied across; other elements such as <final> are ignored.</p>
   *
   * <p>The text associated with each name is modified to include a "header"
   * that mirrors the KeyValueStore configuration serialization system;
   * the properties are placed in the sub-namespace of the configuration
   * associated with the '0' KeyValueStore being serialized to a Configuration
   * instance via {@link KeyValueStore#storeToConf(KeyValueStoreConfiguration)}.</p>
   *
   * @param src the input <configuration> element.
   * @param dest the target <configuration> element.
   * @param doc the target XML document.
   * @throws IOException if there is an error parsing the XML.
   */
  private void copyConfigNodes(Element dest, Node src, Document doc) throws IOException {
    assert null != dest;
    assert null != src;
    NodeList children = src.getChildNodes();
    for (int i = 0; i < children.getLength(); i++) {
      Node child = children.item(i);
      if (child.getNodeType() != Node.ELEMENT_NODE) {
        continue;
      }
      if (child.getNodeName().equals("property")) {
        Node outProp = copyPropertyNode(child, doc);
        dest.appendChild(outProp);
      } else {
        throw new IOException("Unexpected element in configuration: " + child.getNodeName());
      }
    }
  }

  /**
   * Deep copies a node representing an <property> element.
   *
   * <p>Modifies the <name> element to include a header that puts the
   * property in the "namespace" of the 0 element KeyValueStore in a Configuration.</p>
   *
   * @param propertyNode the input property to clone.
   * @param doc the output XML document we're building
   * @return a Node representing the same property in the "namespace" of the 0
   *     element KeyValueStore in a Configuration.
   * @throws IOException if there is an error parsing the input XML.
   */
  private Node copyPropertyNode(Node propertyNode, Document doc) throws IOException {
    Element out = doc.createElement("property");
    NodeList propChildren = propertyNode.getChildNodes();
    for (int i = 0; i < propChildren.getLength(); i++) {
      Node child = propChildren.item(i);
      if (child.getNodeType() != Node.ELEMENT_NODE) {
        continue;
      } else if (child.getNodeName().equals("name")) {
        Element outName = doc.createElement("name");
        String inName = getChildText(child);
        outName.appendChild(doc.createTextNode(inName));
        out.appendChild(outName);
      } else if (child.getNodeName().equals("value")) {
        Element outVal = doc.createElement("value");
        outVal.appendChild(doc.createTextNode(getChildText(child)));
        out.appendChild(outVal);
      }
    }

    return out;
  }

  /**
   * Given an element with a text child, return the string contents of that
   * text child.
   *
   * @param elem the input element node.
   * @return the string contents of the single text child element.
   * @throws IOException if the XML DOM under this element is not a single text node.
   */
  private String getChildText(Node elem) throws IOException {
    assert elem.getNodeType() == Node.ELEMENT_NODE;
    NodeList children = elem.getChildNodes();
    if (children.getLength() != 1) {
      throw new IOException("Expected exactly one text value under " + elem.getNodeName());
    }
    return children.item(0).getNodeValue();
  }
}
| apache-2.0 |
opengeospatial/ets-19139 | src/main/java/org/opengis/cite/iso19139/util/TestSuiteLogger.java | 2438 | package org.opengis.cite.iso19139.util;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Logging utility class that provides simple access to the JDK Logging API. Set
 * the "java.util.logging.config.file" system property to specify the location
 * of the desired logging configuration file. A sample configuration file is
 * available at {@code src/main/config/logging.properties}.
 *
 * @see java.util.logging.LogManager LogManager
 */
public class TestSuiteLogger {

    // Shared logger named after this class's package so all classes in the
    // package share one logging configuration.
    private static final Logger LOGR = Logger.getLogger(packageName());

    /**
     * Determines the name of the package containing this class.
     *
     * <p>{@link Class#getPackage()} can return {@code null} (e.g. for classes
     * loaded in the unnamed package, or by class loaders that never define the
     * package); guard against that so class initialization cannot fail with a
     * {@link NullPointerException}.</p>
     *
     * @return The package name, or the root logger name ("") as a fallback.
     */
    private static String packageName() {
        Package pkg = TestSuiteLogger.class.getPackage();
        return (pkg != null) ? pkg.getName() : "";
    }

    /**
     * Logs a message at the specified logging level with the given message
     * parameters.
     *
     * @param level The logging {@link Level level}.
     * @param message A String representing the content of the log message.
     * @param params An array of message parameters.
     */
    public static void log(Level level, String message, Object[] params) {
        if (LOGR.isLoggable(level)) {
            LOGR.log(level, message, params);
        }
    }

    /**
     * Logs a message at the specified logging level with the given Exception
     * object that represents a noteworthy error condition.
     *
     * @param level The logging {@link Level level}.
     * @param message A String representing the content of the log message.
     * @param except An object that indicates an exceptional situation.
     */
    public static void log(Level level, String message, Exception except) {
        if (LOGR.isLoggable(level)) {
            LOGR.log(level, message, except);
        }
    }

    /**
     * Logs a simple message at the specified logging level.
     *
     * @param level The logging {@link Level level}.
     * @param message A String representing the content of the log message.
     */
    public static void log(Level level, String message) {
        if (LOGR.isLoggable(level)) {
            LOGR.log(level, message);
        }
    }

    /**
     * Indicates if the logger is enabled at a given logging level. Message
     * levels lower than this value will be discarded.
     *
     * @param level The logging {@link Level level}.
     * @return true if the logger is currently enabled for this logging level;
     *         false otherwise.
     */
    public static boolean isLoggable(Level level) {
        return LOGR.isLoggable(level);
    }

    // Utility class: not instantiable.
    private TestSuiteLogger() {
    }
}
| apache-2.0 |
larusba/neo4j-jdbc | neo4j-jdbc-http/src/test/java/org/neo4j/jdbc/http/HttpNeo4jResultSetTest.java | 1153 | /*
* Copyright (c) 2016 LARUS Business Automation [http://www.larus-ba.it]
* <p>
* This file is part of the "LARUS Integration Framework for Neo4j".
* <p>
* The "LARUS Integration Framework for Neo4j" is licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* <p>
* Created on 19/06/16
*/
package org.neo4j.jdbc.http;
import org.junit.Test;
import java.sql.SQLException;
/**
 * Closing an already-closed result set must be a no-op per the JDBC contract,
 * so repeated {@code close()} calls may not throw.
 *
 * @author AgileLARUS
 * @since 3.0
 */
public class HttpNeo4jResultSetTest {

	@Test
	public void closeMultipleTimeShouldBeNOOP() throws SQLException {
		HttpNeo4jResultSet resultSet = new HttpNeo4jResultSet(null, null);
		for (int attempt = 0; attempt < 3; attempt++) {
			resultSet.close();
		}
	}
}
| apache-2.0 |
AlexOreshkevich/strongbox | strongbox-storage/strongbox-storage-indexing/src/test/java/org/carlspring/strongbox/resource/ResourcesBooterTest.java | 1202 | package org.carlspring.strongbox.resource;
import org.carlspring.strongbox.booters.ResourcesBooter;
import org.carlspring.strongbox.testing.TestCaseWithArtifactGenerationWithIndexing;
import java.io.File;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static org.junit.Assert.assertTrue;
/**
 * Verifies that the resources booter copies the default configuration from the
 * classpath into the Strongbox home directory on startup.
 *
 * @author mtodorov
 */
@RunWith(SpringJUnit4ClassRunner.class)
public class ResourcesBooterTest extends TestCaseWithArtifactGenerationWithIndexing
{

    private static final Logger logger = LoggerFactory.getLogger(ResourcesBooterTest.class);

    // This field is indeed used: its execute() method is invoked via a @PostConstruct.
    @Autowired
    private ResourcesBooter resourcesBooter;


    @Test
    public void testResourceBooting()
            throws Exception
    {
        String homeDirectory = ConfigurationResourceResolver.getHomeDirectory();
        File configurationFile = new File(homeDirectory + "/etc/conf/strongbox.xml");

        assertTrue("Failed to copy configuration resource from classpath!", configurationFile.exists());
    }

}
| apache-2.0 |
YOU-i-Labs/appium-java-client | src/main/java/io/appium/java_client/MobileBy.java | 5290 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.appium.java_client;
import org.apache.commons.lang3.StringUtils;
import org.openqa.selenium.By;
import org.openqa.selenium.SearchContext;
import org.openqa.selenium.WebElement;
import java.io.Serializable;
import java.util.List;
@SuppressWarnings("serial")
public abstract class MobileBy extends By {

    /** Raw locator expression handed to the mobile automation framework. */
    private final String locatorString;

    /**
     * Creates a locator wrapping the given automation expression.
     *
     * @param locatorString the raw locator text; must not be blank
     * @throws IllegalArgumentException if {@code locatorString} is null, empty or whitespace-only
     */
    protected MobileBy(String locatorString) {
        if (StringUtils.isBlank(locatorString)) {
            throw new IllegalArgumentException("Must supply a not empty locator value.");
        }
        this.locatorString = locatorString;
    }

    /**
     * @return the raw locator expression this instance was constructed with
     */
    protected String getLocatorString() {
        return locatorString;
    }

    /**
     * Read https://developer.apple.com/library/tvos/documentation/DeveloperTools/
     * Conceptual/InstrumentsUserGuide/UIAutomation.html
     *
     * @param iOSAutomationText is iOS UIAutomation string
     * @return an instance of {@link io.appium.java_client.MobileBy.ByIosUIAutomation}
     */
    public static By IosUIAutomation(final String iOSAutomationText) {
        return new ByIosUIAutomation(iOSAutomationText);
    }

    /**
     * Read http://developer.android.com/intl/ru/tools/testing-support-library/
     * index.html#uia-apis
     * @param uiautomatorText is Android UIAutomator string
     * @return an instance of {@link io.appium.java_client.MobileBy.ByAndroidUIAutomator}
     */
    public static By AndroidUIAutomator(final String uiautomatorText) {
        return new ByAndroidUIAutomator(uiautomatorText);
    }

    /**
     * About Android accessibility
     * https://developer.android.com/intl/ru/training/accessibility/accessible-app.html
     * About iOS accessibility
     * https://developer.apple.com/library/ios/documentation/UIKit/Reference/
     * UIAccessibilityIdentification_Protocol/index.html
     * @param accessibilityId id is a convenient UI automation accessibility Id.
     * @return an instance of {@link io.appium.java_client.MobileBy.ByAccessibilityId}
     */
    public static By AccessibilityId(final String accessibilityId) {
        return new ByAccessibilityId(accessibilityId);
    }

    /** Locator strategy backed by an iOS UIAutomation expression; the search
     *  context is expected to implement {@link FindsByIosUIAutomation}. */
    public static class ByIosUIAutomation extends MobileBy implements Serializable {

        public ByIosUIAutomation(String iOSAutomationText) {
            super(iOSAutomationText);
        }

        @SuppressWarnings("unchecked")
        @Override
        public List<WebElement> findElements(SearchContext context) {
            return (List<WebElement>) ((FindsByIosUIAutomation<?>) context)
                .findElementsByIosUIAutomation(getLocatorString());
        }

        @Override public WebElement findElement(SearchContext context) {
            return ((FindsByIosUIAutomation<?>) context)
                .findElementByIosUIAutomation(getLocatorString());
        }

        @Override public String toString() {
            return "By.IosUIAutomation: " + getLocatorString();
        }
    }

    /** Locator strategy backed by an Android UIAutomator expression; the search
     *  context is expected to implement {@link FindsByAndroidUIAutomator}. */
    public static class ByAndroidUIAutomator extends MobileBy implements Serializable {

        public ByAndroidUIAutomator(String uiautomatorText) {
            super(uiautomatorText);
        }

        @SuppressWarnings("unchecked")
        @Override
        public List<WebElement> findElements(SearchContext context) {
            return (List<WebElement>) ((FindsByAndroidUIAutomator<?>) context)
                .findElementsByAndroidUIAutomator(getLocatorString());
        }

        @Override public WebElement findElement(SearchContext context) {
            return ((FindsByAndroidUIAutomator<?>) context)
                .findElementByAndroidUIAutomator(getLocatorString());
        }

        @Override public String toString() {
            return "By.AndroidUIAutomator: " + getLocatorString();
        }
    }

    /** Locator strategy backed by a platform accessibility id; the search
     *  context is expected to implement {@link FindsByAccessibilityId}. */
    public static class ByAccessibilityId extends MobileBy implements Serializable {

        public ByAccessibilityId(String accessibilityId) {
            super(accessibilityId);
        }

        @SuppressWarnings("unchecked")
        @Override
        public List<WebElement> findElements(SearchContext context) {
            return (List<WebElement>) ((FindsByAccessibilityId<?>) context)
                .findElementsByAccessibilityId(getLocatorString());
        }

        @Override public WebElement findElement(SearchContext context) {
            return ((FindsByAccessibilityId<?>) context)
                .findElementByAccessibilityId(getLocatorString());
        }

        @Override public String toString() {
            return "By.AccessibilityId: " + getLocatorString();
        }
    }
}
| apache-2.0 |
UniquePassive/runelite | deobfuscator/src/main/java/net/runelite/deob/deobfuscators/IllegalStateExceptions.java | 5090 | /*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.deob.deobfuscators;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import net.runelite.asm.ClassFile;
import net.runelite.asm.ClassGroup;
import net.runelite.asm.Method;
import net.runelite.asm.attributes.Code;
import net.runelite.asm.attributes.code.Instruction;
import net.runelite.asm.attributes.code.Instructions;
import net.runelite.asm.attributes.code.instruction.types.ComparisonInstruction;
import net.runelite.asm.attributes.code.instruction.types.JumpingInstruction;
import net.runelite.asm.attributes.code.instructions.AThrow;
import net.runelite.asm.attributes.code.instructions.Goto;
import net.runelite.asm.attributes.code.instructions.If;
import net.runelite.asm.attributes.code.instructions.New;
import net.runelite.asm.execution.Execution;
import net.runelite.asm.execution.InstructionContext;
import net.runelite.asm.execution.MethodContext;
import net.runelite.deob.Deobfuscator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class IllegalStateExceptions implements Deobfuscator
{
	private static final Logger logger = LoggerFactory.getLogger(IllegalStateExceptions.class);

	// number of "throw new IllegalStateException" guard branches rewritten into plain gotos
	private int count;
	// comparison instructions that are immediately followed by an IllegalStateException allocation
	private Set<Instruction> interesting = new HashSet<>();
	// instruction contexts collected during symbolic execution, processed once per method
	private List<InstructionContext> toRemove = new ArrayList<>();

	/* find if, new, ..., athrow, replace with goto */
	private void findInteresting(ClassGroup group)
	{
		for (ClassFile cf : group.getClasses())
		{
			for (Method m : cf.getMethods())
			{
				Code c = m.getCode();
				if (c == null)
					continue;
				Instructions instructions = c.getInstructions();
				List<Instruction> ilist = instructions.getInstructions();
				for (int i = 0; i < ilist.size(); ++i)
				{
					Instruction ins = ilist.get(i);
					if (!(ins instanceof ComparisonInstruction)) // the if
						continue;
					// the obfuscator's pattern allocates the exception right after
					// the comparison
					// NOTE(review): assumes a comparison is never the method's last
					// instruction (it always has a jump target after it) -- confirm
					Instruction ins2 = ilist.get(i + 1);
					if (!(ins2 instanceof New))
						continue;
					New new2 = (New) ins2;
					net.runelite.asm.pool.Class clazz = new2.getNewClass();
					if (!clazz.getName().contains("java/lang/IllegalStateException"))
						continue;
					interesting.add(ins);
				}
			}
		}
	}

	// execution visitor: remember every executed instruction that matched the pattern
	private void visit(InstructionContext ic)
	{
		if (interesting.contains(ic.getInstruction()))
		{
			toRemove.add(ic);
		}
	}

	// method visitor: rewrite all matches found in the finished method, then reset
	private void visit(MethodContext ctx)
	{
		for (InstructionContext ictx : toRemove)
			processOne(ictx);
		toRemove.clear();
	}

	// replaces one "if -> new IllegalStateException ... athrow" sequence with a
	// goto to the if's original jump target
	private void processOne(InstructionContext ic)
	{
		Instruction ins = ic.getInstruction();
		Instructions instructions = ins.getInstructions();
		if (instructions == null)
			return;
		List<Instruction> ilist = instructions.getInstructions();
		JumpingInstruction jumpIns = (JumpingInstruction) ins;
		assert jumpIns.getJumps().size() == 1;
		Instruction to = jumpIns.getJumps().get(0);
		// remove stack of if.
		if (ins instanceof If)
		{
			ic.removeStack(1);
		}
		ic.removeStack(0);
		int i = ilist.indexOf(ins);
		assert i != -1;
		// remove up to athrow
		while (!(ins instanceof AThrow))
		{
			instructions.remove(ins);
			ins = ilist.get(i); // don't need to ++i because removal shifts the next instruction into slot i
		}
		// remove athrow
		instructions.remove(ins);
		// insert goto
		assert ilist.contains(to);
		Goto g = new Goto(instructions, instructions.createLabelFor(to));
		ilist.add(i, g);
		++count;
	}

	@Override
	public void run(ClassGroup group)
	{
		findInteresting(group);
		Execution execution = new Execution(group);
		execution.addExecutionVisitor(i -> visit(i));
		execution.addMethodContextVisitor(i -> visit(i));
		execution.populateInitialMethods();
		execution.run();
		logger.info("Removed " + count + " illegal state exceptions");
	}
}
| bsd-2-clause |
tkmnet/RCRS-ADF | gradle/gradle-2.1/src/core-impl/org/gradle/api/internal/artifacts/repositories/DefaultMavenArtifactRepository.java | 4893 | /*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.internal.artifacts.repositories;
import com.google.common.collect.Lists;
import org.gradle.api.InvalidUserDataException;
import org.gradle.api.artifacts.repositories.MavenArtifactRepository;
import org.gradle.api.artifacts.repositories.PasswordCredentials;
import org.gradle.api.internal.artifacts.ModuleVersionPublisher;
import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.ConfiguredModuleComponentRepository;
import org.gradle.api.internal.artifacts.metadata.ModuleVersionArtifactMetaData;
import org.gradle.api.internal.artifacts.repositories.resolver.MavenResolver;
import org.gradle.api.internal.artifacts.repositories.transport.RepositoryTransport;
import org.gradle.api.internal.artifacts.repositories.transport.RepositoryTransportFactory;
import org.gradle.api.internal.file.FileResolver;
import org.gradle.internal.resource.local.FileStore;
import org.gradle.internal.resource.local.LocallyAvailableResourceFinder;
import java.net.URI;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
public class DefaultMavenArtifactRepository extends AbstractAuthenticationSupportedRepository implements MavenArtifactRepository, ResolutionAwareRepository, PublicationAwareRepository {
    private final FileResolver fileResolver;
    private final RepositoryTransportFactory transportFactory;
    // Root URL as supplied by the build script; resolved lazily in getUrl()
    // so it can still be reconfigured after construction.
    private Object url;
    // Additional artifact-only locations, resolved lazily in getArtifactUrls().
    private List<Object> additionalUrls = new ArrayList<Object>();
    private final LocallyAvailableResourceFinder<ModuleVersionArtifactMetaData> locallyAvailableResourceFinder;
    private final FileStore<ModuleVersionArtifactMetaData> artifactFileStore;

    public DefaultMavenArtifactRepository(FileResolver fileResolver, PasswordCredentials credentials, RepositoryTransportFactory transportFactory,
                                          LocallyAvailableResourceFinder<ModuleVersionArtifactMetaData> locallyAvailableResourceFinder,
                                          FileStore<ModuleVersionArtifactMetaData> artifactFileStore) {
        super(credentials);
        this.fileResolver = fileResolver;
        this.transportFactory = transportFactory;
        this.locallyAvailableResourceFinder = locallyAvailableResourceFinder;
        this.artifactFileStore = artifactFileStore;
    }

    /**
     * @return the resolved root URI of this repository, or null if none was configured
     */
    public URI getUrl() {
        return url == null ? null : fileResolver.resolveUri(url);
    }

    public void setUrl(Object url) {
        this.url = url;
    }

    /**
     * @return the resolved additional artifact locations, in the order they were added
     */
    public Set<URI> getArtifactUrls() {
        Set<URI> result = new LinkedHashSet<URI>();
        for (Object additionalUrl : additionalUrls) {
            result.add(fileResolver.resolveUri(additionalUrl));
        }
        return result;
    }

    /** Appends the given locations to the set of additional artifact URLs. */
    public void artifactUrls(Object... urls) {
        additionalUrls.addAll(Lists.newArrayList(urls));
    }

    /** Replaces the set of additional artifact URLs with the given locations. */
    public void setArtifactUrls(Iterable<?> urls) {
        additionalUrls = Lists.newArrayList(urls);
    }

    public ModuleVersionPublisher createPublisher() {
        return createRealResolver();
    }

    public ConfiguredModuleComponentRepository createResolver() {
        return createRealResolver();
    }

    /**
     * Builds the resolver for this repository from the configured root URL and
     * any additional artifact locations.
     *
     * @throws InvalidUserDataException if no root URL has been configured
     */
    protected MavenResolver createRealResolver() {
        URI rootUri = getUrl();
        if (rootUri == null) {
            throw new InvalidUserDataException("You must specify a URL for a Maven repository.");
        }
        MavenResolver resolver = createResolver(rootUri);
        for (URI repoUrl : getArtifactUrls()) {
            resolver.addArtifactLocation(repoUrl);
        }
        return resolver;
    }

    // Creates the resolver with a transport chosen by the URI's scheme.
    private MavenResolver createResolver(URI rootUri) {
        RepositoryTransport transport = getTransport(rootUri.getScheme());
        return new MavenResolver(getName(), rootUri, transport, locallyAvailableResourceFinder, artifactFileStore);
    }

    protected FileStore<ModuleVersionArtifactMetaData> getArtifactFileStore() {
        return artifactFileStore;
    }

    protected RepositoryTransport getTransport(String scheme) {
        return transportFactory.createTransport(scheme, getName(), getCredentials());
    }

    protected LocallyAvailableResourceFinder<ModuleVersionArtifactMetaData> getLocallyAvailableResourceFinder() {
        return locallyAvailableResourceFinder;
    }
}
| bsd-2-clause |
Sollace/BlazeLoader | src/main/com/blazeloader/util/reflect/BufferedFunc.java | 2267 | package com.blazeloader.util.reflect;
import com.blazeloader.bl.obf.BLOBF;
/**
* A buffered version of Func permanently bound to a given instance
*
* @param <I> The class host for this method
* @param <T> The interface type containing the method signature used.
* @param <R> The return type for this method
*/
public class BufferedFunc<I, T, R> extends Function<I, T, R> {
private final I instance;
private T lambda;
public BufferedFunc(Class<T> interfaceType, I context, Class<R> returnType, String name, Class... pars) {
super(interfaceType, (Class<I>)context.getClass(), returnType, name, false, pars);
instance = context;
init();
}
public BufferedFunc(Class<T> interfaceType, I context, BLOBF obf) {
super(interfaceType, false ,obf);
instance = context;
init();
}
protected BufferedFunc(I context, Function<I, T, R> other) {
super(other);
instance = context;
init();
}
private void init() {
try {
lambda = (T)handle.factory.bindTo(instance).invoke();
} catch (Throwable e) {
lambda = null;
handle.factory = null;
}
}
/**
* Invokes the underlying method with the given arguments and instance context.
*
* @param args Object array of arguments.
* <p>
* Note: Calling a method through its lambda can be considerably faster than using call or apply depending on usage.
*
* @return The returned result of the method
* @throws Throwable if there is any error.
*/
public R apply(Object... args) throws Throwable {
return (R)handle.target.bindTo(instance).invokeWithArguments(args);
}
/**
* Gets a lambda object built from the given interface class with the underlying method as its implementation.
* <p>
* Note: Calling a method through its lambda can be considerably faster than using call or apply depending on usage.
*
* @return lambda T
* @throws Throwable if there is any error.
*/
public T getLambda() {
return lambda;
}
public void invalidate() {
super.invalidate();
lambda = null;
}
public boolean valid() {
return super.valid() && lambda != null;
}
/**
* Creates a new BufferedFunc bound to the given instance
*/
public BufferedFunc<I, T, R> newWithInstance(I newInstance) {
return new BufferedFunc(newInstance, this);
}
}
| bsd-2-clause |
ksclarke/basex | basex-core/src/main/java/org/basex/query/expr/gflwor/Let.java | 5439 | package org.basex.query.expr.gflwor;
import static org.basex.query.QueryText.*;
import java.util.*;
import org.basex.query.*;
import org.basex.query.expr.*;
import org.basex.query.expr.gflwor.GFLWOR.Clause;
import org.basex.query.expr.gflwor.GFLWOR.Eval;
import org.basex.query.func.*;
import org.basex.query.iter.*;
import org.basex.query.util.*;
import org.basex.query.value.*;
import org.basex.query.value.item.*;
import org.basex.query.value.node.*;
import org.basex.query.value.type.*;
import org.basex.query.var.*;
import org.basex.util.*;
import org.basex.util.ft.*;
import org.basex.util.hash.*;
/**
* FLWOR {@code let} clause, binding an expression to a variable.
*
* @author BaseX Team 2005-15, BSD License
* @author Leo Woerteler
*/
public final class Let extends ForLet {
  /**
   * Constructor.
   * @param var variable
   * @param expr expression
   * @param score score flag
   * @param info input info
   */
  public Let(final Var var, final Expr expr, final boolean score, final InputInfo info) {
    super(info, var, expr, score, var);
  }

  /**
   * Creates a let expression from a for loop over a single item.
   * @param fr for loop
   * @return let binding
   */
  static Let fromFor(final For fr) {
    final Let lt = new Let(fr.var, fr.expr, false, fr.info);
    lt.seqType = fr.expr.seqType();
    return lt;
  }

  /**
   * Creates a let binding for the score variable of a for clause.
   * @param fr for clause
   * @return let binding for the score variable
   */
  static Let fromForScore(final For fr) {
    final Expr varRef = new VarRef(fr.info, fr.var);
    return new Let(fr.score, varRef, true, fr.info);
  }

  @Override
  LetEval eval(final Eval sub) {
    // consecutive let clauses share one evaluator: append to an existing
    // block instead of nesting another evaluation level
    if(!(sub instanceof LetEval)) return new LetEval(this, sub);
    final LetEval eval = (LetEval) sub;
    eval.lets.add(this);
    return eval;
  }

  /**
   * Calculates the score of the given iterator.
   * @param iter iterator
   * @return score (average of the item scores)
   * @throws QueryException evaluation exception
   */
  private static Dbl score(final Iter iter) throws QueryException {
    double s = 0;
    int c = 0;
    for(Item it; (it = iter.next()) != null; s += it.score(), c++);
    return Dbl.get(Scoring.avg(s, c));
  }

  @Override
  public Clause compile(final QueryContext qc, final VarScope scp) throws QueryException {
    final Clause c = super.compile(qc, scp);
    // a score binding is always a double; otherwise adopt the expression's type
    var.refineType(scoring ? SeqType.DBL : expr.seqType(), qc, info);
    return c;
  }

  @Override
  public Let optimize(final QueryContext qc, final VarScope scp) throws QueryException {
    // drop a type check on the bound expression if the variable already
    // enforces (or can adopt) the checked type
    if(!scoring && expr instanceof TypeCheck) {
      final TypeCheck tc = (TypeCheck) expr;
      if(tc.isRedundant(var) || var.adoptCheck(tc.seqType(), tc.promote)) {
        qc.compInfo(OPTCAST, tc.seqType());
        expr = tc.expr;
      }
    }
    seqType = scoring ? SeqType.DBL : expr.seqType();
    var.refineType(seqType, qc, info);
    // pre-evaluate the variable's type check if the expression is a constant value
    if(var.checksType() && expr.isValue()) {
      expr = var.checkType((Value) expr, qc, info, true);
      var.refineType(expr.seqType(), qc, info);
    }
    size = scoring ? 1 : expr.size();
    var.size = size;
    var.data = expr.data();
    return this;
  }

  @Override
  public Let copy(final QueryContext qc, final VarScope scp, final IntObjMap<Var> vs) {
    final Var v = scp.newCopyOf(qc, var);
    vs.put(var.id, v);
    return new Let(v, expr.copy(qc, scp, vs), scoring, info);
  }

  @Override
  public boolean accept(final ASTVisitor visitor) {
    return expr.accept(visitor) && visitor.declared(var);
  }

  @Override
  void calcSize(final long[] minMax) {
    // a let clause binds a single value per tuple and never changes the
    // number of tuples, so the size estimate is left untouched
  }

  /**
   * Returns an expression that is appropriate for inlining.
   * @param qc query context
   * @param scp variable scope
   * @return inlineable expression
   * @throws QueryException query exception
   */
  Expr inlineExpr(final QueryContext qc, final VarScope scp) throws QueryException {
    return scoring ? Function._FT_SCORE.get(null, info, expr).optimize(qc, scp)
                   : var.checked(expr, qc, scp, info);
  }

  @Override
  public void plan(final FElem plan) {
    final FElem e = planElem();
    if(scoring) e.add(planAttr(Token.token(SCORE), Token.TRUE));
    var.plan(e);
    expr.plan(e);
    plan.add(e);
  }

  @Override
  public String toString() {
    return LET + ' ' + (scoring ? SCORE + ' ' : "") + var + ' ' + ASSIGN + ' ' + expr;
  }

  /** Evaluator for a block of {@code let} expressions. */
  private static class LetEval extends Eval {
    /** Let expressions of the current block, in declaration order. */
    private final ArrayList<Let> lets;
    /** Sub-evaluator. */
    private final Eval sub;

    /**
     * Constructor for the first let binding in the block.
     * @param let first let binding
     * @param subEval sub-evaluator
     */
    LetEval(final Let let, final Eval subEval) {
      lets = new ArrayList<>();
      lets.add(let);
      sub = subEval;
    }

    @Override
    boolean next(final QueryContext qc) throws QueryException {
      if(!sub.next(qc)) return false;
      for(final Let let : lets) {
        final Value vl;
        if(let.scoring) {
          // enable scoring only while the bound expression is evaluated,
          // then restore the previous flag
          final boolean s = qc.scoring;
          try {
            qc.scoring = true;
            vl = score(let.expr.iter(qc));
          } finally {
            qc.scoring = s;
          }
        } else {
          vl = qc.value(let.expr);
        }
        qc.set(let.var, vl, let.info);
      }
      return true;
    }
  }
}
| bsd-3-clause |
NCIP/cagrid | cagrid/Software/core/caGrid/projects/wizard/src/org/pietschy/wizard/models/StaticModel.java | 4540 | /**
* Wizard Framework
* Copyright 2004 - 2005 Andrew Pietsch
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* $Id: StaticModel.java,v 1.1 2007-05-17 13:58:49 joshua Exp $
*/
package org.pietschy.wizard.models;
import org.pietschy.wizard.AbstractWizardModel;
import org.pietschy.wizard.WizardStep;
import org.pietschy.wizard.OverviewProvider;
import org.pietschy.wizard.Wizard;
import javax.swing.*;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Collections;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeEvent;
import java.awt.*;
/**
* This class provides the basis for a simple linear wizard model. Steps are added by calling the
* {@link #add} method and are traversed in the order of addition.
*/
public class StaticModel extends AbstractWizardModel implements OverviewProvider
{
   private ArrayList steps = new ArrayList();
   private int currentStep = 0;
   private StaticModelOverview overviewComponent;

   public StaticModel()
   {
   }

   /**
    * Resets this model by rewinding to the first step in the wizard.
    */
   public void reset()
   {
      currentStep = 0;
      setActiveStep((WizardStep) steps.get(currentStep));
   }

   public void nextStep()
   {
      if (currentStep >= steps.size() - 1)
      {
         throw new IllegalStateException("Already on last step");
      }

      currentStep = currentStep + 1;
      setActiveStep((WizardStep) steps.get(currentStep));
   }

   public void previousStep()
   {
      if (currentStep == 0)
      {
         throw new IllegalStateException("Already at first step");
      }

      currentStep = currentStep - 1;
      setActiveStep((WizardStep) steps.get(currentStep));
   }

   public void lastStep()
   {
      currentStep = steps.size() - 1;
      setActiveStep((WizardStep) steps.get(currentStep));
   }

   public boolean isLastStep(WizardStep step)
   {
      int lastIndex = steps.size() - 1;
      return steps.indexOf(step) == lastIndex;
   }

   public Iterator stepIterator()
   {
      return Collections.unmodifiableList(steps).iterator();
   }

   /**
    * Adds a step to the end of the wizard.
    * @param step the {@link WizardStep} to add.
    */
   public void add(WizardStep step)
   {
      steps.add(step);
      addCompleteListener(step);
   }

   /**
    * Invoked after the current step has changed; recomputes which of the
    * navigation buttons should be enabled.
    */
   public void refreshModelState()
   {
      setNextAvailable(getActiveStep().isComplete() && !isLastStep(getActiveStep()));
      setPreviousAvailable(currentStep > 0);
      setLastAvailable(allStepsComplete() && !isLastStep(getActiveStep()));
      setCancelAvailable(true);
   }

   /**
    * Returns true if every step in the wizard reports <tt>true</tt> from
    * {@link WizardStep#isComplete}. Primarily used to decide whether the
    * last button can be enabled.
    * @return <tt>true</tt> if all the steps in the wizard are complete, <tt>false</tt> otherwise.
    */
   public boolean allStepsComplete()
   {
      boolean complete = true;
      for (Iterator iterator = steps.iterator(); complete && iterator.hasNext();)
      {
         complete = ((WizardStep) iterator.next()).isComplete();
      }

      return complete;
   }

   /**
    * Returns a JComponent serving as an overview of this wizard. The overview
    * can be disabled by calling {@link Wizard#setOverviewVisible} with
    * <tt>false</tt>.
    * @return a component that provides an overview of this wizard and its current state.
    */
   public JComponent getOverviewComponent()
   {
      if (overviewComponent == null)
      {
         overviewComponent = new StaticModelOverview(this);
      }

      return overviewComponent;
   }
}
| bsd-3-clause |
leriomaggio/code-coherence-evaluation-tool | code_comments_coherence/media/jfreechart/0.7.1/jfreechart-071zip/extracted/jfreechart-0.7.1/source/com/jrefinery/chart/HorizontalBarRenderer.java | 6206 | /* =======================================
* JFreeChart : a Java Chart Class Library
* =======================================
*
* Project Info: http://www.jrefinery.com/jfreechart;
* Project Lead: David Gilbert (david.gilbert@jrefinery.com);
*
* (C) Copyright 2000-2002, by Simba Management Limited and Contributors.
*
* This library is free software; you can redistribute it and/or modify it under the terms
* of the GNU Lesser General Public License as published by the Free Software Foundation;
* either version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with this
* library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330,
* Boston, MA 02111-1307, USA.
*
* --------------------------
* HorizontalBarRenderer.java
* --------------------------
* (C) Copyright 2001, 2002, by Simba Management Limited.
*
* Original Author: David Gilbert (for Simba Management Limited);
* Contributor(s): -;
*
* $Id: HorizontalBarRenderer.java,v 1.6 2001/11/16 19:48:47 mungady Exp $
*
* Changes
* -------
* 22-Oct-2001 : Version 1 (DG);
* Renamed DataSource.java --> Dataset.java etc. (DG);
* 23-Oct-2001 : Changed intro and trail gaps on bar plots to use percentage of available space
* rather than a fixed number of units (DG);
* 31-Oct-2001 : Debug for gaps (DG);
* 15-Nov-2001 : Modified to allow for null values (DG);
* 13-Dec-2001 : Changed drawBar(...) method to return a Shape (that can be used for tooltips) (DG);
* 16-Jan-2002 : Updated Javadoc comments (DG);
*
*/
package com.jrefinery.chart;
import java.awt.*;
import java.awt.geom.*;
import com.jrefinery.data.CategoryDataset;
/**
* Plug-in class that handles the drawing of bars on a horizontal bar plot.
*/
public class HorizontalBarRenderer {

    /** Constant that controls the minimum width before a bar has an outline drawn. */
    private static final double BAR_OUTLINE_WIDTH_THRESHOLD = 3.0;

    /**
     * Returns true, since for this renderer there are gaps between the items in one category.
     */
    public boolean hasItemGaps() {
        return true;
    }

    /**
     * This renderer shows each series within a category as a separate bar (as opposed to a
     * stacked bar renderer), so one bar width is needed per series.
     * @param data The data.
     * @return The number of bar widths per category (the series count).
     */
    public int barWidthsPerCategory(CategoryDataset data) {
        return data.getSeriesCount();
    }

    /**
     * Draws the bar for a single (series, category) data item.
     * @param g2 The graphics device.
     * @param dataArea The data area.
     * @param plot The plot.
     * @param valueAxis The range axis.
     * @param data The data.
     * @param series The series number (zero-based index).
     * @param category The category.
     * @param categoryIndex The category number (zero-based index).
     * @param zeroToJava2D The data value zero translated into Java2D space.
     * @param itemWidth The width of one bar.
     * @param categorySpan The width of all items in one category.
     * @param categoryGapSpan The width of all category gaps.
     * @param itemSpan The width of all items.
     * @param itemGapSpan The width of all item gaps.
     * @return The bar's shape (usable e.g. for tooltip hot-spots), or null if the value is null.
     */
    public Shape drawBar(Graphics2D g2, Rectangle2D dataArea, BarPlot plot, ValueAxis valueAxis,
                         CategoryDataset data, int series, Object category, int categoryIndex,
                         double zeroToJava2D, double itemWidth,
                         double categorySpan, double categoryGapSpan,
                         double itemSpan, double itemGapSpan) {

        Shape result = null;

        // first check the value we are plotting... null values draw nothing
        Number value = data.getValue(series, category);
        if (value!=null) {
            // X: start at whichever of the zero point and the translated value is the
            // smaller coordinate, so negative values extend to the left of the origin
            double translatedValue = valueAxis.translateValueToJava2D(value.doubleValue(), dataArea);
            double rectX = Math.min(zeroToJava2D, translatedValue);

            // Y: intro gap, then completed categories, category gaps, and the bars
            // and item gaps already drawn within the current category
            double rectY = dataArea.getY() + dataArea.getHeight()*plot.getIntroGapPercent();

            int categories = data.getCategoryCount();
            int seriesCount = data.getSeriesCount();
            if (categories>1) {
                rectY = rectY
                        // bars in completed categories
                        + (categoryIndex*categorySpan/categories)
                        // gaps between completed categories
                        + (categoryIndex*categoryGapSpan/(categories-1))
                        // bars+gaps completed in current category
                        + (series*itemSpan/(categories*seriesCount));
                if (seriesCount>1) {
                    rectY = rectY
                            + (series*itemGapSpan/(categories*(seriesCount-1)));
                }
            }
            else {
                rectY = rectY
                        // bars+gaps completed in current category;
                        + (series*itemSpan/(categories*seriesCount));
                if (seriesCount>1) {
                    rectY = rectY
                            + (series*itemGapSpan/(categories*(seriesCount-1)));
                }
            }

            // WIDTH: distance between the zero point and the data value
            double rectWidth = Math.abs(translatedValue-zeroToJava2D);

            // HEIGHT
            double rectHeight = itemWidth;

            // DRAW THE BAR...
            Rectangle2D bar = new Rectangle2D.Double(rectX, rectY, rectWidth, rectHeight);
            Paint seriesPaint = plot.getSeriesPaint(series);
            g2.setPaint(seriesPaint);
            g2.fill(bar);
            // outline only bars wide enough for it to be visible
            if (itemWidth>BAR_OUTLINE_WIDTH_THRESHOLD) {
                g2.setStroke(plot.getSeriesStroke(series));
                g2.setPaint(plot.getSeriesOutlinePaint(series));
                g2.draw(bar);
            }
            result = bar;
        }
        return result;
    }
} | bsd-3-clause |
robertsmieja/clarity | src/main/java/skadistats/clarity/processor/modifiers/Modifiers.java | 959 | package skadistats.clarity.processor.modifiers;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import skadistats.clarity.event.Provides;
import skadistats.clarity.model.StringTable;
import skadistats.clarity.processor.runner.Context;
import skadistats.clarity.processor.stringtables.OnStringTableEntry;
import skadistats.clarity.wire.common.proto.DotaModifiers;
@Provides({OnModifierTableEntry.class})
public class Modifiers {

    /**
     * Decodes an updated "ActiveModifiers" string-table entry into a modifier
     * buff message and re-raises it as an {@link OnModifierTableEntry} event.
     * Entries with no value are ignored.
     */
    @OnStringTableEntry("ActiveModifiers")
    public void onTableEntry(Context ctx, StringTable table, int index, String key, ByteString value) throws InvalidProtocolBufferException {
        if (value == null) {
            return;
        }
        DotaModifiers.CDOTAModifierBuffTableEntry entry = DotaModifiers.CDOTAModifierBuffTableEntry.parseFrom(value);
        ctx.createEvent(OnModifierTableEntry.class, DotaModifiers.CDOTAModifierBuffTableEntry.class).raise(entry);
    }
}
| bsd-3-clause |
cameronbraid/rox | src/main/java/com/flat502/rox/marshal/MethodUnmarshaller.java | 641 | package com.flat502.rox.marshal;
/**
* Encapsulates methods common to all unmarshallers and provides type-safety.
*/
public interface MethodUnmarshaller {
/**
* This method is invoked when no specific
* {@link FieldNameCodec} exists.
* <p>
* For client implementations this typically means
* no codec has been set on the client.
* <p>
* For server implementations this typically means
* {@link UnmarshallerAid#getFieldNameCodec(String)}
* has returned <code>null</code>.
* @return
* the default codec associated with this
* unmarshaller.
*/
FieldNameCodec getDefaultFieldNameCodec();
}
| bsd-3-clause |
Keripo/Beats | src/com/beatsportable/beats/GUIDrawingArea.java | 927 | package com.beatsportable.beats;
import android.graphics.Bitmap;
import android.graphics.Canvas;
/**
 * Abstract drawing surface for the note-field renderer.  Concrete
 * subclasses supply the coordinate transforms (time to y, pitch to x),
 * bitmap loading, and the clip-region management needed to draw arrows.
 */
public abstract class GUIDrawingArea {
	/** Stores information required to convert times to pixel locations, and draw stuff. */
	//public abstract Canvas getCanvas();
	/** Converts a note time to a screen y coordinate. */
	public abstract int timeToY(float time);
	/** Converts a pitch (arrow column) to a screen x coordinate. */
	public abstract int pitchToX(int pitch);
	// NOTE(review): per the original comment, returned coordinates appear to be
	// the (0,0) corner of a 64x64 block -- confirm against callers.
	/**
	 * Retrieves a bitmap by resource id, scaled to the given dimensions.
	 * Results may be cached; see {@link #clearBitmaps()}.
	 */
	public abstract Bitmap getBitmap(String rsrc, int width, int height);
	/** Clears the bitmap cache (works around GC pressure, per original author). */
	public abstract void clearBitmaps();
	/** Replaces the current clipping region with the entire screen. */
	public abstract void setClip_screen(Canvas canvas);
	/** Replaces the current clipping region with the area available for arrows. */
	public abstract void setClip_arrowSpace(Canvas canvas);
}
| bsd-3-clause |
TreeBASE/treebasetest | treebase-core/src/test/java/org/cipres/treebase/dao/tree/PackageTestSuite.java | 1060 | package org.cipres.treebase.dao.tree;
import junit.framework.Test;
import junit.framework.TestSuite;
/**
 * Builds a suite that can be used to run all of the tests within this
 * package as well as within any of its subpackages.
 *
 * @generatedBy CodePro at 10/6/05 1:26 PM
 * @author Jin Ruan
 * @version $Revision: 1.0 $
 */
public class PackageTestSuite {
	/**
	 * Command-line entry point: runs the package suite with the
	 * JUnit text UI runner.
	 *
	 * @param args the command line arguments
	 *
	 * @generatedBy CodePro at 10/6/05 1:26 PM
	 */
	public static void main(String[] args) {
		junit.textui.TestRunner.run(suite());
	}

	/**
	 * Creates a test suite containing every test case in this package
	 * (and all subpackages).
	 *
	 * @return the test suite that was created
	 *
	 * @generatedBy CodePro at 10/6/05 1:26 PM
	 */
	public static Test suite() {
		final TestSuite packageSuite =
				new TestSuite("Tests in package " + PackageTestSuite.class.getName());
		packageSuite.addTestSuite(PhyloTreeDAOTest.class);
		return packageSuite;
	}
}
| bsd-3-clause |
phr00t/jmonkeyengine | navmesh/CleanNullRegionBorders.java | 30197 | package navmesh;
import java.util.ArrayDeque;
import navmesh.OpenHeightfield.OpenHeightFieldIterator;
/**
* Implements three algorithms that clean up issues that can
* develop around null region boarders.
*
* <p><strong>Detect and fix encompassed null regions:</strong></p>
* <p>If a null region is found that is fully encompassed by a single
* region, then the region will be split into two regions at the
* null region border.</p>
*
* <p><strong>Detect and fix "short wrapping" of null regions:</strong></p>
* <p>Regions can sometimes wrap slightly around the corner of a null region
* in a manner that eventually results in the formation of self-intersecting
* polygons.</p>
* <p>Example: Before the algorithm is applied:</p>
* <p><a href=
* "http://www.critterai.org/projects/nmgen/images/ohfg_08_cornerwrapbefore.png"
* target="_parent">
* <img alt="" src=
* "http://www.critterai.org/projects/nmgen/images/ohfg_08_cornerwrapbefore.jpg"
* style="width: 620px; height: 353px; " />
* </a></p>
* <p>Example: After the algorithm is applied:</p>
* <p><a href=
* "http://www.critterai.org/projects/nmgen/images/ohfg_09_cornerwrapafter.png"
* target="_parent">
* <img alt="" src=
* "http://www.critterai.org/projects/nmgen/images/ohfg_09_cornerwrapafter.jpg"
* style="width: 620px; height: 353px; " />
* </a></p>
*
* <p><strong>Detect and fix incomplete null region connections:</strong></p>
* <p>If a region touches null region only diagonally, then contour detection
* algorithms may not properly detect the null region connection. This can
* adversely effect other algorithms in the pipeline.<p>
* <p>Example: Before algorithm is applied:</p>
* <p><pre>
* b b a a a a
* b b a a a a
* a a x x x x
* a a x x x x
* </pre></p>
* <p>Example: After algorithm is applied:</p>
* <p><pre>
* b b a a a a
* b b b a a a <-- Span transferred to region B.
* a a x x x x
* a a x x x x
* </pre></p>
*
* @see <a href="http://www.critterai.org/nmgen_regiongen"
* target="_parent">Region Generation</a>
*/
public class CleanNullRegionBorders
    implements IOpenHeightFieldAlgorithm
{
    /*
     * Design Notes:
     *
     * Three algorithms have been aggregated into this single class
     * for performance reasons.  Otherwise we would be stuck
     * performing three full contour searches rather than one.
     *
     * The optimization method used in the search can result in missed
     * null region contours.  Consider the following pattern:
     *
     *     x x x x x x
     *     x a a a a x      x - null region WITHOUT SPANS
     *     x a v v a x      a - region A
     *     x a v v a x      v - null region WITHOUT SPANS
     *     x a a a a x
     *     x x x x x x
     *
     * If an all span search is performed, the outer null region (x) will
     * be detected, but during the process all a-region spans will be marked
     * as viewed.  This will leave no spans available to detect the inner
     * null region (v).
     *
     * I'm not fixing this until it proves to be a problem or I figure out
     * a way of resolving the design issue without killing performance.
     */

    private static final int NULL_REGION = OpenHeightSpan.NULL_REGION;

    // If TRUE, only null region spans are scanned for border detection.
    private final boolean mUseOnlyNullSpans;

    // Working variables.  Content is meaningless outside of the
    // method they are used in.
    private final ArrayDeque<OpenHeightSpan> mwOpenSpans
            = new ArrayDeque<OpenHeightSpan>(1024);
    private final ArrayDeque<Integer> mwBorderDistance
            = new ArrayDeque<Integer>(1024);
    private final int[] mwNeighborRegions = new int[8];

    /**
     * Constructor.
     * <p>Choosing a contour detection type:</p>
     * <p>This algorithm has to detect and walk null region contours.  (Where
     * null regions border non-null regions.)  There are two options for
     * detection:  Search every single span looking for null region
     * neighbors.  Or search only null region spans looking for
     * non-null region neighbors.  Since null region spans are only a tiny
     * fraction of total spans, the second option has better performance.</p>
     * <p>If a heightfield is constructed such that all null regions have
     * at least one null region span in each contour, then set
     * useOnlyNullRegionSpans to TRUE.</p>
     * @param useOnlyNullRegionSpans If TRUE, then only null region spans
     * will be used to initially detect null region borders.  This
     * improves performance.  If FALSE, all spans are searched to detect
     * borders.
     */
    public CleanNullRegionBorders(boolean useOnlyNullRegionSpans)
    {
        mUseOnlyNullSpans = useOnlyNullRegionSpans;
    }

    /**
     * {@inheritDoc}
     * <p>This operation utilizes {@link OpenHeightSpan#flags}.  It
     * expects the value to be zero on entry, and re-zero's the value
     * on exit.</p>
     * <p>Expects a heightfield with fully built regions.</p>
     */
    @Override
    public void apply(OpenHeightfield field)
    {
        int nextRegionID = field.regionCount();
        final OpenHeightFieldIterator iter = field.dataIterator();

        // Iterate over the spans, trying to find null region borders.
        while (iter.hasNext())
        {
            OpenHeightSpan span = iter.next();

            if (span.flags != 0)
                // Span was processed in a previous iteration.
                // Ignore it.
                continue;

            span.flags = 1;

            OpenHeightSpan workingSpan = null;
            int edgeDirection = -1;

            if (span.regionID() == NULL_REGION)
            {
                // This is a null region span.  See if it
                // connects to a span in a non-null region.
                edgeDirection = getNonNullBorderDirection(span);
                if (edgeDirection == -1)
                    // This span is not a border span.  Ignore it.
                    continue;

                // This is a border span.  Step into the non-null
                // region and swing the direction around 180 degrees.
                workingSpan = span.getNeighbor(edgeDirection);
                edgeDirection = (edgeDirection+2) & 0x3;
            }
            else if (!mUseOnlyNullSpans)
            {
                // This is a non-null region span and I'm allowed
                // to look at it.  See if it connects to a null region.
                edgeDirection = getNullBorderDirection(span);
                if (edgeDirection == -1)
                    // This span is not a null region border span.  Ignore it.
                    continue;
                workingSpan = span;
            }
            else
                // Not interested in this span.
                continue;

            // Process the null region contour.  Detect and fix
            // local issues.  Determine if the region is
            // fully encompassed by a single non-null region.
            boolean isEncompassedNullRegion = processNullRegion(
                    workingSpan
                    , edgeDirection);

            if (isEncompassedNullRegion)
            {
                // This span is part of a group of null region spans
                // that is encompassed within a single non-null region.
                // This is not permitted.  Need to fix it.
                partialFloodRegion(workingSpan
                        , edgeDirection
                        , nextRegionID);
                nextRegionID++;
            }
        }

        field.setRegionCount(nextRegionID);

        // Clear all flags.
        iter.reset();
        while (iter.hasNext())
        {
            iter.next().flags = 0;
        }
    }

    /**
     * Partially flood a region away from the specified direction.
     * <p>{@link OpenHeightSpan#distanceToRegionCore()}
     * is set to zero for all flooded spans.</p>
     * @param startSpan The span to start the flood from.
     * @param borderDirection The hard border for flooding.  No
     * spans in this direction from the startSpan will be flooded.
     * @param newRegionID The region id to assign the flooded
     * spans to.
     */
    private void partialFloodRegion(OpenHeightSpan startSpan
            , int borderDirection
            , int newRegionID)
    {
        // Gather some information.
        final int antiBorderDirection = (borderDirection+2) & 0x3;
        final int regionID = startSpan.regionID();

        // Re-assign the start span and queue it for the neighbor search.
        startSpan.setRegionID(newRegionID);
        startSpan.setDistanceToRegionCore(0);  // This information is lost.
        mwOpenSpans.add(startSpan);
        mwBorderDistance.add(0);

        // Search for new spans that can be assigned the new region.
        while(!mwOpenSpans.isEmpty())
        {
            // Get the next span off the stack.
            final OpenHeightSpan span = mwOpenSpans.pollLast();
            final int distance = mwBorderDistance.pollLast();

            // Search in all directions for neighbors.
            for (int i = 0; i < 4; i++)
            {
                final OpenHeightSpan nSpan = span.getNeighbor(i);
                if (nSpan == null
                        || nSpan.regionID() != regionID)
                    // No span in this direction, or the span
                    // is not in the region being processed.
                    // Note: It may have already been transferred.
                    continue;

                int nDistance = distance;
                if (i == borderDirection)
                {
                    // This neighbor is back toward the border.
                    if (distance == 0)
                        // The span is at the border.  Can't go
                        // further in this direction.  Ignore
                        // this neighbor.
                        continue;
                    nDistance--;
                }
                else if (i == antiBorderDirection)
                    // This neighbor is further away from the border.
                    nDistance++;

                // Transfer the neighbor to the new region.
                nSpan.setRegionID(newRegionID);
                nSpan.setDistanceToRegionCore(0);  // This information is lost.

                // Add the span to the stack to be processed.
                mwOpenSpans.add(nSpan);
                mwBorderDistance.add(nDistance);
            }
        }
    }

    /**
     * Detects and fixes bad span configurations in the vicinity of a
     * null region contour.  (See class description for details.)
     * @param startSpan A span in a non-null region that borders a null
     * region.
     * @param startDirection The direction of the null region border.
     * @return TRUE if the start span's region completely encompasses
     * the null region.
     */
    private boolean processNullRegion(OpenHeightSpan startSpan
            , int startDirection)
    {
        /*
         * This algorithm traverses the contour.  As it does so, it detects
         * and fixes various known dangerous span configurations.
         *
         * Traversing the contour:  A good way to visualize it is to think
         * of a robot sitting on the floor facing a known wall.  It then
         * does the following to skirt the wall:
         * 1. If there is a wall in front of it, turn clockwise in 90 degree
         *    increments until it finds the wall is gone.
         * 2. Move forward one step.
         * 3. Turn counter-clockwise by 90 degrees.
         * 4. Repeat from step 1 until it finds itself at its original
         *    location facing its original direction.
         *
         * See also: http://www.critterai.org/nmgen_contourgen#robotwalk
         *
         * As the traversal occurs, the number of acute (90 degree) and
         * obtuse (270 degree) corners are monitored.  If a complete contour
         * is detected and (obtuse corners > acute corners), then the null
         * region is inside the contour.  Otherwise the null region is
         * outside the contour, which we don't care about.
         */

        int borderRegionID = startSpan.regionID();

        // Prepare for loop.
        OpenHeightSpan span = startSpan;
        OpenHeightSpan nSpan = null;
        int dir = startDirection;

        // Initialize monitoring variables.
        int loopCount = 0;
        int acuteCornerCount = 0;
        int obtuseCornerCount = 0;
        int stepsWithoutBorder = 0;
        boolean borderSeenLastLoop = false;
        boolean isBorder = true;  // Initial value doesn't matter.

        // Assume a single region is connected to the null region
        // until proven otherwise.
        boolean hasSingleConnection = true;

        /*
         * The loop limit exists for the sole reason of preventing
         * an infinite loop in case of bad input data.
         * It is set to a very high value because there is no way of
         * definitively determining a safe smaller value.  Setting
         * the value too low can result in rescanning a contour
         * multiple times, killing performance.
         */
        while (++loopCount < Integer.MAX_VALUE)
        {
            // Get the span across the border.
            nSpan = span.getNeighbor(dir);

            // Detect which type of edge this direction points across.
            if (nSpan == null)
            {
                // It points across a null region border edge.
                isBorder = true;
            }
            else
            {
                // We never need to perform contour detection
                // on this span again.  So mark it as processed.
                nSpan.flags = 1;
                if (nSpan.regionID() == NULL_REGION)
                {
                    // It points across a null region border edge.
                    isBorder = true;
                }
                else
                {
                    // This isn't a null region border.
                    isBorder = false;
                    if (nSpan.regionID() != borderRegionID)
                        // It points across a border to a non-null region.
                        // This means the current contour can't
                        // represent a fully encompassed null region.
                        hasSingleConnection = false;
                }
            }

            // Process the border.
            if (isBorder)
            {
                // It is a border edge.
                if (borderSeenLastLoop)
                {
                    /*
                     * A border was detected during the last loop as well.
                     * Two detections in a row indicates we passed an acute
                     * (inner) corner.
                     *
                     *     a x
                     *     x x
                     */
                    acuteCornerCount++;
                }
                else if (stepsWithoutBorder > 1)
                {
                    /*
                     * We have moved at least two spans before detecting
                     * a border.  This indicates we passed an obtuse
                     * (outer) corner.
                     *
                     *     a a
                     *     a x
                     */
                    obtuseCornerCount++;
                    stepsWithoutBorder = 0;
                    // Detect and fix span configuration issues around this
                    // corner.
                    if (processOuterCorner(span, dir))
                        // A change was made and it resulted in the
                        // corner area having multiple region connections.
                        hasSingleConnection = false;
                }
                dir = (dir+1) & 0x3;  // Rotate in clockwise direction.
                borderSeenLastLoop = true;
                stepsWithoutBorder = 0;
            }
            else
            {
                /*
                 * Not a null region border.
                 * Move to the neighbor and swing the search direction back
                 * one increment (counterclockwise).  By moving the direction
                 * back one increment we guarantee we don't miss any edges.
                 */
                span = nSpan;
                dir = (dir+3) & 0x3;  // Rotate counterclockwise direction.
                borderSeenLastLoop = false;
                stepsWithoutBorder++;
            }

            if (startSpan == span && startDirection == dir)
                // Have returned to the original span and direction.
                // The search is complete.
                // Is the null region inside the contour?
                return (hasSingleConnection
                        && obtuseCornerCount > acuteCornerCount);
        }

        // If got here then the null region border is too large to be fully
        // explored.  So it can't be encompassed.
        return false;
    }

    /**
     * Detects and fixes span configuration issues in the vicinity
     * of obtuse (outer) null region corners.
     * @param referenceSpan The span in a non-null region that is
     * just past the outer corner.
     * @param borderDirection The direction of the null region border.
     * @return TRUE if more than one region connects to the null region
     * in the vicinity of the corner.  (This may or may not be due to
     * a change made by this operation.)
     */
    private boolean processOuterCorner(OpenHeightSpan referenceSpan
            , int borderDirection)
    {
        boolean hasMultiRegions = false;

        // Get the previous two spans along the border.
        OpenHeightSpan backOne =
            referenceSpan.getNeighbor((borderDirection+3) & 0x3);
        OpenHeightSpan backTwo = backOne.getNeighbor(borderDirection);
        OpenHeightSpan testSpan;

        if (backOne.regionID() != referenceSpan.regionID()
                && backTwo.regionID() == referenceSpan.regionID())
        {
            /*
             * Dangerous corner configuration.
             *
             *     a x
             *     b a
             *
             * Need to change to one of the following configurations:
             *
             *     b x        a x
             *     b a        b b
             *
             * Reason: During contour detection this type of configuration can
             * result in the region connection being detected as a
             * region-region portal, when it is not.  The region connection
             * is actually interrupted by the null region.
             *
             * This configuration has been demonstrated to result in
             * two regions being improperly merged to encompass an
             * internal null region.
             *
             * Example:
             *
             *     a a x x x a
             *     a a x x a a
             *     b b a a a a
             *     b b a a a a
             *
             * During contour and connection detection for region b, at no
             * point will the null region be detected.  It will appear
             * as if a clean a-b portal exists.
             *
             * An investigation into fixing this issue via updates to the
             * watershed or contour detection algorithms did not turn
             * up a better way of resolving this issue.
             */
            hasMultiRegions = true;

            // Determine how many connections backTwo has to backOne's region.
            testSpan = backOne.getNeighbor((borderDirection+3) & 0x3);
            int backTwoConnections = 0;
            if (testSpan != null
                    && testSpan.regionID() == backOne.regionID())
            {
                backTwoConnections++;
                testSpan = testSpan.getNeighbor(borderDirection);
                if (testSpan != null
                        && testSpan.regionID() == backOne.regionID())
                    backTwoConnections++;
            }

            // Determine how many connections the reference span has
            // to backOne's region.
            int referenceConnections = 0;
            testSpan = backOne.getNeighbor((borderDirection+2) & 0x3);
            if (testSpan != null
                    && testSpan.regionID() == backOne.regionID())
            {
                referenceConnections++;
                testSpan = testSpan.getNeighbor((borderDirection+2) & 0x3);
                if (testSpan != null
                        && testSpan.regionID() == backOne.regionID())
                    // BUG FIX: this previously incremented backTwoConnections,
                    // which double-counted backTwo and skewed the region
                    // re-assignment decision below toward backTwo.
                    referenceConnections++;
            }

            // Change the region of the span that has the most connections
            // to the target region.
            if (referenceConnections > backTwoConnections)
                referenceSpan.setRegionID(backOne.regionID());
            else
                backTwo.setRegionID(backOne.regionID());
        }
        else if (backOne.regionID() == referenceSpan.regionID()
                && backTwo.regionID() == referenceSpan.regionID())
        {
            /*
             * Potential dangerous short wrap.
             *
             *  a x
             *  a a
             *
             *  Example of actual problem configuration:
             *
             *  b b x x
             *  b a x x <- Short wrap.
             *  b a a a
             *
             *  In the above case, the short wrap around the corner of the
             *  null region has been demonstrated to cause self-intersecting
             *  polygons during polygon formation.
             *
             *  This algorithm detects whether or not one (and only one)
             *  of the axis neighbors of the corner should be re-assigned to
             *  a more appropriate region.
             *
             *  In the above example, the following configuration is more
             *  appropriate:
             *
             *  b b x x
             *  b b x x <- Change to this row.
             *  b a a a
             */

            // Check to see if backTwo should be in a different region.
            int selectedRegion = selectedRegionID(backTwo
                    , (borderDirection+1) & 0x3
                    , (borderDirection+2) & 0x3);
            if (selectedRegion == backTwo.regionID())
            {
                // backTwo should not be re-assigned.  How about
                // the reference span?
                selectedRegion = selectedRegionID(referenceSpan
                        , borderDirection
                        , (borderDirection+3) & 0x3);
                if (selectedRegion != referenceSpan.regionID())
                {
                    // The reference span should be reassigned
                    // to a new region.
                    referenceSpan.setRegionID(selectedRegion);
                    hasMultiRegions = true;
                }
            }
            else
            {
                // backTwo should be re-assigned to a new region.
                backTwo.setRegionID(selectedRegion);
                hasMultiRegions = true;
            }
        }
        else
            /*
             * No dangerous configurations detected.  But definitely
             * has a change in regions at the corner.  (We know this
             * because one of the previous checks looked for a single
             * region for all wrap spans.)
             */
            hasMultiRegions = true;

        return hasMultiRegions;
    }

    /**
     * Checks the span to see if it should be reassigned to a new region.
     * @param referenceSpan A span on one side of a null region contour's
     * outer corner.  It is expected that all spans that wrap the
     * corner are in the same region.
     * @param borderDirection The direction of the null region border.
     * @param cornerDirection The direction of the outer corner from the
     * reference span.
     * @return The region the span should be a member of.  May be the
     * region the span is currently a member of.
     */
    private int selectedRegionID(OpenHeightSpan referenceSpan
            , int borderDirection
            , int cornerDirection)
    {
        // Get the regions of all neighbors.
        referenceSpan.getDetailedRegionMap(mwNeighborRegions, 0);

        /*
         * Initial example state:
         *
         * a - Known region.
         * x - Null region.
         * u - Unknown, not checked yet.
         *
         *     u u u
         *     u a x
         *     u a a
         */

        // The only possible alternate region id is from
        // the span that is opposite the border.  So check it first.
        int regionID = mwNeighborRegions[(borderDirection+2) & 0x3];
        if (regionID == referenceSpan.regionID()
                || regionID == NULL_REGION)
            /*
             * The region away from the border is either a null region
             * or the same region.  So we keep the current region.
             *
             *     u u u      u u u
             *     a a x  or  x a x  <-- Potentially bad, but stuck with it.
             *     u a a      u a a
             */
            return referenceSpan.regionID();

        // Candidate region for re-assignment.
        int potentialRegion = regionID;

        // Next we check the region opposite from the corner direction.
        // If it is the current region, then we definitely can't
        // change the region id without risk of splitting the region.
        regionID = mwNeighborRegions[(cornerDirection+2) & 0x3];
        if (regionID == referenceSpan.regionID() || regionID == NULL_REGION)
            /*
             * The region opposite from the corner direction is
             * either a null region or the same region.  So we
             * keep the current region.
             *
             *     u a u      u x u
             *     b a x  or  b a x
             *     u a a      u a a
             */
            return referenceSpan.regionID();

        /*
         * We have checked the early exit special cases.  Now a generalized
         * brute count is performed.
         *
         * Priority is given to the potential region.  Here is why:
         * (Highly unlikely worst case scenario.)
         *
         *     c c c      c c c
         *     b a x  ->  b b x  Select b even though b count == a count.
         *     b a a      b a a
         */

        // Neighbors in potential region.
        // We know this will have a minimum value of 1.
        int potentialCount = 0;

        // Neighbors in the span's current region.
        // We know this will have a minimum value of 2.
        int currentCount = 0;

        /*
         * Maximum edge case:
         *
         *     b b b
         *     b a x
         *     b a a
         *
         * The maximum edge case for region A can't exist.  It
         * is filtered out during one of the earlier special cases
         * handlers.
         *
         * Other cases may exist if more regions are involved.
         * Such cases will tend to favor the current region.
         */
        for (int i = 0; i < 8; i++)
        {
            if (mwNeighborRegions[i] == referenceSpan.regionID())
                currentCount++;
            else if (mwNeighborRegions[i] == potentialRegion)
                potentialCount++;
        }

        return (potentialCount < currentCount
                ? referenceSpan.regionID() : potentialRegion);
    }

    /**
     * Returns the direction of the first neighbor in a non-null region.
     * @param span The span to check.
     * @return The direction of the first neighbor in a non-null region, or
     * -1 if all neighbors are in the null region.
     */
    private static int getNonNullBorderDirection(OpenHeightSpan span)
    {
        // Search axis-neighbors.
        for (int dir = 0; dir < 4; ++dir)
        {
            OpenHeightSpan nSpan = span.getNeighbor(dir);
            if (nSpan != null && nSpan.regionID() != NULL_REGION)
                // The neighbor is a non-null region.
                return dir;
        }
        // All neighbors are in the null region.
        return -1;
    }

    /**
     * Returns the direction of the first neighbor in the null region.
     * @param span The span to check.
     * @return The direction of the first neighbor that is in the null
     * region, or -1 if there are no null region neighbors.
     */
    private static int getNullBorderDirection(OpenHeightSpan span)
    {
        // Search axis-neighbors.
        for (int dir = 0; dir < 4; ++dir)
        {
            OpenHeightSpan nSpan = span.getNeighbor(dir);
            if (nSpan == null || nSpan.regionID() == NULL_REGION)
                // The neighbor is a null region.
                return dir;
        }
        // All neighbors are in a non-null region.
        return -1;
    }
}
| bsd-3-clause |