repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
aaronwalker/camel | camel-core/src/test/java/org/apache/camel/processor/UnmarshalProcessorTest.java | 4219 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.TestSupport;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.impl.DefaultMessage;
import org.apache.camel.spi.DataFormat;
public class UnmarshalProcessorTest extends TestSupport {

    public void testDataFormatReturnsSameExchange() throws Exception {
        Exchange input = createExchangeWithBody(new DefaultCamelContext(), "body");
        Processor unmarshal = new UnmarshalProcessor(new MyDataFormat(input));

        unmarshal.process(input);

        assertEquals("UnmarshalProcessor did not copy OUT from IN message", "body", input.getOut().getBody());
    }

    public void testDataFormatReturnsAnotherExchange() throws Exception {
        CamelContext camelContext = new DefaultCamelContext();
        Exchange processed = createExchangeWithBody(camelContext, "body");
        Exchange foreign = createExchangeWithBody(camelContext, "body2");
        Processor unmarshal = new UnmarshalProcessor(new MyDataFormat(foreign));

        // Returning an exchange other than the one being processed is illegal.
        try {
            unmarshal.process(processed);
            fail("Should have thrown exception");
        } catch (RuntimeCamelException e) {
            assertEquals("The returned exchange " + foreign + " is not the same as " + processed + " provided to the DataFormat", e.getMessage());
        }
    }

    public void testDataFormatReturnsMessage() throws Exception {
        Exchange input = createExchangeWithBody(new DefaultCamelContext(), "body");
        Message replacement = new DefaultMessage();
        replacement.setBody(new Object());
        Processor unmarshal = new UnmarshalProcessor(new MyDataFormat(replacement));

        unmarshal.process(input);

        assertSame("UnmarshalProcessor did not make use of the returned OUT message", replacement, input.getOut());
        assertSame("UnmarshalProcessor did change the body bound to the OUT message", replacement.getBody(), input.getOut().getBody());
    }

    public void testDataFormatReturnsBody() throws Exception {
        Exchange input = createExchangeWithBody(new DefaultCamelContext(), "body");
        Object unmarshalledBody = new Object();
        Processor unmarshal = new UnmarshalProcessor(new MyDataFormat(unmarshalledBody));

        unmarshal.process(input);

        assertSame("UnmarshalProcessor did not make use of the returned object being returned while unmarshalling", unmarshalledBody, input.getOut().getBody());
    }

    /**
     * DataFormat stub whose {@code unmarshal} always yields the object supplied at
     * construction time, letting each test choose what UnmarshalProcessor receives
     * (an Exchange, a Message, or a plain body object). Marshalling is unsupported.
     */
    private static class MyDataFormat implements DataFormat {

        private final Object result;

        MyDataFormat(Exchange exchange) {
            result = exchange;
        }

        MyDataFormat(Message message) {
            result = message;
        }

        MyDataFormat(Object unmarshalled) {
            result = unmarshalled;
        }

        @Override
        public void marshal(Exchange exchange, Object graph, OutputStream stream) throws Exception {
            throw new IllegalAccessException("This method is not expected to be used by UnmarshalProcessor");
        }

        @Override
        public Object unmarshal(Exchange exchange, InputStream stream) throws Exception {
            return result;
        }
    }
}
| apache-2.0 |
dkhwangbo/druid | java-util/src/main/java/org/apache/druid/java/util/common/CloseableIterators.java | 2771 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.java.util.common;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.parsers.CloseableIterator;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
 * Static helpers for building {@link CloseableIterator} instances whose
 * {@code close()} releases the underlying resources exactly once.
 */
public class CloseableIterators
{
  /**
   * Concatenates the given iterators; closing the result closes every input iterator.
   */
  public static <T> CloseableIterator<T> concat(List<? extends CloseableIterator<? extends T>> iterators)
  {
    final Closer resourceCloser = Closer.create();
    iterators.forEach(resourceCloser::register);
    return wrap(Iterators.concat(iterators.iterator()), resourceCloser);
  }

  /**
   * Merges already-sorted iterators into a single sorted iterator; closing the
   * result closes every input iterator.
   */
  public static <T> CloseableIterator<T> mergeSorted(
      List<? extends CloseableIterator<? extends T>> iterators,
      Comparator<T> comparator
  )
  {
    Preconditions.checkNotNull(comparator);
    final Closer resourceCloser = Closer.create();
    iterators.forEach(resourceCloser::register);
    return wrap(Iterators.mergeSorted(iterators, comparator), resourceCloser);
  }

  /**
   * Adapts a plain {@link Iterator} into a {@link CloseableIterator} that closes
   * {@code closeable} (if any) the first time {@code close()} is called.
   */
  public static <T> CloseableIterator<T> wrap(Iterator<T> innerIterator, @Nullable Closeable closeable)
  {
    return new CloseableIterator<T>()
    {
      private boolean alreadyClosed = false;

      @Override
      public boolean hasNext()
      {
        return innerIterator.hasNext();
      }

      @Override
      public T next()
      {
        return innerIterator.next();
      }

      @Override
      public void close() throws IOException
      {
        // Idempotent: only the first call reaches the delegate.
        if (alreadyClosed) {
          return;
        }
        if (closeable != null) {
          closeable.close();
        }
        // Set the flag only after a successful close, matching the original
        // semantics: a failed close may be retried.
        alreadyClosed = true;
      }
    };
  }

  /**
   * Wraps an iterator that owns no resources; {@code close()} is a no-op.
   */
  public static <T> CloseableIterator<T> withEmptyBaggage(Iterator<T> innerIterator)
  {
    return wrap(innerIterator, null);
  }

  private CloseableIterators() {}
}
| apache-2.0 |
niuzhijun66/NiuStudyDemo | app/src/main/java/com/code/constant/StringEvent.java | 187 | package com.code.constant;
/**
* Created by niu on 2017/8/17.
*/
/**
 * Shared event-key constants used when posting string events.
 */
public class StringEvent {

    // Event key posted when network connectivity changes.
    // Declared final: this is a constant and must not be reassigned at runtime.
    public static final String NET_STATE_CHANGE = "net_state_change";
}
| apache-2.0 |
Orange-OpenSource/cf-java-client | cloudfoundry-util/src/main/java/org/cloudfoundry/util/ExceptionUtils.java | 3609 | /*
* Copyright 2013-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.util;
import org.cloudfoundry.client.v2.ClientV2Exception;
import reactor.core.publisher.Mono;
import java.util.Arrays;
import java.util.function.Predicate;
/**
* Utilities for dealing with {@link Exception}s
*/
public final class ExceptionUtils {
private ExceptionUtils() {
}
/**
* Returns a {@link Mono} containing an {@link IllegalArgumentException} with the configured message
*
* @param format A <a href="../util/Formatter.html#syntax">format string</a>
* @param args Arguments referenced by the format specifiers in the format string. If there are more arguments than format specifiers, the extra arguments are ignored. The number of arguments
* is variable and may be zero. The maximum number of arguments is limited by the maximum dimension of a Java array as defined by <cite>The Java™ Virtual Machine
* Specification</cite>. The behaviour on a {@code null} argument depends on the <a href="../util/Formatter.html#syntax">conversion</a>.
* @param <T> the type of the {@link Mono} being converted
* @return a {@link Mono} containing the error
*/
public static <T> Mono<T> illegalArgument(String format, Object... args) {
String message = String.format(format, args);
return Mono.error(new IllegalArgumentException(message));
}
/**
* Returns a {@link Mono} containing an {@link IllegalStateException} with the configured message
*
* @param format A <a href="../util/Formatter.html#syntax">format string</a>
* @param args Arguments referenced by the format specifiers in the format string. If there are more arguments than format specifiers, the extra arguments are ignored. The number of arguments
* is variable and may be zero. The maximum number of arguments is limited by the maximum dimension of a Java array as defined by <cite>The Java™ Virtual Machine
* Specification</cite>. The behaviour on a {@code null} argument depends on the <a href="../util/Formatter.html#syntax">conversion</a>.
* @param <T> the type of the {@link Mono} being converted
* @return a {@link Mono} containing the error
*/
public static <T> Mono<T> illegalState(String format, Object... args) {
String message = String.format(format, args);
return Mono.error(new IllegalStateException(message));
}
/**
* A predicate that returns {@code true} if the exception is a {@link ClientV2Exception} and its code matches expectation
*
* @param codes the codes to match
* @return {@code true} if the exception is a {@link ClientV2Exception} and its code matches
*/
public static Predicate<? super Throwable> statusCode(int... codes) {
return t -> t instanceof ClientV2Exception &&
Arrays.stream(codes).anyMatch(candidate -> ((ClientV2Exception) t).getCode().equals(candidate));
}
}
| apache-2.0 |
laurelnaiad/marklogic-samplestack-old | appserver/java-spring/src/main/java/com/marklogic/samplestack/web/security/SamplestackAuthenticationEntryPoint.java | 2075 | /*
* Copyright 2012-2014 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marklogic.samplestack.web.security;
import java.io.IOException;
import java.io.Writer;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
import org.apache.http.HttpStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.web.AuthenticationEntryPoint;
import org.springframework.stereotype.Component;
import com.marklogic.samplestack.web.JsonHttpResponse;
@Component
/**
* Class to customize the default Login handling. Rather than redirection
* to a login form, Samplestack simply denies access
* (where authentication is required)
*/
public class SamplestackAuthenticationEntryPoint implements
        AuthenticationEntryPoint {

    /** Helper that serializes error payloads as JSON; injected by Spring. */
    @Autowired
    private JsonHttpResponse errors;

    /**
     * Denies unauthenticated access outright: writes a 401 status and a JSON
     * "Unauthorized" body rather than redirecting to a login form.
     *
     * @param request the request that hit a secured endpoint
     * @param response the response the 401 JSON body is written to
     * @param authException the failure raised by the security filter chain
     * @throws IOException if the response body cannot be written
     */
    @Override
    public void commence(HttpServletRequest request,
            HttpServletResponse response, AuthenticationException authException)
            throws IOException {
        HttpServletResponseWrapper responseWrapper = new HttpServletResponseWrapper(
                response);
        responseWrapper.setStatus(HttpStatus.SC_UNAUTHORIZED);
        // Status code is repeated inside the JSON body so clients see it in both places.
        Writer out = responseWrapper.getWriter();
        errors.writeJsonResponse(out, HttpStatus.SC_UNAUTHORIZED, "Unauthorized");
        out.close();
    }
}
| apache-2.0 |
jmostella/armeria | core/src/main/java/com/linecorp/armeria/common/HttpData.java | 9114 | /*
* Copyright 2016 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.common;
import static java.util.Objects.requireNonNull;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Formatter;
import java.util.Locale;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import it.unimi.dsi.fastutil.io.FastByteArrayInputStream;
/**
* HTTP/2 data. Helpers in this class create {@link HttpData} objects that leave the stream open.
* To create a {@link HttpData} that closes the stream, directly instantiate {@link DefaultHttpData}.
*
* <p>Implementations should generally extend {@link AbstractHttpData} to interact with other {@link HttpData}
* implementations.
*/
public interface HttpData extends HttpObject {

    /**
     * Empty HTTP/2 data.
     */
    HttpData EMPTY_DATA = new DefaultHttpData(new byte[0], 0, 0, false);

    /**
     * Creates a new instance from the specified byte array. The array is not copied; any changes made in the
     * array later will be visible to {@link HttpData}.
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of the specified array is 0.
     */
    static HttpData of(byte[] data) {
        requireNonNull(data, "data");
        if (data.length == 0) {
            return EMPTY_DATA;
        }
        return new DefaultHttpData(data, 0, data.length, false);
    }

    /**
     * Creates a new instance from the specified byte array, {@code offset} and {@code length}.
     * The array is not copied; any changes made in the array later will be visible to {@link HttpData}.
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code length} is 0.
     *
     * @throws ArrayIndexOutOfBoundsException if {@code offset} and {@code length} are out of bounds
     */
    static HttpData of(byte[] data, int offset, int length) {
        // Pass the parameter name, consistent with every other null check in this type.
        requireNonNull(data, "data");
        // 'offset > data.length - length' is written this way to avoid integer overflow.
        if (offset < 0 || length < 0 || offset > data.length - length) {
            throw new ArrayIndexOutOfBoundsException(
                    "offset: " + offset + ", length: " + length + ", data.length: " + data.length);
        }
        if (length == 0) {
            return EMPTY_DATA;
        }
        return new DefaultHttpData(data, offset, length, false);
    }

    /**
     * Converts the specified {@code text} into an {@link HttpData}.
     *
     * @param charset the {@link Charset} to use for encoding {@code text}
     * @param text the {@link String} to convert
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
     */
    static HttpData of(Charset charset, String text) {
        requireNonNull(charset, "charset");
        requireNonNull(text, "text");
        if (text.isEmpty()) {
            return EMPTY_DATA;
        }
        return of(text.getBytes(charset));
    }

    /**
     * Converts the specified Netty {@link ByteBuf} into an {@link HttpData}. Unlike {@link #of(byte[])}, this
     * method makes a copy of the {@link ByteBuf}.
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the readable bytes of {@code buf} is 0.
     */
    static HttpData of(ByteBuf buf) {
        requireNonNull(buf, "buf");
        if (!buf.isReadable()) {
            return EMPTY_DATA;
        }
        return of(ByteBufUtil.getBytes(buf));
    }

    /**
     * Converts the specified formatted string into an {@link HttpData}. The string is formatted by
     * {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
     *
     * @param charset the {@link Charset} to use for encoding string
     * @param format {@linkplain Formatter the format string} of the response content
     * @param args the arguments referenced by the format specifiers in the format string
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
     */
    static HttpData of(Charset charset, String format, Object... args) {
        requireNonNull(charset, "charset");
        requireNonNull(format, "format");
        requireNonNull(args, "args");
        if (format.isEmpty()) {
            return EMPTY_DATA;
        }
        return of(String.format(Locale.ENGLISH, format, args).getBytes(charset));
    }

    /**
     * Converts the specified {@code text} into a UTF-8 {@link HttpData}.
     *
     * @param text the {@link String} to convert
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
     */
    static HttpData ofUtf8(String text) {
        return of(StandardCharsets.UTF_8, text);
    }

    /**
     * Converts the specified formatted string into a UTF-8 {@link HttpData}. The string is formatted by
     * {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
     *
     * @param format {@linkplain Formatter the format string} of the response content
     * @param args the arguments referenced by the format specifiers in the format string
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
     */
    static HttpData ofUtf8(String format, Object... args) {
        return of(StandardCharsets.UTF_8, format, args);
    }

    /**
     * Converts the specified {@code text} into a US-ASCII {@link HttpData}.
     *
     * @param text the {@link String} to convert
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
     */
    static HttpData ofAscii(String text) {
        return of(StandardCharsets.US_ASCII, text);
    }

    /**
     * Converts the specified formatted string into a US-ASCII {@link HttpData}. The string is formatted by
     * {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
     *
     * @param format {@linkplain Formatter the format string} of the response content
     * @param args the arguments referenced by the format specifiers in the format string
     *
     * @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
     */
    static HttpData ofAscii(String format, Object... args) {
        return of(StandardCharsets.US_ASCII, format, args);
    }

    /**
     * Returns the underlying byte array of this data.
     */
    byte[] array();

    /**
     * Returns the start offset of the {@link #array()}.
     */
    int offset();

    /**
     * Returns the length of this data.
     */
    int length();

    /**
     * Returns whether the {@link #length()} is 0.
     */
    default boolean isEmpty() {
        return length() == 0;
    }

    /**
     * Decodes this data into a {@link String}.
     *
     * @param charset the {@link Charset} to use for decoding this data
     *
     * @return the decoded {@link String}
     */
    default String toString(Charset charset) {
        requireNonNull(charset, "charset");
        return new String(array(), offset(), length(), charset);
    }

    /**
     * Decodes this data into a {@link String} using UTF-8 encoding.
     *
     * @return the decoded {@link String}
     */
    default String toStringUtf8() {
        return toString(StandardCharsets.UTF_8);
    }

    /**
     * Decodes this data into a {@link String} using US-ASCII encoding.
     *
     * @return the decoded {@link String}
     */
    default String toStringAscii() {
        return toString(StandardCharsets.US_ASCII);
    }

    /**
     * Returns a new {@link InputStream} that is sourced from this data.
     */
    default InputStream toInputStream() {
        return new FastByteArrayInputStream(array(), offset(), length());
    }

    /**
     * Returns a new {@link Reader} that is sourced from this data and decoded using the specified
     * {@link Charset}.
     */
    default Reader toReader(Charset charset) {
        requireNonNull(charset, "charset");
        return new InputStreamReader(toInputStream(), charset);
    }

    /**
     * Returns a new {@link Reader} that is sourced from this data and decoded using
     * {@link StandardCharsets#UTF_8}.
     */
    default Reader toReaderUtf8() {
        return toReader(StandardCharsets.UTF_8);
    }

    /**
     * Returns a new {@link Reader} that is sourced from this data and decoded using
     * {@link StandardCharsets#US_ASCII}.
     */
    default Reader toReaderAscii() {
        return toReader(StandardCharsets.US_ASCII);
    }
}
| apache-2.0 |
yanzhijun/jclouds-aliyun | apis/openstack-nova/src/test/java/org/jclouds/openstack/nova/v2_0/EndpointIdIsRandomExpectTest.java | 2716 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.openstack.nova.v2_0;
import static org.jclouds.Constants.PROPERTY_ENDPOINT;
import static org.testng.Assert.assertEquals;
import java.util.Properties;
import org.jclouds.http.HttpRequest;
import org.jclouds.http.HttpResponse;
import org.jclouds.openstack.nova.v2_0.internal.BaseNovaApiExpectTest;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableSet;
/**
* Tests to ensure that we can pick the only endpoint of a service
*/
@Test(groups = "unit", testName = "EndpointIdIsRandomExpectTest")
public class EndpointIdIsRandomExpectTest extends BaseNovaApiExpectTest {

    public EndpointIdIsRandomExpectTest() {
        // Credentials that the expected authentication payload below must match.
        this.identity = "demo:demo";
        this.credential = "password";
    }

    /**
     * Pins the provider to a fixed keystone endpoint so the expect-test request
     * URL is deterministic.
     */
    @Override
    protected Properties setupProperties() {
        Properties overrides = super.setupProperties();
        overrides.setProperty(PROPERTY_ENDPOINT, "http://10.10.10.10:5000/v2.0/");
        return overrides;
    }

    /**
     * Authenticates against a stubbed keystone response (access_version_uids.json)
     * and verifies the single configured region is discovered.
     */
    public void testVersionMatchOnConfiguredRegionsWhenResponseIs2xx() {
        // The exact POST /tokens request the stubbed server expects to receive.
        HttpRequest authenticate = HttpRequest
                .builder()
                .method("POST")
                .endpoint("http://10.10.10.10:5000/v2.0/tokens")
                .addHeader("Accept", "application/json")
                .payload(
                        payloadFromStringWithContentType(
                                "{\"auth\":{\"passwordCredentials\":{\"username\":\"demo\",\"password\":\"password\"},\"tenantName\":\"demo\"}}",
                                "application/json")).build();
        HttpResponse authenticationResponse = HttpResponse.builder().statusCode(200)
                .payload(payloadFromResourceWithContentType("/access_version_uids.json", "application/json")).build();
        NovaApi whenNovaRegionExists = requestSendsResponse(authenticate, authenticationResponse);
        assertEquals(whenNovaRegionExists.getConfiguredRegions(), ImmutableSet.of("RegionOne"));
    }
}
| apache-2.0 |
lovepoem/dubbo | dubbo-registry/dubbo-registry-default/src/main/java/org/apache/dubbo/registry/dubbo/DubboRegistry.java | 5956 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.registry.dubbo;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.Version;
import org.apache.dubbo.common.utils.ExecutorUtil;
import org.apache.dubbo.common.utils.NamedThreadFactory;
import org.apache.dubbo.common.utils.NetUtils;
import org.apache.dubbo.registry.NotifyListener;
import org.apache.dubbo.registry.RegistryService;
import org.apache.dubbo.registry.support.FailbackRegistry;
import org.apache.dubbo.remoting.Constants;
import org.apache.dubbo.rpc.Invoker;
import java.util.List;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
import static org.apache.dubbo.registry.Constants.REGISTRY_RECONNECT_PERIOD_KEY;
/**
* DubboRegistry
*/
/**
 * DubboRegistry: a registry implementation that delegates to a remote
 * {@link RegistryService} through an {@link Invoker}, with a background timer
 * that keeps reconnecting while the invoker is unavailable.
 */
public class DubboRegistry extends FailbackRegistry {

    // Reconnecting detection cycle: 3 seconds (unit:millisecond)
    private static final int RECONNECT_PERIOD_DEFAULT = 3 * 1000;

    // Scheduled executor service
    private final ScheduledExecutorService reconnectTimer = Executors.newScheduledThreadPool(1, new NamedThreadFactory("DubboRegistryReconnectTimer", true));

    // Reconnection timer, regular check connection is available. If unavailable, unlimited reconnection.
    private final ScheduledFuture<?> reconnectFuture;

    // The lock for client acquisition process, lock the creation process of the client instance to prevent repeated clients
    private final ReentrantLock clientLock = new ReentrantLock();

    private final Invoker<RegistryService> registryInvoker;

    private final RegistryService registryService;

    /**
     * The time in milliseconds the reconnectTimer will wait
     */
    private final int reconnectPeriod;

    /**
     * Creates the registry and immediately schedules the periodic reconnect task.
     *
     * @param registryInvoker invoker whose availability is monitored; its URL configures this registry
     * @param registryService remote service all register/subscribe/lookup calls are delegated to
     */
    public DubboRegistry(Invoker<RegistryService> registryInvoker, RegistryService registryService) {
        super(registryInvoker.getUrl());
        this.registryInvoker = registryInvoker;
        this.registryService = registryService;
        // Start reconnection timer
        this.reconnectPeriod = registryInvoker.getUrl().getParameter(REGISTRY_RECONNECT_PERIOD_KEY, RECONNECT_PERIOD_DEFAULT);
        reconnectFuture = reconnectTimer.scheduleWithFixedDelay(() -> {
            // Check and connect to the registry
            try {
                connect();
            } catch (Throwable t) { // Defensive fault tolerance
                logger.error("Unexpected error occur at reconnect, cause: " + t.getMessage(), t);
            }
        }, reconnectPeriod, reconnectPeriod, TimeUnit.MILLISECONDS);
    }

    /**
     * Re-establishes the connection if the invoker is currently unavailable.
     * Uses check / lock / double-check around {@code recover()} so concurrent
     * callers do not recover twice. When the URL has "check=true", failures are
     * rethrown; otherwise they are only logged (the timer will retry).
     */
    protected final void connect() {
        try {
            // Check whether or not it is connected
            if (isAvailable()) {
                return;
            }
            if (logger.isInfoEnabled()) {
                logger.info("Reconnect to registry " + getUrl());
            }
            clientLock.lock();
            try {
                // Double check whether or not it is connected
                if (isAvailable()) {
                    return;
                }
                recover();
            } finally {
                clientLock.unlock();
            }
        } catch (Throwable t) { // Ignore all the exceptions and wait for the next retry
            if (getUrl().getParameter(Constants.CHECK_KEY, true)) {
                if (t instanceof RuntimeException) {
                    throw (RuntimeException) t;
                }
                throw new RuntimeException(t.getMessage(), t);
            }
            logger.error("Failed to connect to registry " + getUrl().getAddress() + " from provider/consumer " + NetUtils.getLocalHost() + " use dubbo " + Version.getVersion() + ", cause: " + t.getMessage(), t);
        }
    }

    /** Available iff the underlying invoker exists and reports itself available. */
    @Override
    public boolean isAvailable() {
        if (registryInvoker == null) {
            return false;
        }
        return registryInvoker.isAvailable();
    }

    /** Stops the reconnect timer and destroys the underlying invoker. */
    @Override
    public void destroy() {
        super.destroy();
        try {
            // Cancel the reconnection timer
            ExecutorUtil.cancelScheduledFuture(reconnectFuture);
        } catch (Throwable t) {
            logger.warn("Failed to cancel reconnect timer", t);
        }
        registryInvoker.destroy();
        ExecutorUtil.gracefulShutdown(reconnectTimer, reconnectPeriod);
    }

    // The operations below simply delegate to the remote RegistryService;
    // retry-on-failure semantics come from the FailbackRegistry superclass.

    @Override
    public void doRegister(URL url) {
        registryService.register(url);
    }

    @Override
    public void doUnregister(URL url) {
        registryService.unregister(url);
    }

    @Override
    public void doSubscribe(URL url, NotifyListener listener) {
        registryService.subscribe(url, listener);
    }

    @Override
    public void doUnsubscribe(URL url, NotifyListener listener) {
        registryService.unsubscribe(url, listener);
    }

    @Override
    public List<URL> lookup(URL url) {
        return registryService.lookup(url);
    }
}
| apache-2.0 |
jbeecham/ovirt-engine | backend/manager/modules/bll/src/test/java/org/ovirt/engine/core/itests/ClientHandshakeSequenceTest.java | 1920 | package org.ovirt.engine.core.itests;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNotNull;
import org.ovirt.engine.core.common.queries.*;
import org.ovirt.engine.core.common.action.LoginUserParameters;
import org.ovirt.engine.core.common.action.VdcReturnValueBase;
import org.ovirt.engine.core.common.action.VdcActionType;
import org.ovirt.engine.core.common.action.RunVmParams;
import org.ovirt.engine.core.compat.Guid;
/**
* Created by IntelliJ IDEA. User: gmostizk Date: Aug 31, 2009 Time: 11:28:01 AM To change this template use File |
* Settings | File Templates.
*/
@Ignore
public class ClientHandshakeSequenceTest extends AbstractBackendTest {

    /** Public (pre-login) query: the list of authentication domains must be returned. */
    @Test
    public void getDomainList() {
        VdcQueryReturnValue value = backend.RunPublicQuery(VdcQueryType.GetDomainList, new VdcQueryParametersBase());
        assertTrue(value.getSucceeded());
        assertNotNull(value.getReturnValue());
        System.out.println(value.getReturnValue());
    }

    /** Public (pre-login) query: the engine version configuration value must be readable. */
    @Test
    public void getVersion() {
        VdcQueryReturnValue value = backend.RunPublicQuery(VdcQueryType.GetConfigurationValue,
                new GetConfigurationValueParameters(ConfigurationValues.VdcVersion));
        assertNotNull(value);
        assertNotNull(value.getReturnValue());
        System.out.println("Version: " + value.getReturnValue());
    }

    /** Login with admin credentials must succeed and yield a return value. */
    @Test
    public void loginAdmin() {
        VdcReturnValueBase value = backend.Login(new LoginUserParameters("admin", "admin", "domain", "os", "browser",
                "client_type"));
        assertTrue(value.getSucceeded());
        assertNotNull(value.getActionReturnValue());
    }

    @Test
    public void testRunVm() {
        RunVmParams params = new RunVmParams(Guid.NewGuid());
        // NOTE(review): 'result' is never asserted — as written this only checks that
        // RunVm on a random Guid does not throw. Confirm whether an assertion is missing.
        VdcReturnValueBase result = backend.runInternalAction(VdcActionType.RunVm, params);
    }
}
| apache-2.0 |
visouza/solr-5.0.0 | lucene/analysis/common/src/test/org/apache/lucene/analysis/reverse/TestReverseStringFilter.java | 5094 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.analysis.reverse;
import java.io.IOException;
import java.io.StringReader;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
public class TestReverseStringFilter extends BaseTokenStreamTestCase {
public void testFilter() throws Exception {
    // Whitespace tokenizer over tokens of length 1-4.
    TokenStream input = new MockTokenizer(MockTokenizer.WHITESPACE, false);
    ((Tokenizer) input).setReader(new StringReader("Do have a nice day"));

    // Every token must come out character-reversed.
    assertTokenStreamContents(new ReverseStringFilter(input),
        new String[] { "oD", "evah", "a", "ecin", "yad" });
}
public void testFilterWithMark() throws Exception {
TokenStream stream = new MockTokenizer(MockTokenizer.WHITESPACE, false); // 1-4 length string
((Tokenizer)stream).setReader(new StringReader("Do have a nice day"));
ReverseStringFilter filter = new ReverseStringFilter(stream, '\u0001');
assertTokenStreamContents(filter,
new String[] { "\u0001oD", "\u0001evah", "\u0001a", "\u0001ecin", "\u0001yad" });
}
public void testReverseString() throws Exception {
assertEquals( "A", ReverseStringFilter.reverse( "A" ) );
assertEquals( "BA", ReverseStringFilter.reverse( "AB" ) );
assertEquals( "CBA", ReverseStringFilter.reverse( "ABC" ) );
}
public void testReverseChar() throws Exception {
char[] buffer = { 'A', 'B', 'C', 'D', 'E', 'F' };
ReverseStringFilter.reverse( buffer, 2, 3 );
assertEquals( "ABEDCF", new String( buffer ) );
}
public void testReverseSupplementary() throws Exception {
// supplementary at end
assertEquals("𩬅艱鍟䇹愯瀛", ReverseStringFilter.reverse("瀛愯䇹鍟艱𩬅"));
// supplementary at end - 1
assertEquals("a𩬅艱鍟䇹愯瀛", ReverseStringFilter.reverse("瀛愯䇹鍟艱𩬅a"));
// supplementary at start
assertEquals("fedcba𩬅", ReverseStringFilter.reverse("𩬅abcdef"));
// supplementary at start + 1
assertEquals("fedcba𩬅z", ReverseStringFilter.reverse("z𩬅abcdef"));
// supplementary medial
assertEquals("gfe𩬅dcba", ReverseStringFilter.reverse("abcd𩬅efg"));
}
public void testReverseSupplementaryChar() throws Exception {
// supplementary at end
char[] buffer = "abc瀛愯䇹鍟艱𩬅".toCharArray();
ReverseStringFilter.reverse(buffer, 3, 7);
assertEquals("abc𩬅艱鍟䇹愯瀛", new String(buffer));
// supplementary at end - 1
buffer = "abc瀛愯䇹鍟艱𩬅d".toCharArray();
ReverseStringFilter.reverse(buffer, 3, 8);
assertEquals("abcd𩬅艱鍟䇹愯瀛", new String(buffer));
// supplementary at start
buffer = "abc𩬅瀛愯䇹鍟艱".toCharArray();
ReverseStringFilter.reverse(buffer, 3, 7);
assertEquals("abc艱鍟䇹愯瀛𩬅", new String(buffer));
// supplementary at start + 1
buffer = "abcd𩬅瀛愯䇹鍟艱".toCharArray();
ReverseStringFilter.reverse(buffer, 3, 8);
assertEquals("abc艱鍟䇹愯瀛𩬅d", new String(buffer));
// supplementary medial
buffer = "abc瀛愯𩬅def".toCharArray();
ReverseStringFilter.reverse(buffer, 3, 7);
assertEquals("abcfed𩬅愯瀛", new String(buffer));
}
/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false);
return new TokenStreamComponents(tokenizer, new ReverseStringFilter(tokenizer));
}
};
checkRandomData(random(), a, 1000*RANDOM_MULTIPLIER);
}
public void testEmptyTerm() throws IOException {
Analyzer a = new Analyzer() {
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new KeywordTokenizer();
return new TokenStreamComponents(tokenizer, new ReverseStringFilter(tokenizer));
}
};
checkOneTerm(a, "", "");
}
}
| apache-2.0 |
cfibmers/open-Autoscaler | server/src/main/java/org/cloudfoundry/autoscaler/data/couchdb/dao/impl/AppInstanceMetricsDAOImpl.java | 6943 | package org.cloudfoundry.autoscaler.data.couchdb.dao.impl;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.cloudfoundry.autoscaler.data.couchdb.dao.AppInstanceMetricsDAO;
import org.cloudfoundry.autoscaler.data.couchdb.dao.base.TypedCouchDbRepositorySupport;
import org.cloudfoundry.autoscaler.data.couchdb.document.AppInstanceMetrics;
import org.ektorp.ComplexKey;
import org.ektorp.CouchDbConnector;
import org.ektorp.ViewQuery;
import org.ektorp.support.View;
/**
 * CouchDB-backed DAO for {@link AppInstanceMetrics} documents. Each inner repository
 * class wraps one CouchDB design-document view; this class fans queries out to the
 * appropriate view. Query failures are logged and surface to callers as {@code null}
 * results (preserved from the original contract — callers are expected to null-check).
 */
public class AppInstanceMetricsDAOImpl extends CommonDAOImpl implements AppInstanceMetricsDAO {

	/** View over all AppInstanceMetrics documents, keyed by [appId, appType, timestamp]. */
	@View(name = "byAll", map = "function(doc) { if (doc.type == 'AppInstanceMetrics' ) emit([doc.appId, doc.appType, doc.timestamp], doc._id)}")
	private static class AppInstanceMetricsRepository_All extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

		public AppInstanceMetricsRepository_All(CouchDbConnector db) {
			super(AppInstanceMetrics.class, db, "AppInstanceMetrics_byAll");
		}

		/** Returns every AppInstanceMetrics document. */
		public List<AppInstanceMetrics> getAllRecords() {
			return queryView("byAll");
		}
	}

	/** View keyed by [appId] for exact-app lookups. */
	@View(name = "by_appId", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.appId) { emit([doc.appId], doc._id) } }")
	private static class AppInstanceMetricsRepository_ByAppId
			extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

		public AppInstanceMetricsRepository_ByAppId(CouchDbConnector db) {
			super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByAppId");
		}

		public List<AppInstanceMetrics> findByAppId(String appId) {
			ComplexKey key = ComplexKey.of(appId);
			return queryView("by_appId", key);
		}
	}

	/** View keyed by [appId, timestamp] for time-range queries on a single app. */
	@View(name = "by_appId_between", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.appId && doc.timestamp) { emit([doc.appId, doc.timestamp], doc._id) } }")
	private static class AppInstanceMetricsRepository_ByAppIdBetween
			extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

		public AppInstanceMetricsRepository_ByAppIdBetween(CouchDbConnector db) {
			super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByAppIdBetween");
		}

		/**
		 * Returns metrics for {@code appId} whose timestamp lies in
		 * [startTimestamp, endTimestamp]. Returns {@code null} if the query fails.
		 */
		public List<AppInstanceMetrics> findByAppIdBetween(String appId, long startTimestamp, long endTimestamp)
				throws Exception {
			ComplexKey startKey = ComplexKey.of(appId, startTimestamp);
			ComplexKey endKey = ComplexKey.of(appId, endTimestamp);
			ViewQuery q = createQuery("by_appId_between").includeDocs(true).startKey(startKey).endKey(endKey);
			List<AppInstanceMetrics> returnvalue = null;
			// beforeConnection/afterConnection bracket the query for connection bookkeeping.
			String[] input = beforeConnection("QUERY", new String[] { "by_appId_between", appId,
					String.valueOf(startTimestamp), String.valueOf(endTimestamp) });
			try {
				returnvalue = db.queryView(q, AppInstanceMetrics.class);
			} catch (Exception e) {
				// Deliberately swallowed: a failed query yields a null result for the caller.
				logger.error("Failed to query view by_appId_between for appId " + appId, e);
			}
			afterConnection(input);
			return returnvalue;
		}
	}

	/** View keyed by [serviceId, timestamp] for "older than" queries per service. */
	@View(name = "by_serviceId_before", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.serviceId && doc.timestamp) { emit([ doc.serviceId, doc.timestamp], doc._id) } }")
	private static class AppInstanceMetricsRepository_ByServiceId_Before
			extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {

		public AppInstanceMetricsRepository_ByServiceId_Before(CouchDbConnector db) {
			super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByServiceId");
		}

		/**
		 * Returns metrics for {@code serviceId} with timestamp in [0, olderThan].
		 * Returns {@code null} if the query fails.
		 */
		public List<AppInstanceMetrics> findByServiceIdBefore(String serviceId, long olderThan) throws Exception {
			ComplexKey startKey = ComplexKey.of(serviceId, 0);
			ComplexKey endKey = ComplexKey.of(serviceId, olderThan);
			ViewQuery q = createQuery("by_serviceId_before").includeDocs(true).startKey(startKey).endKey(endKey);
			List<AppInstanceMetrics> returnvalue = null;
			String[] input = beforeConnection("QUERY",
					new String[] { "by_serviceId_before", serviceId, String.valueOf(0), String.valueOf(olderThan) });
			try {
				returnvalue = db.queryView(q, AppInstanceMetrics.class);
			} catch (Exception e) {
				// Deliberately swallowed: a failed query yields a null result for the caller.
				logger.error("Failed to query view by_serviceId_before for serviceId " + serviceId, e);
			}
			afterConnection(input);
			return returnvalue;
		}
	}

	private static final Logger logger = Logger.getLogger(AppInstanceMetricsDAOImpl.class);

	// One repository instance per design-document view.
	private AppInstanceMetricsRepository_All metricsRepoAll;
	private AppInstanceMetricsRepository_ByAppId metricsRepoByAppId;
	private AppInstanceMetricsRepository_ByAppIdBetween metricsRepoByAppIdBetween;
	private AppInstanceMetricsRepository_ByServiceId_Before metricsRepoByServiceIdBefore;

	public AppInstanceMetricsDAOImpl(CouchDbConnector db) {
		metricsRepoAll = new AppInstanceMetricsRepository_All(db);
		metricsRepoByAppId = new AppInstanceMetricsRepository_ByAppId(db);
		metricsRepoByAppIdBetween = new AppInstanceMetricsRepository_ByAppIdBetween(db);
		metricsRepoByServiceIdBefore = new AppInstanceMetricsRepository_ByServiceId_Before(db);
	}

	/**
	 * @param initDesignDocument when true, eagerly creates/updates the CouchDB design
	 *                           documents for all views; failures are logged, not thrown.
	 */
	public AppInstanceMetricsDAOImpl(CouchDbConnector db, boolean initDesignDocument) {
		this(db);
		if (initDesignDocument) {
			try {
				initAllRepos();
			} catch (Exception e) {
				logger.error(e.getMessage(), e);
			}
		}
	}

	@Override
	public List<AppInstanceMetrics> findAll() {
		return this.metricsRepoAll.getAllRecords();
	}

	@Override
	public List<AppInstanceMetrics> findByAppId(String appId) {
		return this.metricsRepoByAppId.findByAppId(appId);
	}

	@Override
	public List<AppInstanceMetrics> findByAppIdBetween(String appId, long startTimestamp, long endTimestamp)
			throws Exception {
		return this.metricsRepoByAppIdBetween.findByAppIdBetween(appId, startTimestamp, endTimestamp);
	}

	@Override
	public List<AppInstanceMetrics> findByServiceIdBefore(String serviceId, long olderThan) throws Exception {
		return this.metricsRepoByServiceIdBefore.findByServiceIdBefore(serviceId, olderThan);
	}

	@Override
	public List<AppInstanceMetrics> findByAppIdAfter(String appId, long timestamp) throws Exception {
		// Convenience wrapper: everything from 'timestamp' up to "now".
		try {
			return findByAppIdBetween(appId, timestamp, System.currentTimeMillis());
		} catch (Exception e) {
			logger.error(e.getMessage(), e);
		}
		// Null (not empty list) on failure, matching the other query paths.
		return null;
	}

	@SuppressWarnings("unchecked")
	@Override
	public <T> TypedCouchDbRepositorySupport<T> getDefaultRepo() {
		return (TypedCouchDbRepositorySupport<T>) this.metricsRepoAll;
	}

	@SuppressWarnings("unchecked")
	@Override
	public <T> List<TypedCouchDbRepositorySupport<T>> getAllRepos() {
		List<TypedCouchDbRepositorySupport<T>> repoList = new ArrayList<TypedCouchDbRepositorySupport<T>>();
		repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoAll);
		repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByAppId);
		repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByAppIdBetween);
		repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByServiceIdBefore);
		return repoList;
	}
}
| apache-2.0 |
xuse/ef-orm | common-core/src/main/java/jef/common/wrapper/IHolder.java | 139 | package jef.common.wrapper;
import java.io.Serializable;
/**
 * A serializable, mutable holder of a single value of type {@code T}.
 *
 * @param <T> type of the held value
 */
public interface IHolder<T> extends Serializable{
	/** Returns the currently held value (may be {@code null} — TODO confirm with implementations). */
	T get();
	/** Replaces the held value with {@code obj}. */
	void set(T obj);
}
| apache-2.0 |
jayway/rest-assured | rest-assured/src/main/java/io/restassured/response/Validatable.java | 1199 | /*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.restassured.response;
/**
 * Capability interface for responses that can enter the validation phase
 * of a REST Assured request via {@link #then()}.
 *
 * @param <T> concrete {@link ValidatableResponseOptions} type returned by {@link #then()}
 * @param <R> response type exposing both the response body and general response options
 */
public interface Validatable<T extends ValidatableResponseOptions<T, R>, R extends ResponseBody<R> & ResponseOptions<R>> {

    /**
     * Returns a validatable response that lets you validate the response. Usage example:
     * <p/>
     * <pre>
     * given().
     *         param("firstName", "John").
     *         param("lastName", "Doe").
     * when().
     *         get("/greet").
     * then().
     *         body("greeting", equalTo("John Doe"));
     * </pre>
     *
     * @return A validatable response
     */
    T then();
}
| apache-2.0 |
killbill/killbill-meter-plugin | src/main/java/org/killbill/billing/plugin/meter/timeline/shutdown/StartTimes.java | 3097 | /*
* Copyright 2010-2014 Ning, Inc.
* Copyright 2014 The Billing Project, LLC
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.killbill.billing.plugin.meter.timeline.shutdown;
import java.util.HashMap;
import java.util.Map;
import org.joda.time.DateTime;
/**
* This class is used solely as a Json mapping class when saving timelines in a database
* blob on shutdown, and restoring them on startup.
* <p/>
* The Map<Integer, Map<Integer, DateTime>> maps from sourceId to eventCategoryId to startTime.
*/
public class StartTimes {

    // Wall-clock time at which this snapshot was persisted.
    private final DateTime timeInserted;
    // sourceId -> (eventCategoryId -> earliest start time for that category).
    private final Map<Integer, Map<Integer, DateTime>> startTimesMap;
    // Earliest start time across every source/category; sentinel is far-future
    // (Long.MAX_VALUE millis) until at least one time is recorded.
    private DateTime minStartTime;

    public StartTimes(final DateTime timeInserted, final Map<Integer, Map<Integer, DateTime>> startTimesMap) {
        this.timeInserted = timeInserted;
        this.startTimesMap = startTimesMap;
        // Derive the global minimum by scanning every per-category start time.
        DateTime earliest = new DateTime(Long.MAX_VALUE);
        for (final Map<Integer, DateTime> perCategory : startTimesMap.values()) {
            for (final DateTime candidate : perCategory.values()) {
                if (earliest.isAfter(candidate)) {
                    earliest = candidate;
                }
            }
        }
        this.minStartTime = earliest;
    }

    public StartTimes() {
        this.timeInserted = new DateTime();
        minStartTime = new DateTime(Long.MAX_VALUE);
        this.startTimesMap = new HashMap<Integer, Map<Integer, DateTime>>();
    }

    /** Records a start time for the given source/category pair, updating the global minimum. */
    public void addTime(final int sourceId, final int categoryId, final DateTime dateTime) {
        Map<Integer, DateTime> perCategory = startTimesMap.get(sourceId);
        if (perCategory == null) {
            perCategory = new HashMap<Integer, DateTime>();
            startTimesMap.put(sourceId, perCategory);
        }
        perCategory.put(categoryId, dateTime);
        if (dateTime.isBefore(minStartTime)) {
            minStartTime = dateTime;
        }
    }

    /** Returns the recorded start time for the pair, or null if none was recorded. */
    public DateTime getStartTimeForSourceIdAndCategoryId(final int sourceId, final int categoryId) {
        final Map<Integer, DateTime> perCategory = startTimesMap.get(sourceId);
        return perCategory == null ? null : perCategory.get(categoryId);
    }

    public Map<Integer, Map<Integer, DateTime>> getStartTimesMap() {
        return startTimesMap;
    }

    public DateTime getTimeInserted() {
        return timeInserted;
    }

    public DateTime getMinStartTime() {
        return minStartTime;
    }
}
| apache-2.0 |
lizhanhui/data_druid | server/src/test/java/io/druid/metadata/IndexerSQLMetadataStorageCoordinatorTest.java | 26606 | /*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.metadata;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.druid.indexing.overlord.DataSourceMetadata;
import io.druid.indexing.overlord.ObjectMetadata;
import io.druid.indexing.overlord.SegmentPublishResult;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.StringUtils;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.LinearShardSpec;
import io.druid.timeline.partition.NoneShardSpec;
import io.druid.timeline.partition.NumberedShardSpec;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.tweak.HandleCallback;
import org.skife.jdbi.v2.util.StringMapper;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
public class IndexerSQLMetadataStorageCoordinatorTest
{
@Rule
public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();

private final ObjectMapper mapper = new DefaultObjectMapper();

// Two linear shards (partition 0 and 1) of the same interval/version; together
// they form SEGMENTS, the default fixture announced by most tests below.
private final DataSegment defaultSegment = new DataSegment(
    "fooDataSource",
    Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
    "version",
    ImmutableMap.<String, Object>of(),
    ImmutableList.of("dim1"),
    ImmutableList.of("m1"),
    new LinearShardSpec(0),
    9,
    100
);

private final DataSegment defaultSegment2 = new DataSegment(
    "fooDataSource",
    Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
    "version",
    ImmutableMap.<String, Object>of(),
    ImmutableList.of("dim1"),
    ImmutableList.of("m1"),
    new LinearShardSpec(1),
    9,
    100
);

// Different (non-overlapping) interval, single unsharded segment.
private final DataSegment defaultSegment3 = new DataSegment(
    "fooDataSource",
    Interval.parse("2015-01-03T00Z/2015-01-04T00Z"),
    "version",
    ImmutableMap.<String, Object>of(),
    ImmutableList.of("dim1"),
    ImmutableList.of("m1"),
    NoneShardSpec.instance(),
    9,
    100
);

// Overshadows defaultSegment, defaultSegment2
// (same interval, lexicographically higher version "zversion").
private final DataSegment defaultSegment4 = new DataSegment(
    "fooDataSource",
    Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
    "zversion",
    ImmutableMap.<String, Object>of(),
    ImmutableList.of("dim1"),
    ImmutableList.of("m1"),
    new LinearShardSpec(0),
    9,
    100
);

// Numbered shard specs (partitionNum, numCorePartitions) used by allocation tests.
private final DataSegment numberedSegment0of0 = new DataSegment(
    "fooDataSource",
    Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
    "zversion",
    ImmutableMap.<String, Object>of(),
    ImmutableList.of("dim1"),
    ImmutableList.of("m1"),
    new NumberedShardSpec(0, 0),
    9,
    100
);

private final DataSegment numberedSegment1of0 = new DataSegment(
    "fooDataSource",
    Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
    "zversion",
    ImmutableMap.<String, Object>of(),
    ImmutableList.of("dim1"),
    ImmutableList.of("m1"),
    new NumberedShardSpec(1, 0),
    9,
    100
);

private final DataSegment numberedSegment2of0 = new DataSegment(
    "fooDataSource",
    Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
    "zversion",
    ImmutableMap.<String, Object>of(),
    ImmutableList.of("dim1"),
    ImmutableList.of("m1"),
    new NumberedShardSpec(2, 0),
    9,
    100
);

private final DataSegment numberedSegment2of1 = new DataSegment(
    "fooDataSource",
    Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
    "zversion",
    ImmutableMap.<String, Object>of(),
    ImmutableList.of("dim1"),
    ImmutableList.of("m1"),
    new NumberedShardSpec(2, 1),
    9,
    100
);

private final DataSegment numberedSegment3of1 = new DataSegment(
    "fooDataSource",
    Interval.parse("2015-01-01T00Z/2015-01-02T00Z"),
    "zversion",
    ImmutableMap.<String, Object>of(),
    ImmutableList.of("dim1"),
    ImmutableList.of("m1"),
    new NumberedShardSpec(3, 1),
    9,
    100
);

// Default two-shard fixture used throughout.
private final Set<DataSegment> SEGMENTS = ImmutableSet.of(defaultSegment, defaultSegment2);
// Incremented each time the coordinator attempts a metadata update (see setUp()).
private final AtomicLong metadataUpdateCounter = new AtomicLong();

private IndexerSQLMetadataStorageCoordinator coordinator;
private TestDerbyConnector derbyConnector;
@Before
public void setUp()
{
  // Fresh in-memory Derby metadata store per test.
  derbyConnector = derbyConnectorRule.getConnector();
  mapper.registerSubtypes(LinearShardSpec.class);
  derbyConnector.createDataSourceTable();
  derbyConnector.createTaskTables();
  derbyConnector.createSegmentTable();
  metadataUpdateCounter.set(0);
  // Coordinator under test, instrumented to count metadata-update attempts so
  // tests can assert on how many times the transactional path was tried.
  coordinator = new IndexerSQLMetadataStorageCoordinator(
      mapper,
      derbyConnectorRule.metadataTablesConfigSupplier().get(),
      derbyConnector
  )
  {
    @Override
    protected DataSourceMetadataUpdateResult updateDataSourceMetadataWithHandle(
        Handle handle,
        String dataSource,
        DataSourceMetadata startMetadata,
        DataSourceMetadata endMetadata
    ) throws IOException
    {
      // Count number of times this method is called.
      metadataUpdateCounter.getAndIncrement();
      return super.updateDataSourceMetadataWithHandle(handle, dataSource, startMetadata, endMetadata);
    }
  };
}
// Marks every segment in SEGMENTS as unused directly in the metadata table,
// asserting exactly one row was updated per segment.
private void unUseSegment()
{
  for (final DataSegment segment : SEGMENTS) {
    Assert.assertEquals(
        1, (int) derbyConnector.getDBI().<Integer>withHandle(
            new HandleCallback<Integer>()
            {
              @Override
              public Integer withHandle(Handle handle) throws Exception
              {
                return handle.createStatement(
                    StringUtils.format(
                        "UPDATE %s SET used = false WHERE id = :id",
                        derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable()
                    )
                ).bind("id", segment.getIdentifier()).execute();
              }
            }
        )
    );
  }
}

// Returns the ids of all rows currently marked used, ordered by id, by querying
// the segments table directly (bypassing the coordinator under test).
private List<String> getUsedIdentifiers()
{
  final String table = derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable();
  return derbyConnector.retryWithHandle(
      new HandleCallback<List<String>>()
      {
        @Override
        public List<String> withHandle(Handle handle) throws Exception
        {
          return handle.createQuery("SELECT id FROM " + table + " WHERE used = true ORDER BY id")
                       .map(StringMapper.FIRST)
                       .list();
        }
      }
  );
}
// Announcing segments without metadata persists their payloads and marks them
// used, and never touches dataSource metadata.
@Test
public void testSimpleAnnounce() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  for (DataSegment segment : SEGMENTS) {
    Assert.assertArrayEquals(
        mapper.writeValueAsString(segment).getBytes("UTF-8"),
        derbyConnector.lookup(
            derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
            "id",
            "payload",
            segment.getIdentifier()
        )
    );
  }

  Assert.assertEquals(
      ImmutableList.of(defaultSegment.getIdentifier(), defaultSegment2.getIdentifier()),
      getUsedIdentifiers()
  );

  // Should not update dataSource metadata.
  Assert.assertEquals(0, metadataUpdateCounter.get());
}

// A higher-version segment over the same interval overshadows the lower-version
// shards: all payloads are stored, but only the overshadowing one stays "used".
@Test
public void testOvershadowingAnnounce() throws IOException
{
  final ImmutableSet<DataSegment> segments = ImmutableSet.of(defaultSegment, defaultSegment2, defaultSegment4);

  coordinator.announceHistoricalSegments(segments);

  for (DataSegment segment : segments) {
    Assert.assertArrayEquals(
        mapper.writeValueAsString(segment).getBytes("UTF-8"),
        derbyConnector.lookup(
            derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
            "id",
            "payload",
            segment.getIdentifier()
        )
    );
  }

  Assert.assertEquals(ImmutableList.of(defaultSegment4.getIdentifier()), getUsedIdentifiers())
;
}

// Two sequential transactional announces whose start metadata matches the stored
// end metadata both succeed, and metadata is attempted exactly once per call.
@Test
public void testTransactionalAnnounceSuccess() throws IOException
{
  // Insert first segment.
  final SegmentPublishResult result1 = coordinator.announceHistoricalSegments(
      ImmutableSet.of(defaultSegment),
      new ObjectMetadata(null),
      new ObjectMetadata(ImmutableMap.of("foo", "bar"))
  );
  Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment), true), result1);

  Assert.assertArrayEquals(
      mapper.writeValueAsString(defaultSegment).getBytes("UTF-8"),
      derbyConnector.lookup(
          derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
          "id",
          "payload",
          defaultSegment.getIdentifier()
      )
  );

  // Insert second segment.
  final SegmentPublishResult result2 = coordinator.announceHistoricalSegments(
      ImmutableSet.of(defaultSegment2),
      new ObjectMetadata(ImmutableMap.of("foo", "bar")),
      new ObjectMetadata(ImmutableMap.of("foo", "baz"))
  );
  Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment2), true), result2);

  Assert.assertArrayEquals(
      mapper.writeValueAsString(defaultSegment2).getBytes("UTF-8"),
      derbyConnector.lookup(
          derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
          "id",
          "payload",
          defaultSegment2.getIdentifier()
      )
  );

  // Examine metadata.
  Assert.assertEquals(
      new ObjectMetadata(ImmutableMap.of("foo", "baz")),
      coordinator.getDataSourceMetadata("fooDataSource")
  );

  // Should only be tried once per call.
  Assert.assertEquals(2, metadataUpdateCounter.get());
}
// A coordinator whose first metadata update returns TRY_AGAIN must retry and
// succeed on the second attempt; both announces therefore try twice.
@Test
public void testTransactionalAnnounceRetryAndSuccess() throws IOException
{
  final AtomicLong attemptCounter = new AtomicLong();

  final IndexerSQLMetadataStorageCoordinator failOnceCoordinator = new IndexerSQLMetadataStorageCoordinator(
      mapper,
      derbyConnectorRule.metadataTablesConfigSupplier().get(),
      derbyConnector
  )
  {
    @Override
    protected DataSourceMetadataUpdateResult updateDataSourceMetadataWithHandle(
        Handle handle,
        String dataSource,
        DataSourceMetadata startMetadata,
        DataSourceMetadata endMetadata
    ) throws IOException
    {
      metadataUpdateCounter.getAndIncrement();
      if (attemptCounter.getAndIncrement() == 0) {
        return DataSourceMetadataUpdateResult.TRY_AGAIN;
      } else {
        return super.updateDataSourceMetadataWithHandle(handle, dataSource, startMetadata, endMetadata);
      }
    }
  };

  // Insert first segment.
  final SegmentPublishResult result1 = failOnceCoordinator.announceHistoricalSegments(
      ImmutableSet.of(defaultSegment),
      new ObjectMetadata(null),
      new ObjectMetadata(ImmutableMap.of("foo", "bar"))
  );
  Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment), true), result1);

  Assert.assertArrayEquals(
      mapper.writeValueAsString(defaultSegment).getBytes("UTF-8"),
      derbyConnector.lookup(
          derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
          "id",
          "payload",
          defaultSegment.getIdentifier()
      )
  );

  // Reset attempt counter to induce another failure.
  attemptCounter.set(0);

  // Insert second segment.
  final SegmentPublishResult result2 = failOnceCoordinator.announceHistoricalSegments(
      ImmutableSet.of(defaultSegment2),
      new ObjectMetadata(ImmutableMap.of("foo", "bar")),
      new ObjectMetadata(ImmutableMap.of("foo", "baz"))
  );
  Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment2), true), result2);

  Assert.assertArrayEquals(
      mapper.writeValueAsString(defaultSegment2).getBytes("UTF-8"),
      derbyConnector.lookup(
          derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
          "id",
          "payload",
          defaultSegment2.getIdentifier()
      )
  );

  // Examine metadata.
  Assert.assertEquals(
      new ObjectMetadata(ImmutableMap.of("foo", "baz")),
      failOnceCoordinator.getDataSourceMetadata("fooDataSource")
  );

  // Should be tried twice per call.
  Assert.assertEquals(4, metadataUpdateCounter.get());
}

// Start metadata claims a value but the store has none: the publish must fail
// without retrying.
@Test
public void testTransactionalAnnounceFailDbNullWantNotNull() throws IOException
{
  final SegmentPublishResult result1 = coordinator.announceHistoricalSegments(
      ImmutableSet.of(defaultSegment),
      new ObjectMetadata(ImmutableMap.of("foo", "bar")),
      new ObjectMetadata(ImmutableMap.of("foo", "baz"))
  );
  Assert.assertEquals(new SegmentPublishResult(ImmutableSet.<DataSegment>of(), false), result1);

  // Should only be tried once.
  Assert.assertEquals(1, metadataUpdateCounter.get());
}

// Start metadata claims null but the store already has a value: second publish fails.
@Test
public void testTransactionalAnnounceFailDbNotNullWantNull() throws IOException
{
  final SegmentPublishResult result1 = coordinator.announceHistoricalSegments(
      ImmutableSet.of(defaultSegment),
      new ObjectMetadata(null),
      new ObjectMetadata(ImmutableMap.of("foo", "baz"))
  );
  Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment), true), result1);

  final SegmentPublishResult result2 = coordinator.announceHistoricalSegments(
      ImmutableSet.of(defaultSegment2),
      new ObjectMetadata(null),
      new ObjectMetadata(ImmutableMap.of("foo", "baz"))
  );
  Assert.assertEquals(new SegmentPublishResult(ImmutableSet.<DataSegment>of(), false), result2);

  // Should only be tried once per call.
  Assert.assertEquals(2, metadataUpdateCounter.get());
}

// Start metadata disagrees with the stored value: second publish fails.
@Test
public void testTransactionalAnnounceFailDbNotNullWantDifferent() throws IOException
{
  final SegmentPublishResult result1 = coordinator.announceHistoricalSegments(
      ImmutableSet.of(defaultSegment),
      new ObjectMetadata(null),
      new ObjectMetadata(ImmutableMap.of("foo", "baz"))
  );
  Assert.assertEquals(new SegmentPublishResult(ImmutableSet.of(defaultSegment), true), result1);

  final SegmentPublishResult result2 = coordinator.announceHistoricalSegments(
      ImmutableSet.of(defaultSegment2),
      new ObjectMetadata(ImmutableMap.of("foo", "qux")),
      new ObjectMetadata(ImmutableMap.of("foo", "baz"))
  );
  Assert.assertEquals(new SegmentPublishResult(ImmutableSet.<DataSegment>of(), false), result2);

  // Should only be tried once per call.
  Assert.assertEquals(2, metadataUpdateCounter.get());
}
// Used-segment query over the exact interval returns the announced segments.
@Test
public void testSimpleUsedList() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  Assert.assertEquals(
      SEGMENTS,
      ImmutableSet.copyOf(
          coordinator.getUsedSegmentsForInterval(
              defaultSegment.getDataSource(),
              defaultSegment.getInterval()
          )
      )
  );
}

// Multi-interval queries return the union of matches, without duplicating a
// segment matched by more than one interval.
@Test
public void testMultiIntervalUsedList() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  coordinator.announceHistoricalSegments(ImmutableSet.of(defaultSegment3));

  Assert.assertEquals(
      SEGMENTS,
      ImmutableSet.copyOf(
          coordinator.getUsedSegmentsForIntervals(
              defaultSegment.getDataSource(),
              ImmutableList.of(defaultSegment.getInterval())
          )
      )
  );

  Assert.assertEquals(
      ImmutableSet.of(defaultSegment3),
      ImmutableSet.copyOf(
          coordinator.getUsedSegmentsForIntervals(
              defaultSegment.getDataSource(),
              ImmutableList.of(defaultSegment3.getInterval())
          )
      )
  );

  Assert.assertEquals(
      ImmutableSet.of(defaultSegment, defaultSegment2, defaultSegment3),
      ImmutableSet.copyOf(
          coordinator.getUsedSegmentsForIntervals(
              defaultSegment.getDataSource(),
              ImmutableList.of(defaultSegment.getInterval(), defaultSegment3.getInterval())
          )
      )
  );

  //case to check no duplication if two intervals overlapped with the interval of same segment.
  Assert.assertEquals(
      ImmutableList.of(defaultSegment3),
      coordinator.getUsedSegmentsForIntervals(
          defaultSegment.getDataSource(),
          ImmutableList.of(
              Interval.parse("2015-01-03T00Z/2015-01-03T05Z"),
              Interval.parse("2015-01-03T09Z/2015-01-04T00Z")
          )
      )
  );
}

// After marking segments unused, they appear in the unused query for their interval.
@Test
public void testSimpleUnUsedList() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  unUseSegment();
  Assert.assertEquals(
      SEGMENTS,
      ImmutableSet.copyOf(
          coordinator.getUnusedSegmentsForInterval(
              defaultSegment.getDataSource(),
              defaultSegment.getInterval()
          )
      )
  );
}

// A query interval that barely overlaps the segment's start still matches.
@Test
public void testUsedOverlapLow() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  Set<DataSegment> actualSegments = ImmutableSet.copyOf(
      coordinator.getUsedSegmentsForInterval(
          defaultSegment.getDataSource(),
          Interval.parse("2014-12-31T23:59:59.999Z/2015-01-01T00:00:00.001Z") // end is exclusive
      )
  );
  Assert.assertEquals(
      SEGMENTS,
      actualSegments
  );
}

// A query interval that barely overlaps the segment's end still matches.
@Test
public void testUsedOverlapHigh() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  Assert.assertEquals(
      SEGMENTS,
      ImmutableSet.copyOf(
          coordinator.getUsedSegmentsForInterval(
              defaultSegment.getDataSource(),
              Interval.parse("2015-1-1T23:59:59.999Z/2015-02-01T00Z")
          )
      )
  );
}

// An interval ending exactly at the segment's start (exclusive) matches nothing.
@Test
public void testUsedOutOfBoundsLow() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  Assert.assertTrue(
      coordinator.getUsedSegmentsForInterval(
          defaultSegment.getDataSource(),
          new Interval(defaultSegment.getInterval().getStart().minus(1), defaultSegment.getInterval().getStart())
      ).isEmpty()
  );
}

// An interval starting exactly at the segment's end matches nothing.
@Test
public void testUsedOutOfBoundsHigh() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  Assert.assertTrue(
      coordinator.getUsedSegmentsForInterval(
          defaultSegment.getDataSource(),
          new Interval(defaultSegment.getInterval().getEnd(), defaultSegment.getInterval().getEnd().plusDays(10))
      ).isEmpty()
  );
}

// A query interval strictly inside the segment's span still matches.
@Test
public void testUsedWithinBoundsEnd() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  Assert.assertEquals(
      SEGMENTS,
      ImmutableSet.copyOf(
          coordinator.getUsedSegmentsForInterval(
              defaultSegment.getDataSource(),
              defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().minusMillis(1))
          )
      )
  );
}

// A query interval extending just past the segment's end still matches.
@Test
public void testUsedOverlapEnd() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  Assert.assertEquals(
      SEGMENTS,
      ImmutableSet.copyOf(
          coordinator.getUsedSegmentsForInterval(
              defaultSegment.getDataSource(),
              defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().plusMillis(1))
          )
      )
  );
}
// Unlike the used-segment queries, unused queries require the segment interval to
// be fully contained in the query interval: partial overlap at the start matches nothing.
@Test
public void testUnUsedOverlapLow() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  unUseSegment();
  Assert.assertTrue(
      coordinator.getUnusedSegmentsForInterval(
          defaultSegment.getDataSource(),
          new Interval(
              defaultSegment.getInterval().getStart().minus(1),
              defaultSegment.getInterval().getStart().plus(1)
          )
      ).isEmpty()
  );
}

// Query interval starting inside the segment does not contain it: no match.
@Test
public void testUnUsedUnderlapLow() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  unUseSegment();
  Assert.assertTrue(
      coordinator.getUnusedSegmentsForInterval(
          defaultSegment.getDataSource(),
          new Interval(defaultSegment.getInterval().getStart().plus(1), defaultSegment.getInterval().getEnd())
      ).isEmpty()
  );
}

// Query interval ending inside the segment does not contain it: no match.
@Test
public void testUnUsedUnderlapHigh() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  unUseSegment();
  Assert.assertTrue(
      coordinator.getUnusedSegmentsForInterval(
          defaultSegment.getDataSource(),
          new Interval(defaultSegment.getInterval().getStart(), defaultSegment.getInterval().getEnd().minus(1))
      ).isEmpty()
  );
}

// Partial overlap at the segment's end only: no match.
@Test
public void testUnUsedOverlapHigh() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  unUseSegment();
  Assert.assertTrue(
      coordinator.getUnusedSegmentsForInterval(
          defaultSegment.getDataSource(),
          defaultSegment.getInterval().withStart(defaultSegment.getInterval().getEnd().minus(1))
      ).isEmpty()
  );
}

// A query interval that fully contains the segment interval matches.
@Test
public void testUnUsedBigOverlap() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  unUseSegment();
  Assert.assertEquals(
      SEGMENTS,
      ImmutableSet.copyOf(
          coordinator.getUnusedSegmentsForInterval(
              defaultSegment.getDataSource(),
              Interval.parse("2000/2999")
          )
      )
  );
}

// Extending the query interval's start earlier (by 1ms or 1 year) still contains
// the segment, so it matches.
@Test
public void testUnUsedLowRange() throws IOException
{
  coordinator.announceHistoricalSegments(SEGMENTS);
  unUseSegment();
  Assert.assertEquals(
      SEGMENTS,
      ImmutableSet.copyOf(
          coordinator.getUnusedSegmentsForInterval(
              defaultSegment.getDataSource(),
              defaultSegment.getInterval().withStart(defaultSegment.getInterval().getStart().minus(1))
          )
      )
  );
  Assert.assertEquals(
      SEGMENTS,
      ImmutableSet.copyOf(
          coordinator.getUnusedSegmentsForInterval(
              defaultSegment.getDataSource(),
              defaultSegment.getInterval().withStart(defaultSegment.getInterval().getStart().minusYears(1))
          )
      )
  );
}
@Test
public void testUnUsedHighRange() throws IOException
{
coordinator.announceHistoricalSegments(SEGMENTS);
unUseSegment();
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().plus(1))
)
)
);
Assert.assertEquals(
SEGMENTS,
ImmutableSet.copyOf(
coordinator.getUnusedSegmentsForInterval(
defaultSegment.getDataSource(),
defaultSegment.getInterval().withEnd(defaultSegment.getInterval().getEnd().plusYears(1))
)
)
);
}
@Test
public void testDeleteDataSourceMetadata() throws IOException
{
coordinator.announceHistoricalSegments(
ImmutableSet.of(defaultSegment),
new ObjectMetadata(null),
new ObjectMetadata(ImmutableMap.of("foo", "bar"))
);
Assert.assertEquals(
new ObjectMetadata(ImmutableMap.of("foo", "bar")),
coordinator.getDataSourceMetadata("fooDataSource")
);
Assert.assertFalse("deleteInvalidDataSourceMetadata", coordinator.deleteDataSourceMetadata("nonExistentDS"));
Assert.assertTrue("deleteValidDataSourceMetadata", coordinator.deleteDataSourceMetadata("fooDataSource"));
Assert.assertNull("getDataSourceMetadataNullAfterDelete", coordinator.getDataSourceMetadata("fooDataSource"));
}
  @Test
  public void testSingleAdditionalNumberedShardWithNoCorePartitions() throws IOException
  {
    // Single-segment case; exercises the shared verification in additionalNumberedShardTest.
    additionalNumberedShardTest(ImmutableSet.of(numberedSegment0of0));
  }
  @Test
  public void testMultipleAdditionalNumberedShardsWithNoCorePartitions() throws IOException
  {
    // Three segments from the "…of0" fixtures; verified by additionalNumberedShardTest.
    additionalNumberedShardTest(ImmutableSet.of(numberedSegment0of0, numberedSegment1of0, numberedSegment2of0));
  }
  @Test
  public void testSingleAdditionalNumberedShardWithOneCorePartition() throws IOException
  {
    // Single segment from the "…of1" fixtures; verified by additionalNumberedShardTest.
    additionalNumberedShardTest(ImmutableSet.of(numberedSegment2of1));
  }
  @Test
  public void testMultipleAdditionalNumberedShardsWithOneCorePartition() throws IOException
  {
    // Two segments from the "…of1" fixtures; verified by additionalNumberedShardTest.
    additionalNumberedShardTest(ImmutableSet.of(numberedSegment2of1, numberedSegment3of1));
  }
private void additionalNumberedShardTest(Set<DataSegment> segments) throws IOException
{
coordinator.announceHistoricalSegments(segments);
for (DataSegment segment : segments) {
Assert.assertArrayEquals(
mapper.writeValueAsString(segment).getBytes("UTF-8"),
derbyConnector.lookup(
derbyConnectorRule.metadataTablesConfigSupplier().get().getSegmentsTable(),
"id",
"payload",
segment.getIdentifier()
)
);
}
Assert.assertEquals(
segments.stream().map(DataSegment::getIdentifier).collect(Collectors.toList()),
getUsedIdentifiers()
);
// Should not update dataSource metadata.
Assert.assertEquals(0, metadataUpdateCounter.get());
}
}
| apache-2.0 |
apache/sis | core/sis-utility/src/main/java/org/apache/sis/util/CharSequences.java | 106867 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.util;
import java.util.Arrays;
import java.nio.CharBuffer;
import org.opengis.metadata.citation.Citation; // For javadoc
import org.opengis.referencing.IdentifiedObject; // For javadoc
import static java.lang.Character.*;
/**
* Static methods working with {@link CharSequence} instances. Some methods defined in this
* class duplicate the functionalities already provided in the standard {@link String} class,
* but works on a generic {@code CharSequence} instance instead of {@code String}.
*
* <h2>Unicode support</h2>
 * Every method defined in this class works on <cite>code points</cite> instead of characters
* when appropriate. Consequently those methods should behave correctly with characters outside
* the <cite>Basic Multilingual Plane</cite> (BMP).
*
* <h2>Policy on space characters</h2>
* Java defines two methods for testing if a character is a white space:
* {@link Character#isWhitespace(int)} and {@link Character#isSpaceChar(int)}.
* Those two methods differ in the way they handle {@linkplain Characters#NO_BREAK_SPACE
* no-break spaces}, tabulations and line feeds. The general policy in the SIS library is:
*
* <ul>
* <li>Use {@code isWhitespace(…)} when separating entities (words, numbers, tokens, <i>etc.</i>)
* in a list. Using that method, characters separated by a no-break space are considered as
* part of the same entity.</li>
* <li>Use {@code isSpaceChar(…)} when parsing a single entity, for example a single word.
* Using this method, no-break spaces are considered as part of the entity while line
* feeds or tabulations are entity boundaries.</li>
* </ul>
*
* <div class="note"><b>Example:</b>
* Numbers formatted in the French locale use no-break spaces as group separators. When parsing a list of numbers,
* ordinary spaces around the numbers may need to be ignored, but no-break spaces shall be considered as part of the
* numbers. Consequently {@code isWhitespace(…)} is appropriate for skipping spaces <em>between</em> the numbers.
* But if there is spaces to skip <em>inside</em> a single number, then {@code isSpaceChar(…)} is a good choice
* for accepting no-break spaces and for stopping the parse operation at tabulations or line feed character.
* A tabulation or line feed between two characters is very likely to separate two distinct values.</div>
*
* In practice, the {@link java.text.Format} implementations in the SIS library typically use
* {@code isSpaceChar(…)} while most of the rest of the SIS library, including this
* {@code CharSequences} class, consistently uses {@code isWhitespace(…)}.
*
* <p>Note that the {@link String#trim()} method doesn't follow any of those policies and should
* generally be avoided. That {@code trim()} method removes every ISO control characters without
* distinction about whether the characters are space or not, and ignore all Unicode spaces.
* The {@link #trimWhitespaces(String)} method defined in this class can be used as an alternative.</p>
*
* <h2>Handling of null values</h2>
* Most methods in this class accept a {@code null} {@code CharSequence} argument. In such cases
* the method return value is either a {@code null} {@code CharSequence}, an empty array, or a
* {@code 0} or {@code false} primitive type calculated as if the input was an empty string.
*
* @author Martin Desruisseaux (Geomatys)
* @version 1.1
*
* @see StringBuilders
*
* @since 0.3
* @module
*/
public final class CharSequences extends Static {
    /**
     * An array of zero-length. This constant plays a role equivalent to
     * {@link java.util.Collections#EMPTY_LIST}: callers can share this single
     * immutable instance instead of allocating a new empty array.
     */
    public static final String[] EMPTY_ARRAY = new String[0];
    /**
     * Cache of short strings containing only white spaces. The string of length
     * <var>n</var> is stored at index <var>n</var>-1; for example the string of
     * length 4 is stored at {@code SPACES[3]}. Entries are created only when
     * first needed (see {@code spaces(int)}).
     */
    private static final String[] SPACES = new String[10];
    /**
     * Do not allow instantiation of this class.
     */
    private CharSequences() {
    }
/**
* Returns the code point after the given index. This method completes
* {@link Character#codePointBefore(CharSequence, int)} but is rarely used because slightly
* inefficient (in most cases, the code point at {@code index} is known together with the
* corresponding {@code charCount(int)} value, so the method calls should be unnecessary).
*/
private static int codePointAfter(final CharSequence text, final int index) {
return codePointAt(text, index + charCount(codePointAt(text, index)));
}
/**
* Returns a character sequence of the specified length filled with white spaces.
*
* <h4>Use case</h4>
* This method is typically invoked for performing right-alignment of text on the
* {@linkplain java.io.Console console} or other device using monospaced font.
* Callers compute a value for the {@code length} argument by (<var>desired width</var> - <var>used width</var>).
* Since the <var>used width</var> value may be greater than expected, this method handle negative {@code length}
* values as if the value was zero.
*
* @param length the string length. Negative values are clamped to 0.
* @return a string of length {@code length} filled with white spaces.
*/
public static CharSequence spaces(final int length) {
/*
* No need to synchronize. In the unlikely event of two threads calling this method
* at the same time and the two calls creating a new string, the String.intern() call
* will take care of canonicalizing the strings.
*/
if (length <= 0) {
return "";
}
if (length < SPACES.length) {
String s = SPACES[length - 1];
if (s == null) {
final char[] spaces = new char[length];
Arrays.fill(spaces, ' ');
s = new String(spaces).intern();
SPACES[length - 1] = s;
}
return s;
}
return new CharSequence() {
@Override public int length() {
return length;
}
@Override public char charAt(int index) {
ArgumentChecks.ensureValidIndex(length, index);
return ' ';
}
@Override public CharSequence subSequence(final int start, final int end) {
ArgumentChecks.ensureValidIndexRange(length, start, end);
final int n = end - start;
return (n == length) ? this : spaces(n);
}
@Override public String toString() {
final char[] array = new char[length];
Arrays.fill(array, ' ');
return new String(array);
}
};
}
/**
* Returns the {@linkplain CharSequence#length() length} of the given characters sequence,
* or 0 if {@code null}.
*
* @param text the character sequence from which to get the length, or {@code null}.
* @return the length of the character sequence, or 0 if the argument is {@code null}.
*/
public static int length(final CharSequence text) {
return (text != null) ? text.length() : 0;
}
/**
* Returns the number of Unicode code points in the given characters sequence,
* or 0 if {@code null}. Unpaired surrogates within the text count as one code
* point each.
*
* @param text the character sequence from which to get the count, or {@code null}.
* @return the number of Unicode code points, or 0 if the argument is {@code null}.
*
* @see #codePointCount(CharSequence, int, int)
*/
public static int codePointCount(final CharSequence text) {
return (text != null) ? codePointCount(text, 0, text.length()) : 0;
}
/**
* Returns the number of Unicode code points in the given characters sub-sequence,
* or 0 if {@code null}. Unpaired surrogates within the text count as one code
* point each.
*
* <p>This method performs the same work than the standard
* {@link Character#codePointCount(CharSequence, int, int)} method, except that it tries
* to delegate to the optimized methods from the {@link String}, {@link StringBuilder},
* {@link StringBuffer} or {@link CharBuffer} classes if possible.</p>
*
* @param text the character sequence from which to get the count, or {@code null}.
* @param fromIndex the index from which to start the computation.
* @param toIndex the index after the last character to take in account.
* @return the number of Unicode code points, or 0 if the argument is {@code null}.
*
* @see Character#codePointCount(CharSequence, int, int)
* @see String#codePointCount(int, int)
* @see StringBuilder#codePointCount(int, int)
*/
public static int codePointCount(final CharSequence text, final int fromIndex, final int toIndex) {
if (text == null) return 0;
if (text instanceof String) return ((String) text).codePointCount(fromIndex, toIndex);
if (text instanceof StringBuilder) return ((StringBuilder) text).codePointCount(fromIndex, toIndex);
if (text instanceof StringBuffer) return ((StringBuffer) text).codePointCount(fromIndex, toIndex);
if (text instanceof CharBuffer) {
final CharBuffer buffer = (CharBuffer) text;
if (buffer.hasArray() && !buffer.isReadOnly()) {
final int position = buffer.position();
return Character.codePointCount(buffer.array(), position + fromIndex, position + toIndex);
}
}
return Character.codePointCount(text, fromIndex, toIndex);
}
/**
* Returns the number of occurrences of the {@code toSearch} string in the given {@code text}.
* The search is case-sensitive.
*
* @param text the character sequence to count occurrences, or {@code null}.
* @param toSearch the string to search in the given {@code text}.
* It shall contain at least one character.
* @return the number of occurrences of {@code toSearch} in {@code text},
* or 0 if {@code text} was null or empty.
* @throws NullArgumentException if the {@code toSearch} argument is null.
* @throws IllegalArgumentException if the {@code toSearch} argument is empty.
*/
public static int count(final CharSequence text, final String toSearch) {
ArgumentChecks.ensureNonEmpty("toSearch", toSearch);
final int length = toSearch.length();
if (length == 1) {
// Implementation working on a single character is faster.
return count(text, toSearch.charAt(0));
}
int n = 0;
if (text != null) {
int i = 0;
while ((i = indexOf(text, toSearch, i, text.length())) >= 0) {
n++;
i += length;
}
}
return n;
}
/**
* Counts the number of occurrence of the given character in the given character sequence.
*
* @param text the character sequence to count occurrences, or {@code null}.
* @param toSearch the character to count.
* @return the number of occurrences of the given character, or 0 if the {@code text} is null.
*/
public static int count(final CharSequence text, final char toSearch) {
int n = 0;
if (text != null) {
if (text instanceof String) {
final String s = (String) text;
for (int i=s.indexOf(toSearch); ++i != 0; i=s.indexOf(toSearch, i)) {
n++;
}
} else {
// No need to use the code point API here, since we are looking for exact matches.
for (int i=text.length(); --i>=0;) {
if (text.charAt(i) == toSearch) {
n++;
}
}
}
}
return n;
}
    /**
     * Returns the index within the given string of the first occurrence of the specified part,
     * starting at the specified index. This method is equivalent to the following method call,
     * except that this method works on arbitrary {@link CharSequence} objects instead of
     * {@link String}s only, and that the upper limit can be specified:
     *
     * {@preformat java
     *     return text.indexOf(toSearch, fromIndex);
     * }
     *
     * There is no restriction on the value of {@code fromIndex}. If negative or greater
     * than {@code toIndex}, then the behavior of this method is as if the search started
     * from 0 or {@code toIndex} respectively. This is consistent with the
     * {@link String#indexOf(String, int)} behavior.
     *
     * @param text the string in which to perform the search.
     * @param toSearch the substring for which to search.
     * @param fromIndex the index from which to start the search.
     * @param toIndex the index after the last character where to perform the search.
     * @return the index within the text of the first occurrence of the specified part, starting at the specified index,
     * or -1 if no occurrence has been found or if the {@code text} argument is null.
     * @throws NullArgumentException if the {@code toSearch} argument is null.
     * @throws IllegalArgumentException if the {@code toSearch} argument is empty.
     *
     * @see String#indexOf(String, int)
     * @see StringBuilder#indexOf(String, int)
     * @see StringBuffer#indexOf(String, int)
     */
    public static int indexOf(final CharSequence text, final CharSequence toSearch, int fromIndex, int toIndex) {
        ArgumentChecks.ensureNonEmpty("toSearch", toSearch);
        if (text != null) {
            int length = text.length();
            if (toIndex > length) {
                toIndex = length;
            }
            // Delegate to the optimized String/StringBuilder/StringBuffer implementations, but only
            // when the search extends to the end of the text since those methods accept no upper limit.
            if (toSearch instanceof String && toIndex == length) {
                if (text instanceof String) {
                    return ((String) text).indexOf((String) toSearch, fromIndex);
                }
                if (text instanceof StringBuilder) {
                    return ((StringBuilder) text).indexOf((String) toSearch, fromIndex);
                }
                if (text instanceof StringBuffer) {
                    return ((StringBuffer) text).indexOf((String) toSearch, fromIndex);
                }
            }
            if (fromIndex < 0) {
                fromIndex = 0;
            }
            length = toSearch.length();
            toIndex -= length;          // Highest index where a match can still start.
            search: for (; fromIndex <= toIndex; fromIndex++) {
                for (int i=0; i<length; i++) {
                    // No need to use the codePointAt API here, since we are looking for exact matches.
                    if (text.charAt(fromIndex + i) != toSearch.charAt(i)) {
                        continue search;
                    }
                }
                return fromIndex;       // Every character of 'toSearch' matched at this position.
            }
        }
        return -1;
    }
    /**
     * Returns the index within the given character sequence of the first occurrence of the
     * specified character, starting the search at the specified index. If the character is
     * not found, then this method returns -1.
     *
     * <p>There is no restriction on the value of {@code fromIndex}. If negative or greater
     * than {@code toIndex}, then the behavior of this method is as if the search started
     * from 0 or {@code toIndex} respectively. This is consistent with the behavior documented
     * in {@link String#indexOf(int, int)}.</p>
     *
     * @param text the character sequence in which to perform the search, or {@code null}.
     * @param toSearch the Unicode code point of the character to search.
     * @param fromIndex the index to start the search from.
     * @param toIndex the index after the last character where to perform the search.
     * @return the index of the first occurrence of the given character in the specified sub-sequence,
     * or -1 if no occurrence has been found or if the {@code text} argument is null.
     *
     * @see String#indexOf(int, int)
     */
    public static int indexOf(final CharSequence text, final int toSearch, int fromIndex, int toIndex) {
        if (text != null) {
            final int length = text.length();
            if (toIndex >= length) {
                if (text instanceof String) {
                    // String provides a faster implementation.
                    return ((String) text).indexOf(toSearch, fromIndex);
                }
                toIndex = length;
            }
            if (fromIndex < 0) {
                fromIndex = 0;
            }
            // Decompose the code point into the one or two chars to match.
            char head = (char) toSearch;
            char tail = (char) 0;
            if (head != toSearch) { // Outside BMP plane?
                head = highSurrogate(toSearch);
                tail = lowSurrogate (toSearch);
                toIndex--;          // A surrogate pair needs two chars, so the head can not start at the last index.
            }
            while (fromIndex < toIndex) {
                if (text.charAt(fromIndex) == head) {
                    // For BMP characters 'tail' is 0, so the first condition alone accepts the match.
                    if (tail == 0 || text.charAt(fromIndex+1) == tail) {
                        return fromIndex;
                    }
                }
                fromIndex++;
            }
        }
        return -1;
    }
    /**
     * Returns the index within the given character sequence of the last occurrence of the
     * specified character, searching backward in the given index range.
     * If the character is not found, then this method returns -1.
     *
     * <p>There is no restriction on the value of {@code toIndex}. If greater than the text length
     * or less than {@code fromIndex}, then the behavior of this method is as if the search started
     * from {@code length} or {@code fromIndex} respectively. This is consistent with the behavior
     * documented in {@link String#lastIndexOf(int, int)}.</p>
     *
     * @param text the character sequence in which to perform the search, or {@code null}.
     * @param toSearch the Unicode code point of the character to search.
     * @param fromIndex the index of the first character in the range where to perform the search.
     * @param toIndex the index after the last character in the range where to perform the search.
     * @return the index of the last occurrence of the given character in the specified sub-sequence,
     * or -1 if no occurrence has been found or if the {@code text} argument is null.
     *
     * @see String#lastIndexOf(int, int)
     */
    public static int lastIndexOf(final CharSequence text, final int toSearch, int fromIndex, int toIndex) {
        if (text != null) {
            if (fromIndex <= 0) {
                if (text instanceof String) {
                    // String provides a faster implementation. Note: lastIndexOf takes an inclusive index.
                    return ((String) text).lastIndexOf(toSearch, toIndex - 1);
                }
                fromIndex = 0;
            }
            final int length = text.length();
            if (toIndex > length) {
                toIndex = length;
            }
            // Decompose the code point into the one or two chars to match, scanning for the tail first.
            char tail = (char) toSearch;
            char head = (char) 0;
            if (tail != toSearch) { // Outside BMP plane?
                tail = lowSurrogate (toSearch);
                head = highSurrogate(toSearch);
                fromIndex++;        // A surrogate pair needs two chars, so the tail can not be at the first index.
            }
            while (toIndex > fromIndex) {
                if (text.charAt(--toIndex) == tail) {
                    // For BMP characters 'head' is 0, so the first condition alone accepts the match.
                    // NOTE(review): in the supplementary case, the second '--toIndex' consumes the
                    // preceding character even when it is not 'head'; with malformed input (an unpaired
                    // low surrogate immediately after a real match) that match would be skipped — confirm intended.
                    if (head == 0 || text.charAt(--toIndex) == head) {
                        return toIndex;
                    }
                }
            }
        }
        return -1;
    }
    /**
     * Returns the index of the first character after the given number of lines.
     * This method counts the number of occurrence of {@code '\n'}, {@code '\r'}
     * or {@code "\r\n"} starting from the given position. When {@code numLines}
     * occurrences have been found, the index of the first character after the last
     * occurrence is returned.
     *
     * <p>If the {@code numLines} argument is positive, this method searches forward.
     * If negative, this method searches backward. If 0, this method returns the
     * beginning of the current line.</p>
     *
     * <p>If this method reaches the end of {@code text} while searching forward, then
     * {@code text.length()} is returned. If this method reaches the beginning of
     * {@code text} while searching backward, then 0 is returned.</p>
     *
     * @param text the string in which to skip a determined amount of lines.
     * @param numLines the number of lines to skip. Can be positive, zero or negative.
     * @param fromIndex index at which to start the search, from 0 to {@code text.length()} inclusive.
     * @return index of the first character after the last skipped line.
     * @throws NullPointerException if the {@code text} argument is null.
     * @throws IndexOutOfBoundsException if {@code fromIndex} is out of bounds.
     */
    public static int indexOfLineStart(final CharSequence text, int numLines, int fromIndex) {
        final int length = text.length();
        /*
         * Go backward if the number of lines is negative.
         * No need to use the codePoint API because we are
         * looking only for characters in the BMP plane.
         */
        if (numLines <= 0) {
            do {
                char c;
                do {
                    if (fromIndex == 0) {
                        return fromIndex;           // Reached the beginning of text: can not move further back.
                    }
                    c = text.charAt(--fromIndex);
                    if (c == '\n') {
                        if (fromIndex != 0 && text.charAt(fromIndex - 1) == '\r') {
                            --fromIndex;            // Treat "\r\n" as a single line separator.
                        }
                        break;
                    }
                } while (c != '\r');
            } while (++numLines != 1);              // One extra EOL back; the forward pass below re-skips it.
            // Execute the forward code below for skipping the "end of line" characters.
        }
        /*
         * Skips forward the given amount of lines.
         */
        while (--numLines >= 0) {
            char c;
            do {
                if (fromIndex == length) {
                    return fromIndex;               // Reached the end of text: can not move further forward.
                }
                c = text.charAt(fromIndex++);
                if (c == '\r') {
                    if (fromIndex != length && text.charAt(fromIndex) == '\n') {
                        fromIndex++;                // Treat "\r\n" as a single line separator.
                    }
                    break;
                }
            } while (c != '\n');
        }
        return fromIndex;
    }
/**
* Returns the index of the first non-white character in the given range.
* If the given range contains only space characters, then this method returns the index of the
* first character after the given range, which is always equals or greater than {@code toIndex}.
* Note that this character may not exist if {@code toIndex} is equals to the text length.
*
* <p>Special cases:</p>
* <ul>
* <li>If {@code fromIndex} is greater than {@code toIndex},
* then this method unconditionally returns {@code fromIndex}.</li>
* <li>If the given range contains only space characters and the character at {@code toIndex-1}
* is the high surrogate of a valid supplementary code point, then this method returns
* {@code toIndex+1}, which is the index of the next code point.</li>
* <li>If {@code fromIndex} is negative or {@code toIndex} is greater than the text length,
* then the behavior of this method is undefined.</li>
* </ul>
*
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* @param text the string in which to perform the search (can not be null).
* @param fromIndex the index from which to start the search (can not be negative).
* @param toIndex the index after the last character where to perform the search.
* @return the index within the text of the first occurrence of a non-space character, starting
* at the specified index, or a value equals or greater than {@code toIndex} if none.
* @throws NullPointerException if the {@code text} argument is null.
*
* @see #skipTrailingWhitespaces(CharSequence, int, int)
* @see #trimWhitespaces(CharSequence)
* @see String#stripLeading()
*/
public static int skipLeadingWhitespaces(final CharSequence text, int fromIndex, final int toIndex) {
while (fromIndex < toIndex) {
final int c = codePointAt(text, fromIndex);
if (!isWhitespace(c)) break;
fromIndex += charCount(c);
}
return fromIndex;
}
/**
* Returns the index <em>after</em> the last non-white character in the given range.
* If the given range contains only space characters, then this method returns the index of the
* first character in the given range, which is always equals or lower than {@code fromIndex}.
*
* <p>Special cases:</p>
* <ul>
* <li>If {@code fromIndex} is lower than {@code toIndex},
* then this method unconditionally returns {@code toIndex}.</li>
* <li>If the given range contains only space characters and the character at {@code fromIndex}
* is the low surrogate of a valid supplementary code point, then this method returns
* {@code fromIndex-1}, which is the index of the code point.</li>
* <li>If {@code fromIndex} is negative or {@code toIndex} is greater than the text length,
* then the behavior of this method is undefined.</li>
* </ul>
*
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* @param text the string in which to perform the search (can not be null).
* @param fromIndex the index from which to start the search (can not be negative).
* @param toIndex the index after the last character where to perform the search.
* @return the index within the text of the last occurrence of a non-space character, starting
* at the specified index, or a value equals or lower than {@code fromIndex} if none.
* @throws NullPointerException if the {@code text} argument is null.
*
* @see #skipLeadingWhitespaces(CharSequence, int, int)
* @see #trimWhitespaces(CharSequence)
* @see String#stripTrailing()
*/
public static int skipTrailingWhitespaces(final CharSequence text, final int fromIndex, int toIndex) {
while (toIndex > fromIndex) {
final int c = codePointBefore(text, toIndex);
if (!isWhitespace(c)) break;
toIndex -= charCount(c);
}
return toIndex;
}
/**
* Allocates the array to be returned by the {@code split(…)} methods. If the given {@code text} argument is
* an instance of {@link String}, {@link StringBuilder} or {@link StringBuffer}, then this method returns a
* {@code String[]} array instead of {@code CharSequence[]}. This is possible because the specification of
* their {@link CharSequence#subSequence(int, int)} method guarantees to return {@code String} instances.
* Some Apache SIS code will cast the {@code split(…)} return value based on this knowledge.
*
* <p>Note that this is a undocumented SIS features. There is currently no commitment that this implementation
* details will not change in future version.</p>
*
* @param text the text to be splitted.
* @return an array where to store the result of splitting the given {@code text}.
*/
private static CharSequence[] createSplitArray(final CharSequence text) {
return (text instanceof String ||
text instanceof StringBuilder ||
text instanceof StringBuffer) ? new String[8] : new CharSequence[8];
}
    /**
     * Splits a text around the given character. The array returned by this method contains all
     * subsequences of the given text that is terminated by the given character or is terminated
     * by the end of the text. The subsequences in the array are in the order in which they occur
     * in the given text. If the character is not found in the input, then the resulting array has
     * just one element, which is the whole given text.
     *
     * <p>This method is similar to the standard {@link String#split(String)} method except for the
     * following:</p>
     *
     * <ul>
     *   <li>It accepts generic character sequences.</li>
     *   <li>It accepts {@code null} argument, in which case an empty array is returned.</li>
     *   <li>The separator is a simple character instead of a regular expression.</li>
     *   <li>If the {@code separator} argument is {@code '\n'} or {@code '\r'}, then this method
     *       splits around any of {@code "\r"}, {@code "\n"} or {@code "\r\n"} characters sequences.
     *   <li>The leading and trailing spaces of each subsequences are trimmed.</li>
     * </ul>
     *
     * @param text the text to split, or {@code null}.
     * @param separator the delimiting character (typically the coma).
     * @return the array of subsequences computed by splitting the given text around the given
     * character, or an empty array if {@code text} was null.
     *
     * @see String#split(String)
     */
    @SuppressWarnings("ReturnOfCollectionOrArrayField")
    public static CharSequence[] split(final CharSequence text, final char separator) {
        if (text == null) {
            return EMPTY_ARRAY;
        }
        if (separator == '\n' || separator == '\r') {
            final CharSequence[] splitted = splitOnEOL(text);
            for (int i=0; i < splitted.length; i++) {
                // For consistency with the rest of this method.
                splitted[i] = trimWhitespaces(splitted[i]);
            }
            return splitted;
        }
        // 'excludeEmpty' must use the same criterion as trimWhitespaces(…).
        final boolean excludeEmpty = isWhitespace(separator);
        CharSequence[] splitted = createSplitArray(text);
        final int length = text.length();
        int count = 0, last = 0, i = 0;
        while ((i = indexOf(text, separator, i, length)) >= 0) {
            final CharSequence item = trimWhitespaces(text, last, i);
            if (!excludeEmpty || item.length() != 0) {
                if (count == splitted.length) {
                    splitted = Arrays.copyOf(splitted, count << 1);     // Double the capacity.
                }
                splitted[count++] = item;
            }
            last = ++i;                                                 // Resume the search after the separator.
        }
        // Add the last element.
        final CharSequence item = trimWhitespaces(text, last, length);
        if (!excludeEmpty || item.length() != 0) {
            if (count == splitted.length) {
                splitted = Arrays.copyOf(splitted, count + 1);
            }
            splitted[count++] = item;
        }
        return ArraysExt.resize(splitted, count);
    }
    /**
     * Splits a text around the <cite>End Of Line</cite> (EOL) characters.
     * EOL characters can be any of {@code "\r"}, {@code "\n"} or {@code "\r\n"} sequences.
     * Each element in the returned array will be a single line. If the given text is already
     * a single line, then this method returns a singleton containing only the given text.
     *
     * <p>Notes:</p>
     * <ul>
     *   <li>Unlike <code>{@linkplain #split split}(toSplit, '\n')</code>,
     *       this method does not remove whitespaces.</li>
     *   <li>This method does not check for Unicode
     *       {@linkplain Characters#LINE_SEPARATOR line separator} and
     *       {@linkplain Characters#PARAGRAPH_SEPARATOR paragraph separator}.</li>
     * </ul>
     *
     * <div class="note"><b>Performance note:</b>
     * Prior JDK8 this method was usually cheap because all string instances created by
     * {@link String#substring(int,int)} shared the same {@code char[]} internal array.
     * However since JDK8, the new {@code String} implementation copies the data in new arrays.
     * Consequently it is better to use index rather than this method for splitting large {@code String}s.
     * However this method still useful for other {@link CharSequence} implementations providing an efficient
     * {@code subSequence(int,int)} method.</div>
     *
     * @param text the multi-line text from which to get the individual lines, or {@code null}.
     * @return the lines in the text, or an empty array if the given text was null.
     *
     * @see #indexOfLineStart(CharSequence, int, int)
     */
    @SuppressWarnings("ReturnOfCollectionOrArrayField")
    public static CharSequence[] splitOnEOL(final CharSequence text) {
        if (text == null) {
            return EMPTY_ARRAY;
        }
        /*
         * This method is implemented on top of String.indexOf(int,int),
         * assuming that it will be faster for String and StringBuilder.
         * 'lf' and 'cr' always hold the position of the next "\n" and "\r"
         * occurrences respectively, or -1 when exhausted.
         */
        final int length = text.length();
        int lf = indexOf(text, '\n', 0, length);
        int cr = indexOf(text, '\r', 0, length);
        if (lf < 0 && cr < 0) {
            return new CharSequence[] {
                text                        // Single line: return the text as-is.
            };
        }
        int count = 0;
        CharSequence[] splitted = createSplitArray(text);
        int last = 0;                       // Start of the line currently being accumulated.
        boolean hasMore;
        do {
            int skip = 1;                   // Number of chars of the EOL sequence (2 for "\r\n").
            final int splitAt;
            if (cr < 0) {
                // There is no "\r" character in the whole text, only "\n".
                splitAt = lf;
                hasMore = (lf = indexOf(text, '\n', lf+1, length)) >= 0;
            } else if (lf < 0) {
                // There is no "\n" character in the whole text, only "\r".
                splitAt = cr;
                hasMore = (cr = indexOf(text, '\r', cr+1, length)) >= 0;
            } else if (lf < cr) {
                // There is both "\n" and "\r" characters with "\n" first.
                splitAt = lf;
                hasMore = true;
                lf = indexOf(text, '\n', lf+1, length);
            } else {
                // There is both "\r" and "\n" characters with "\r" first.
                // We need special care for the "\r\n" sequence.
                splitAt = cr;
                if (lf == ++cr) {
                    // The "\n" immediately follows the "\r": consume both as one EOL.
                    cr = indexOf(text, '\r', cr+1, length);
                    lf = indexOf(text, '\n', lf+1, length);
                    hasMore = (cr >= 0 || lf >= 0);
                    skip = 2;
                } else {
                    cr = indexOf(text, '\r', cr+1, length);
                    hasMore = true; // Because there is lf.
                }
            }
            if (count >= splitted.length) {
                splitted = Arrays.copyOf(splitted, count*2);
            }
            splitted[count++] = text.subSequence(last, splitAt);
            last = splitAt + skip;
        } while (hasMore);
        /*
         * Add the remaining string and we are done.
         */
        if (count >= splitted.length) {
            splitted = Arrays.copyOf(splitted, count+1);
        }
        splitted[count++] = text.subSequence(last, text.length());
        return ArraysExt.resize(splitted, count);
    }
/**
* Returns {@code true} if {@link #split(CharSequence, char)} parsed an empty string.
*/
private static boolean isEmpty(final CharSequence[] tokens) {
switch (tokens.length) {
case 0: return true;
case 1: return tokens[0].length() == 0;
default: return false;
}
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Double#parseDouble(String) parses} each item as a {@code double}.
* Empty sub-sequences are parsed as {@link Double#NaN}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static double[] parseDoubles(final CharSequence values, final char separator)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_DOUBLE;
final double[] parsed = new double[tokens.length];
for (int i=0; i<tokens.length; i++) {
final String token = trimWhitespaces(tokens[i]).toString();
parsed[i] = token.isEmpty() ? Double.NaN : Double.parseDouble(token);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Float#parseFloat(String) parses} each item as a {@code float}.
* Empty sub-sequences are parsed as {@link Float#NaN}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static float[] parseFloats(final CharSequence values, final char separator)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_FLOAT;
final float[] parsed = new float[tokens.length];
for (int i=0; i<tokens.length; i++) {
final String token = trimWhitespaces(tokens[i]).toString();
parsed[i] = token.isEmpty() ? Float.NaN : Float.parseFloat(token);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Long#parseLong(String) parses} each item as a {@code long}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static long[] parseLongs(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_LONG;
final long[] parsed = new long[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Long.parseLong(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Integer#parseInt(String) parses} each item as an {@code int}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static int[] parseInts(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_INT;
final int[] parsed = new int[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Integer.parseInt(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Short#parseShort(String) parses} each item as a {@code short}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static short[] parseShorts(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_SHORT;
final short[] parsed = new short[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Short.parseShort(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
/**
* {@linkplain #split(CharSequence, char) Splits} the given text around the given character,
* then {@linkplain Byte#parseByte(String) parses} each item as a {@code byte}.
*
* @param values the text containing the values to parse, or {@code null}.
* @param separator the delimiting character (typically the coma).
* @param radix the radix to be used for parsing. This is usually 10.
* @return the array of numbers parsed from the given text,
* or an empty array if {@code values} was null.
* @throws NumberFormatException if at least one number can not be parsed.
*/
public static byte[] parseBytes(final CharSequence values, final char separator, final int radix)
throws NumberFormatException
{
final CharSequence[] tokens = split(values, separator);
if (isEmpty(tokens)) return ArraysExt.EMPTY_BYTE;
final byte[] parsed = new byte[tokens.length];
for (int i=0; i<tokens.length; i++) {
parsed[i] = Byte.parseByte(trimWhitespaces(tokens[i]).toString(), radix);
}
return parsed;
}
    /**
     * Replaces some Unicode characters by ASCII characters on a "best effort basis".
     * For example the “ é ” character is replaced by “ e ” (without accent),
     * the “ ″ ” symbol for minutes of angle is replaced by straight double quotes “ " ”,
     * and combined characters like ㎏, ㎎, ㎝, ㎞, ㎢, ㎦, ㎖, ㎧, ㎩, ㎐, <i>etc.</i> are replaced
     * by the corresponding sequences of characters.
     *
     * <div class="note"><b>Note:</b>
     * the replacement of Greek letters is a more complex task than what this method can do,
     * since it depends on the context. For example if the Greek letters are abbreviations
     * for coordinate system axes like φ and λ, then the replacements depend on the enclosing
     * coordinate system. See {@link org.apache.sis.io.wkt.Transliterator} for more information.</div>
     *
     * @param  text  the text to scan for Unicode characters to replace by ASCII characters, or {@code null}.
     * @return the given text with substitutions applied, or {@code text} if no replacement
     *         has been applied, or {@code null} if the given text was null.
     *
     * @see StringBuilders#toASCII(StringBuilder)
     * @see org.apache.sis.io.wkt.Transliterator#filter(String)
     * @see java.text.Normalizer
     */
    public static CharSequence toASCII(final CharSequence text) {
        // Delegates to the shared implementation in StringBuilders. The null second argument
        // presumably means "no pre-existing buffer, create one only if a substitution occurs"
        // (which would explain the "returns text if no replacement" contract above) —
        // NOTE(review): confirm against StringBuilders.toASCII.
        return StringBuilders.toASCII(text, null);
    }
/**
* Returns a string with leading and trailing whitespace characters omitted.
* This method is similar in purpose to {@link String#trim()}, except that the later considers
* every {@linkplain Character#isISOControl(int) ISO control codes} below 32 to be a whitespace.
* That {@code String.trim()} behavior has the side effect of removing the heading of ANSI escape
* sequences (a.k.a. X3.64), and to ignore Unicode spaces. This {@code trimWhitespaces(…)} method
* is built on the more accurate {@link Character#isWhitespace(int)} method instead.
*
* <p>This method performs the same work than {@link #trimWhitespaces(CharSequence)},
* but is overloaded for the {@code String} type because of its frequent use.</p>
*
* @param text the text from which to remove leading and trailing whitespaces, or {@code null}.
* @return a string with leading and trailing whitespaces removed, or {@code null} is the given
* text was null.
*
* @todo To be replaced by {@link String#strip()} in JDK 11.
*/
public static String trimWhitespaces(String text) {
if (text != null) {
final int length = text.length();
final int lower = skipLeadingWhitespaces(text, 0, length);
text = text.substring(lower, skipTrailingWhitespaces(text, lower, length));
}
return text;
}
/**
* Returns a text with leading and trailing whitespace characters omitted.
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* <p>This method is the generic version of {@link #trimWhitespaces(String)}.</p>
*
* @param text the text from which to remove leading and trailing whitespaces, or {@code null}.
* @return a characters sequence with leading and trailing whitespaces removed,
* or {@code null} is the given text was null.
*
* @see #skipLeadingWhitespaces(CharSequence, int, int)
* @see #skipTrailingWhitespaces(CharSequence, int, int)
* @see String#strip()
*/
public static CharSequence trimWhitespaces(CharSequence text) {
if (text != null) {
text = trimWhitespaces(text, 0, text.length());
}
return text;
}
/**
* Returns a sub-sequence with leading and trailing whitespace characters omitted.
* Space characters are identified by the {@link Character#isWhitespace(int)} method.
*
* <p>Invoking this method is functionally equivalent to the following code snippet,
* except that the {@link CharSequence#subSequence(int, int) subSequence} method is
* invoked only once instead of two times:</p>
*
* {@preformat java
* text = trimWhitespaces(text.subSequence(lower, upper));
* }
*
* @param text the text from which to remove leading and trailing white spaces.
* @param lower index of the first character to consider for inclusion in the sub-sequence.
* @param upper index after the last character to consider for inclusion in the sub-sequence.
* @return a characters sequence with leading and trailing white spaces removed, or {@code null}
* if the {@code text} argument is null.
* @throws IndexOutOfBoundsException if {@code lower} or {@code upper} is out of bounds.
*/
public static CharSequence trimWhitespaces(CharSequence text, int lower, int upper) {
final int length = length(text);
ArgumentChecks.ensureValidIndexRange(length, lower, upper);
if (text != null) {
lower = skipLeadingWhitespaces (text, lower, upper);
upper = skipTrailingWhitespaces(text, lower, upper);
if (lower != 0 || upper != length) { // Safety in case subSequence doesn't make the check.
text = text.subSequence(lower, upper);
}
}
return text;
}
/**
* Trims the fractional part of the given formatted number, provided that it doesn't change
* the value. This method assumes that the number is formatted in the US locale, typically
* by the {@link Double#toString(double)} method.
*
* <p>More specifically if the given value ends with a {@code '.'} character followed by a
* sequence of {@code '0'} characters, then those characters are omitted. Otherwise this
* method returns the text unchanged. This is a <cite>"all or nothing"</cite> method:
* either the fractional part is completely removed, or either it is left unchanged.</p>
*
* <h4>Examples</h4>
* This method returns {@code "4"} if the given value is {@code "4."}, {@code "4.0"} or
* {@code "4.00"}, but returns {@code "4.10"} unchanged (including the trailing {@code '0'}
* character) if the input is {@code "4.10"}.
*
* <h4>Use case</h4>
* This method is useful before to {@linkplain Integer#parseInt(String) parse a number}
* if that number should preferably be parsed as an integer before attempting to parse
* it as a floating point number.
*
* @param value the value to trim if possible, or {@code null}.
* @return the value without the trailing {@code ".0"} part (if any),
* or {@code null} if the given text was null.
*
* @see StringBuilders#trimFractionalPart(StringBuilder)
*/
public static CharSequence trimFractionalPart(final CharSequence value) {
if (value != null) {
for (int i=value.length(); i>0;) {
final int c = codePointBefore(value, i);
i -= charCount(c);
switch (c) {
case '0': continue;
case '.': return value.subSequence(0, i);
default : return value;
}
}
}
return value;
}
    /**
     * Makes sure that the {@code text} string is not longer than {@code maxLength} characters.
     * If {@code text} is not longer, then it is returned unchanged. Otherwise this method returns
     * a copy of {@code text} with some characters substituted by the {@code "(…)"} string.
     *
     * <p>If the text needs to be shortened, then this method tries to apply the above-cited
     * substitution between two words. For example, the following text:</p>
     *
     * <blockquote>
     *   "This sentence given as an example is way too long to be included in a short name."
     * </blockquote>
     *
     * May be shortened to something like this:
     *
     * <blockquote>
     *   "This sentence given (…) in a short name."
     * </blockquote>
     *
     * @param  text       the sentence to reduce if it is too long, or {@code null}.
     * @param  maxLength  the maximum length allowed for {@code text}.
     * @return a sentence not longer than {@code maxLength}, or {@code null} if the given text was null.
     */
    public static CharSequence shortSentence(CharSequence text, final int maxLength) {
        ArgumentChecks.ensureStrictlyPositive("maxLength", maxLength);
        if (text != null) {
            final int length = text.length();
            int toRemove = length - maxLength;
            if (toRemove > 0) {
                toRemove += 5; // Space needed for the " (…) " string.
                // NOTE(review): for very small 'maxLength' values the result may still exceed
                // 'maxLength' because of the 5-character " (…) " insertion — confirm acceptable.
                /*
                 * We will remove characters from 'lower' to 'upper' both exclusive. We try to
                 * adjust 'lower' and 'upper' in such a way that the first and last characters
                 * to be removed will be spaces or punctuation characters.
                 */
                int lower = length >>> 1;
                // Do not start in the middle of a surrogate pair.
                if (lower != 0 && isLowSurrogate(text.charAt(lower))) {
                    lower--;
                }
                int upper = lower;
                // The removal interval [lower … upper] is grown alternately backward and
                // forward from the middle of the text, one word at a time per iteration.
                boolean forward = false;
                do { // To be run as long as we need to remove more characters.
                    int nc=0, type=UNASSIGNED;
                    forward = !forward;     // Alternate the scan direction at each iteration.
                    searchWordBreak:    while (true) {
                        final int c;
                        if (forward) {
                            if ((upper += nc) == length) break;
                            c = codePointAt(text, upper);
                        } else {
                            if ((lower -= nc) == 0) break;
                            c = codePointBefore(text, lower);
                        }
                        nc = charCount(c);
                        if (isWhitespace(c)) {
                            if (type != UNASSIGNED) {
                                type = SPACE_SEPARATOR;
                            }
                        } else switch (type) {
                            // After we skipped white, then non-white, then white characters, stop.
                            case SPACE_SEPARATOR: {
                                break searchWordBreak;
                            }
                            // For the first non-white character, just remember its type.
                            // Arbitrarily use UPPERCASE_LETTER for any kind of identifier
                            // part (which include UPPERCASE_LETTER anyway).
                            case UNASSIGNED: {
                                type = isUnicodeIdentifierPart(c) ? UPPERCASE_LETTER : getType(c);
                                break;
                            }
                            // If we expected an identifier, stop at the first other char.
                            case UPPERCASE_LETTER: {
                                if (!isUnicodeIdentifierPart(c)) {
                                    break searchWordBreak;
                                }
                                break;
                            }
                            // For all other kind of character, break when the type change.
                            default: {
                                if (getType(c) != type) {
                                    break searchWordBreak;
                                }
                                break;
                            }
                        }
                        toRemove -= nc;
                    }
                } while (toRemove > 0);
                // Concatenate the kept head and tail around the ellipsis marker.
                text = new StringBuilder(lower + (length-upper) + 5) // 5 is the length of " (…) "
                        .append(text, 0, lower).append(" (…) ").append(text, upper, length);
            }
        }
        return text;
    }
/**
* Given a string in upper cases (typically a Java constant), returns a string formatted
* like an English sentence. This heuristic method performs the following steps:
*
* <ol>
* <li>Replace all occurrences of {@code '_'} by spaces.</li>
* <li>Converts all letters except the first one to lower case letters using
* {@link Character#toLowerCase(int)}. Note that this method does not use
* the {@link String#toLowerCase()} method. Consequently the system locale
* is ignored. This method behaves as if the conversion were done in the
* {@linkplain java.util.Locale#ROOT root} locale.</li>
* </ol>
*
* <p>Note that those heuristic rules may be modified in future SIS versions,
* depending on the practical experience gained.</p>
*
* @param identifier the name of a Java constant, or {@code null}.
* @return the identifier like an English sentence, or {@code null}
* if the given {@code identifier} argument was null.
*/
public static CharSequence upperCaseToSentence(final CharSequence identifier) {
if (identifier == null) {
return null;
}
final StringBuilder buffer = new StringBuilder(identifier.length());
final int length = identifier.length();
for (int i=0; i<length;) {
int c = codePointAt(identifier, i);
if (i != 0) {
if (c == '_') {
c = ' ';
} else {
c = toLowerCase(c);
}
}
buffer.appendCodePoint(c);
i += charCount(c);
}
return buffer;
}
/**
* Given a string in camel cases (typically an identifier), returns a string formatted
* like an English sentence. This heuristic method performs the following steps:
*
* <ol>
* <li>Invoke {@link #camelCaseToWords(CharSequence, boolean)}, which separate the words
* on the basis of character case. For example {@code "transferFunctionType"} become
* <cite>"transfer function type"</cite>. This works fine for ISO 19115 identifiers.</li>
*
* <li>Next replace all occurrence of {@code '_'} by spaces in order to take in account
* an other common naming convention, which uses {@code '_'} as a word separator. This
* convention is used by netCDF attributes like {@code "project_name"}.</li>
*
* <li>Finally ensure that the first character is upper-case.</li>
* </ol>
*
* <h4>Exception to the above rules</h4>
* If the given identifier contains only upper-case letters, digits and the {@code '_'} character,
* then the identifier is returned "as is" except for the {@code '_'} characters which are replaced by {@code '-'}.
* This work well for identifiers like {@code "UTF-8"} or {@code "ISO-LATIN-1"} for instance.
*
* <p>Note that those heuristic rules may be modified in future SIS versions,
* depending on the practical experience gained.</p>
*
* @param identifier an identifier with no space, words begin with an upper-case character, or {@code null}.
* @return the identifier with spaces inserted after what looks like words, or {@code null}
* if the given {@code identifier} argument was null.
*/
public static CharSequence camelCaseToSentence(final CharSequence identifier) {
if (identifier == null) {
return null;
}
final StringBuilder buffer;
if (isCode(identifier)) {
if (identifier instanceof String) {
return ((String) identifier).replace('_', '-');
}
buffer = new StringBuilder(identifier);
StringBuilders.replace(buffer, '_', '-');
} else {
buffer = (StringBuilder) camelCaseToWords(identifier, true);
final int length = buffer.length();
if (length != 0) {
StringBuilders.replace(buffer, '_', ' ');
final int c = buffer.codePointAt(0);
final int up = toUpperCase(c);
if (c != up) {
StringBuilders.replace(buffer, 0, charCount(c), toChars(up));
}
}
}
return buffer;
}
    /**
     * Given a string in camel cases, returns a string with the same words separated by spaces.
     * A word begins with a upper-case character following a lower-case character. For example
     * if the given string is {@code "PixelInterleavedSampleModel"}, then this method returns
     * <cite>"Pixel Interleaved Sample Model"</cite> or <cite>"Pixel interleaved sample model"</cite>
     * depending on the value of the {@code toLowerCase} argument.
     *
     * <p>If {@code toLowerCase} is {@code false}, then this method inserts spaces but does not change
     * the case of characters. If {@code toLowerCase} is {@code true}, then this method changes
     * {@linkplain Character#toLowerCase(int) to lower case} the first character after each spaces
     * inserted by this method (note that this intentionally exclude the very first character in
     * the given string), except if the second character {@linkplain Character#isUpperCase(int)
     * is upper case}, in which case the word is assumed an acronym.</p>
     *
     * <p>The given string is usually a programmatic identifier like a class name or a method name.</p>
     *
     * @param  identifier   an identifier with no space, words begin with an upper-case character.
     * @param  toLowerCase  {@code true} for changing the first character of words to lower case,
     *         except for the first word and acronyms.
     * @return the identifier with spaces inserted after what looks like words, or {@code null}
     *         if the given {@code identifier} argument was null.
     */
    public static CharSequence camelCaseToWords(final CharSequence identifier, final boolean toLowerCase) {
        if (identifier == null) {
            return null;
        }
        /*
         * Implementation note: the 'camelCaseToSentence' method needs
         * this method to unconditionally returns a new StringBuilder.
         */
        final int length = identifier.length();
        final StringBuilder buffer = new StringBuilder(length + 8);
        // Index of the first character of the last code point (used for the acronym check below).
        final int lastIndex = (length != 0) ? length - charCount(codePointBefore(identifier, length)) : 0;
        int last = 0;       // Index of the first character of the current word.
        for (int i=1; i<=length;) {
            final int cp;
            final boolean doAppend;
            if (i == length) {
                // End of text: cp = 0 is a sentinel (charCount(0) is 1, which ends the loop),
                // and the pending word is unconditionally appended.
                cp = 0;
                doAppend = true;
            } else {
                // A word boundary is an upper-case character preceded by a lower-case one.
                cp = codePointAt(identifier, i);
                doAppend = Character.isUpperCase(cp) && isLowerCase(codePointBefore(identifier, i));
            }
            if (doAppend) {
                final int pos = buffer.length();
                buffer.append(identifier, last, i).append(' ');
                // Lower-case the first character of the appended word, but never the first word
                // (pos != 0) and never when the word looks like an acronym (second character
                // not lower-case, checked through codePointAfter).
                if (toLowerCase && pos!=0 && last<lastIndex && isLowerCase(codePointAfter(identifier, last))) {
                    final int c = buffer.codePointAt(pos);
                    final int low = toLowerCase(c);
                    if (c != low) {
                        StringBuilders.replace(buffer, pos, pos + charCount(c), toChars(low));
                    }
                }
                last = i;
            }
            i += charCount(cp);
        }
        /*
         * Removes the trailing space, if any.
         */
        final int lg = buffer.length();
        if (lg != 0) {
            final int cp = buffer.codePointBefore(lg);
            if (isWhitespace(cp)) {
                buffer.setLength(lg - charCount(cp));
            }
        }
        return buffer;
    }
/**
* Creates an acronym from the given text. This method returns a string containing the first character of each word,
* where the words are separated by the camel case convention, the {@code '_'} character, or any character which is
* not a {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier part} (including spaces).
*
* <p>An exception to the above rule happens if the given text is a Unicode identifier without the {@code '_'}
* character, and every characters are upper case. In such case the text is returned unchanged on the assumption
* that it is already an acronym.</p>
*
* <p><b>Examples:</b> given {@code "northEast"}, this method returns {@code "NE"}.
* Given {@code "Open Geospatial Consortium"}, this method returns {@code "OGC"}.</p>
*
* @param text the text for which to create an acronym, or {@code null}.
* @return the acronym, or {@code null} if the given text was null.
*/
public static CharSequence camelCaseToAcronym(CharSequence text) {
text = trimWhitespaces(text);
if (text != null && !isAcronym(text)) {
final int length = text.length();
final StringBuilder buffer = new StringBuilder(8); // Acronyms are usually short.
boolean wantChar = true;
for (int i=0; i<length;) {
final int c = codePointAt(text, i);
if (wantChar) {
if (isUnicodeIdentifierStart(c)) {
buffer.appendCodePoint(c);
wantChar = false;
}
} else if (!isUnicodeIdentifierPart(c) || c == '_') {
wantChar = true;
} else if (Character.isUpperCase(c)) {
// Test for mixed-case (e.g. "northEast").
// Note that i is guaranteed to be greater than 0 here.
if (!Character.isUpperCase(codePointBefore(text, i))) {
buffer.appendCodePoint(c);
}
}
i += charCount(c);
}
final int acrlg = buffer.length();
if (acrlg != 0) {
/*
* If every characters except the first one are upper-case, ensure that the
* first one is upper-case as well. This is for handling the identifiers which
* are compliant to Java-Beans convention (e.g. "northEast").
*/
if (isUpperCase(buffer, 1, acrlg, true)) {
final int c = buffer.codePointAt(0);
final int up = toUpperCase(c);
if (c != up) {
StringBuilders.replace(buffer, 0, charCount(c), toChars(up));
}
}
if (!equals(text, buffer)) {
text = buffer;
}
}
}
return text;
}
    /**
     * Returns {@code true} if the first string is likely to be an acronym of the second string.
     * An acronym is a sequence of {@linkplain Character#isLetterOrDigit(int) letters or digits}
     * built from at least one character of each word in the {@code words} string. More than
     * one character from the same word may appear in the acronym, but they must always
     * be the first consecutive characters. The comparison is case-insensitive.
     *
     * <div class="note"><b>Example:</b>
     * Given the {@code "Open Geospatial Consortium"} words, the following strings are recognized as acronyms:
     * {@code "OGC"}, {@code "ogc"}, {@code "O.G.C."}, {@code "OpGeoCon"}.</div>
     *
     * If any of the given arguments is {@code null}, this method returns {@code false}.
     *
     * @param  acronym  a possible acronym of the sequence of words, or {@code null}.
     * @param  words    the sequence of words, or {@code null}.
     * @return {@code true} if the first string is an acronym of the second one.
     */
    public static boolean isAcronymForWords(final CharSequence acronym, final CharSequence words) {
        // Find the first letter-or-digit of the acronym (false if there is none).
        final int lga = length(acronym);
        int ia=0, ca;
        do {
            if (ia >= lga) return false;
            ca = codePointAt(acronym, ia);
            ia += charCount(ca);
        } while (!isLetterOrDigit(ca));
        // Find the first letter-or-digit of the complete name (false if there is none).
        final int lgc = length(words);
        int ic=0, cc;
        do {
            if (ic >= lgc) return false;
            cc = codePointAt(words, ic);
            ic += charCount(cc);
        }
        while (!isLetterOrDigit(cc));
        if (toUpperCase(ca) != toUpperCase(cc)) {
            // The first letter must match.
            return false;
        }
        cmp:    while (ia < lga) {
            if (ic >= lgc) {
                // There is more letters in the acronym than in the complete name.
                return false;
            }
            ca = codePointAt(acronym, ia); ia += charCount(ca);
            cc = codePointAt(words, ic);   ic += charCount(cc);
            if (isLetterOrDigit(ca)) {
                if (toUpperCase(ca) == toUpperCase(cc)) {
                    // Acronym letter matches the letter from the complete name.
                    // Continue the comparison with next letter of both strings.
                    continue;
                }
                // Will search for the next word after the 'else' block.
            } else do {
                // Skip punctuation in the acronym (e.g. the dots in "O.G.C.").
                if (ia >= lga) break cmp;
                ca = codePointAt(acronym, ia);
                ia += charCount(ca);
            } while (!isLetterOrDigit(ca));
            /*
             * At this point, 'ca' is the next acronym letter to compare and we
             * need to search for the next word in the complete name. We first
             * skip remaining letters, then we skip non-letter characters.
             * The 'skipLetters' flag flips once: first pass skips letters,
             * second pass skips the separators up to the next word.
             */
            boolean skipLetters = true;
            do while (isLetterOrDigit(cc) == skipLetters) {
                if (ic >= lgc) {
                    return false;
                }
                cc = codePointAt(words, ic);
                ic += charCount(cc);
            } while ((skipLetters = !skipLetters) == false);
            // Now that we are aligned on a new word, the first letter must match.
            if (toUpperCase(ca) != toUpperCase(cc)) {
                return false;
            }
        }
        /*
         * Now that we have processed all acronym letters, the complete name can not have
         * any additional word. We can only finish the current word and skip trailing non-
         * letter characters.
         */
        boolean skipLetters = true;
        do {
            do {
                if (ic >= lgc) return true;
                cc = codePointAt(words, ic);
                ic += charCount(cc);
            } while (isLetterOrDigit(cc) == skipLetters);
        } while ((skipLetters = !skipLetters) == false);
        return false;
    }
/**
* Returns {@code true} if the given string contains only upper case letters or digits.
* A few punctuation characters like {@code '_'} and {@code '.'} are also accepted.
*
* <p>This method is used for identifying character strings that are likely to be code
* like {@code "UTF-8"} or {@code "ISO-LATIN-1"}.</p>
*
* @see #isUnicodeIdentifier(CharSequence)
*/
private static boolean isCode(final CharSequence identifier) {
for (int i=identifier.length(); --i>=0;) {
final char c = identifier.charAt(i);
// No need to use the code point API here, since the conditions
// below are requiring the characters to be in the basic plane.
if (!((c >= 'A' && c <= 'Z') || (c >= '-' && c <= ':') || c == '_')) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the given text is presumed to be an acronym. Acronyms are presumed
* to be valid Unicode identifiers in all upper-case letters and without the {@code '_'} character.
*
* @see #camelCaseToAcronym(CharSequence)
*/
private static boolean isAcronym(final CharSequence text) {
return isUpperCase(text) && indexOf(text, '_', 0, text.length()) < 0 && isUnicodeIdentifier(text);
}
/**
* Returns {@code true} if the given identifier is a legal Unicode identifier.
* This method returns {@code true} if the identifier length is greater than zero,
* the first character is a {@linkplain Character#isUnicodeIdentifierStart(int)
* Unicode identifier start} and all remaining characters (if any) are
* {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier parts}.
*
* <h4>Relationship with legal XML identifiers</h4>
* Most legal Unicode identifiers are also legal XML identifiers, but the converse is not true.
* The most noticeable differences are the ‘{@code :}’, ‘{@code -}’ and ‘{@code .}’ characters,
* which are legal in XML identifiers but not in Unicode.
*
* <table class="sis">
* <caption>Characters legal in one set but not in the other</caption>
* <tr><th colspan="2">Not legal in Unicode</th> <th class="sep" colspan="2">Not legal in XML</th></tr>
* <tr><td>{@code :}</td><td>(colon)</td> <td class="sep">{@code µ}</td><td>(micro sign)</td></tr>
* <tr><td>{@code -}</td><td>(hyphen or minus)</td> <td class="sep">{@code ª}</td><td>(feminine ordinal indicator)</td></tr>
* <tr><td>{@code .}</td><td>(dot)</td> <td class="sep">{@code º}</td><td>(masculine ordinal indicator)</td></tr>
* <tr><td>{@code ·}</td><td>(middle dot)</td> <td class="sep">{@code ⁔}</td><td>(inverted undertie)</td></tr>
* <tr>
* <td colspan="2">Many punctuation, symbols, <i>etc</i>.</td>
* <td colspan="2" class="sep">{@linkplain Character#isIdentifierIgnorable(int) Identifier ignorable} characters.</td>
* </tr>
* </table>
*
* Note that the ‘{@code _}’ (underscore) character is legal according both Unicode and XML, while spaces,
* ‘{@code !}’, ‘{@code #}’, ‘{@code *}’, ‘{@code /}’, ‘{@code ?}’ and most other punctuation characters are not.
*
* <h4>Usage in Apache SIS</h4>
* In its handling of {@linkplain org.apache.sis.referencing.ImmutableIdentifier identifiers}, Apache SIS favors
* Unicode identifiers without {@linkplain Character#isIdentifierIgnorable(int) ignorable} characters since those
* identifiers are legal XML identifiers except for the above-cited rarely used characters. As a side effect,
* this policy excludes ‘{@code :}’, ‘{@code -}’ and ‘{@code .}’ which would normally be legal XML identifiers.
* But since those characters could easily be confused with
* {@linkplain org.apache.sis.util.iso.DefaultNameSpace#DEFAULT_SEPARATOR namespace separators},
* this exclusion is considered desirable.
*
* @param identifier the character sequence to test, or {@code null}.
* @return {@code true} if the given character sequence is a legal Unicode identifier.
*
* @see org.apache.sis.referencing.ImmutableIdentifier
* @see org.apache.sis.metadata.iso.citation.Citations#toCodeSpace(Citation)
* @see org.apache.sis.referencing.IdentifiedObjects#getSimpleNameOrIdentifier(IdentifiedObject)
*/
public static boolean isUnicodeIdentifier(final CharSequence identifier) {
final int length = length(identifier);
if (length == 0) {
return false;
}
int c = codePointAt(identifier, 0);
if (!isUnicodeIdentifierStart(c)) {
return false;
}
for (int i=0; (i += charCount(c)) < length;) {
c = codePointAt(identifier, i);
if (!isUnicodeIdentifierPart(c)) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the given text is non-null, contains at least one upper-case character and
* no lower-case character. Space and punctuation are ignored.
*
* @param text the character sequence to test (may be {@code null}).
* @return {@code true} if non-null, contains at least one upper-case character and no lower-case character.
*
* @see String#toUpperCase()
*
* @since 0.7
*/
public static boolean isUpperCase(final CharSequence text) {
return isUpperCase(text, 0, length(text), false);
}
/**
* Returns {@code true} if the given sub-sequence is non-null, contains at least one upper-case character and
* no lower-case character. Space and punctuation are ignored.
*
* @param text the character sequence to test.
* @param lower index of the first character to check, inclusive.
* @param upper index of the last character to check, exclusive.
* @param hasUpperCase {@code true} if this method should behave as if the given text already had
* at least one upper-case character (not necessarily in the portion given by the indices).
* @return {@code true} if contains at least one upper-case character and no lower-case character.
*/
private static boolean isUpperCase(final CharSequence text, int lower, final int upper, boolean hasUpperCase) {
while (lower < upper) {
final int c = codePointAt(text, lower);
if (Character.isLowerCase(c)) {
return false;
}
if (!hasUpperCase) {
hasUpperCase = Character.isUpperCase(c);
}
lower += charCount(c);
}
return hasUpperCase;
}
    /**
     * Returns {@code true} if the given texts are equal, optionally ignoring case and filtered-out characters.
     * This method is sometimes used for comparing identifiers in a lenient way.
     *
     * <p><b>Example:</b> the following call compares the two strings ignoring case and any
     * characters which are not {@linkplain Character#isLetterOrDigit(int) letter or digit}.
     * In particular, spaces and punctuation characters like {@code '_'} and {@code '-'} are
     * ignored:</p>
     *
     * {@preformat java
     *     assert equalsFiltered("WGS84", "WGS_84", Characters.Filter.LETTERS_AND_DIGITS, true) == true;
     * }
     *
     * @param s1 the first characters sequence to compare, or {@code null}.
     * @param s2 the second characters sequence to compare, or {@code null}.
     * @param filter the subset of characters to compare, or {@code null} for comparing all characters.
     * @param ignoreCase {@code true} for ignoring cases, or {@code false} for requiring exact match.
     * @return {@code true} if both arguments are {@code null} or if the two given texts are equal,
     *         optionally ignoring case and filtered-out characters.
     */
    public static boolean equalsFiltered(final CharSequence s1, final CharSequence s2,
            final Characters.Filter filter, final boolean ignoreCase)
    {
        if (s1 == s2) {
            return true;                            // Also covers the (null, null) case.
        }
        if (s1 == null || s2 == null) {
            return false;
        }
        if (filter == null) {
            // No filtering requested: fall back on the plain (possibly case-insensitive) comparison.
            return ignoreCase ? equalsIgnoreCase(s1, s2) : equals(s1, s2);
        }
        final int lg1 = s1.length();
        final int lg2 = s2.length();
        // Two cursors advance in parallel, each skipping the characters rejected by the filter.
        int i1 = 0, i2 = 0;
        while (i1 < lg1) {
            int c1 = codePointAt(s1, i1);
            final int n = charCount(c1);
            if (filter.contains(c1)) {
                int c2; // Fetch the next significant character from the second string.
                do {
                    if (i2 >= lg2) {
                        return false; // The first string has more significant characters than expected.
                    }
                    c2 = codePointAt(s2, i2);
                    i2 += charCount(c2);
                } while (!filter.contains(c2));
                // Compare the characters in the same way as String.equalsIgnoreCase(String).
                if (c1 != c2 && !(ignoreCase && equalsIgnoreCase(c1, c2))) {
                    return false;
                }
            }
            i1 += n;
        }
        // The first string is exhausted: the second must have no remaining significant character.
        while (i2 < lg2) {
            final int s = codePointAt(s2, i2);
            if (filter.contains(s)) {
                return false; // The first string has less significant characters than expected.
            }
            i2 += charCount(s);
        }
        return true;
    }
/**
* Returns {@code true} if the given code points are equal, ignoring case.
* This method implements the same comparison algorithm than String#equalsIgnoreCase(String).
*
* <p>This method does not verify if {@code c1 == c2}. This check should have been done
* by the caller, since the caller code is a more optimal place for this check.</p>
*/
private static boolean equalsIgnoreCase(int c1, int c2) {
c1 = toUpperCase(c1);
c2 = toUpperCase(c2);
if (c1 == c2) {
return true;
}
// Need this check for Georgian alphabet.
return toLowerCase(c1) == toLowerCase(c2);
}
/**
* Returns {@code true} if the two given texts are equal, ignoring case.
* This method is similar to {@link String#equalsIgnoreCase(String)}, except
* it works on arbitrary character sequences and compares <cite>code points</cite>
* instead of characters.
*
* @param s1 the first string to compare, or {@code null}.
* @param s2 the second string to compare, or {@code null}.
* @return {@code true} if the two given texts are equal, ignoring case,
* or if both arguments are {@code null}.
*
* @see String#equalsIgnoreCase(String)
*/
public static boolean equalsIgnoreCase(final CharSequence s1, final CharSequence s2) {
if (s1 == s2) {
return true;
}
if (s1 == null || s2 == null) {
return false;
}
// Do not check for String cases. We do not want to delegate to String.equalsIgnoreCase
// because we compare code points while String.equalsIgnoreCase compares characters.
final int lg1 = s1.length();
final int lg2 = s2.length();
int i1 = 0, i2 = 0;
while (i1<lg1 && i2<lg2) {
final int c1 = codePointAt(s1, i1);
final int c2 = codePointAt(s2, i2);
if (c1 != c2 && !equalsIgnoreCase(c1, c2)) {
return false;
}
i1 += charCount(c1);
i2 += charCount(c2);
}
return i1 == i2;
}
/**
* Returns {@code true} if the two given texts are equal. This method delegates to
* {@link String#contentEquals(CharSequence)} if possible. This method never invoke
* {@link CharSequence#toString()} in order to avoid a potentially large copy of data.
*
* @param s1 the first string to compare, or {@code null}.
* @param s2 the second string to compare, or {@code null}.
* @return {@code true} if the two given texts are equal, or if both arguments are {@code null}.
*
* @see String#contentEquals(CharSequence)
*/
public static boolean equals(final CharSequence s1, final CharSequence s2) {
if (s1 == s2) {
return true;
}
if (s1 != null && s2 != null) {
if (s1 instanceof String) return ((String) s1).contentEquals(s2);
if (s2 instanceof String) return ((String) s2).contentEquals(s1);
final int length = s1.length();
if (s2.length() == length) {
for (int i=0; i<length; i++) {
if (s1.charAt(i) != s2.charAt(i)) {
return false;
}
}
return true;
}
}
return false;
}
/**
* Returns {@code true} if the given text at the given offset contains the given part,
* in a case-sensitive comparison. This method is equivalent to the following code,
* except that this method works on arbitrary {@link CharSequence} objects instead of
* {@link String}s only:
*
* {@preformat java
* return text.regionMatches(offset, part, 0, part.length());
* }
*
* This method does not thrown {@code IndexOutOfBoundsException}. Instead if
* {@code fromIndex < 0} or {@code fromIndex + part.length() > text.length()},
* then this method returns {@code false}.
*
* @param text the character sequence for which to tests for the presence of {@code part}.
* @param fromIndex the offset in {@code text} where to test for the presence of {@code part}.
* @param part the part which may be present in {@code text}.
* @return {@code true} if {@code text} contains {@code part} at the given {@code offset}.
* @throws NullPointerException if any of the arguments is null.
*
* @see String#regionMatches(int, String, int, int)
*/
public static boolean regionMatches(final CharSequence text, final int fromIndex, final CharSequence part) {
if (text instanceof String && part instanceof String) {
// It is okay to delegate to String implementation since we do not ignore cases.
return ((String) text).startsWith((String) part, fromIndex);
}
final int length;
if (fromIndex < 0 || fromIndex + (length = part.length()) > text.length()) {
return false;
}
for (int i=0; i<length; i++) {
// No need to use the code point API here, since we are looking for exact matches.
if (text.charAt(fromIndex + i) != part.charAt(i)) {
return false;
}
}
return true;
}
/**
* Returns {@code true} if the given text at the given offset contains the given part,
* optionally in a case-insensitive way. This method is equivalent to the following code,
* except that this method works on arbitrary {@link CharSequence} objects instead of
* {@link String}s only:
*
* {@preformat java
* return text.regionMatches(ignoreCase, offset, part, 0, part.length());
* }
*
* This method does not thrown {@code IndexOutOfBoundsException}. Instead if
* {@code fromIndex < 0} or {@code fromIndex + part.length() > text.length()},
* then this method returns {@code false}.
*
* @param text the character sequence for which to tests for the presence of {@code part}.
* @param fromIndex the offset in {@code text} where to test for the presence of {@code part}.
* @param part the part which may be present in {@code text}.
* @param ignoreCase {@code true} if the case should be ignored.
* @return {@code true} if {@code text} contains {@code part} at the given {@code offset}.
* @throws NullPointerException if any of the arguments is null.
*
* @see String#regionMatches(boolean, int, String, int, int)
*
* @since 0.4
*/
public static boolean regionMatches(final CharSequence text, int fromIndex, final CharSequence part, final boolean ignoreCase) {
if (!ignoreCase) {
return regionMatches(text, fromIndex, part);
}
// Do not check for String cases. We do not want to delegate to String.regionMatches
// because we compare code points while String.regionMatches(…) compares characters.
final int limit = text.length();
final int length = part.length();
if (fromIndex < 0) { // Not checked before because we want NullPointerException if an argument is null.
return false;
}
for (int i=0; i<length;) {
if (fromIndex >= limit) {
return false;
}
final int c1 = codePointAt(part, i);
final int c2 = codePointAt(text, fromIndex);
if (c1 != c2 && !equalsIgnoreCase(c1, c2)) {
return false;
}
fromIndex += charCount(c2);
i += charCount(c1);
}
return true;
}
    /**
     * Returns {@code true} if the given character sequence starts with the given prefix.
     * This is a convenience method equivalent to {@code regionMatches(text, 0, prefix, ignoreCase)}.
     *
     * @param text the characters sequence to test.
     * @param prefix the expected prefix.
     * @param ignoreCase {@code true} if the case should be ignored.
     * @return {@code true} if the given sequence starts with the given prefix.
     * @throws NullPointerException if any of the arguments is null.
     */
    public static boolean startsWith(final CharSequence text, final CharSequence prefix, final boolean ignoreCase) {
        return regionMatches(text, 0, prefix, ignoreCase);
    }
/**
* Returns {@code true} if the given character sequence ends with the given suffix.
*
* @param text the characters sequence to test.
* @param suffix the expected suffix.
* @param ignoreCase {@code true} if the case should be ignored.
* @return {@code true} if the given sequence ends with the given suffix.
* @throws NullPointerException if any of the arguments is null.
*/
public static boolean endsWith(final CharSequence text, final CharSequence suffix, final boolean ignoreCase) {
int is = text.length();
int ip = suffix.length();
while (ip > 0) {
if (is <= 0) {
return false;
}
final int cs = codePointBefore(text, is);
final int cp = codePointBefore(suffix, ip);
if (cs != cp && (!ignoreCase || !equalsIgnoreCase(cs, cp))) {
return false;
}
is -= charCount(cs);
ip -= charCount(cp);
}
return true;
}
/**
* Returns the longest sequence of characters which is found at the beginning of the two given texts.
* If one of those texts is {@code null}, then the other text is returned.
* If there is no common prefix, then this method returns an empty string.
*
* @param s1 the first text, or {@code null}.
* @param s2 the second text, or {@code null}.
* @return the common prefix of both texts (may be empty), or {@code null} if both texts are null.
*/
public static CharSequence commonPrefix(final CharSequence s1, final CharSequence s2) {
if (s1 == null) return s2;
if (s2 == null) return s1;
final CharSequence shortest;
final int lg1 = s1.length();
final int lg2 = s2.length();
final int length;
if (lg1 <= lg2) {
shortest = s1;
length = lg1;
} else {
shortest = s2;
length = lg2;
}
int i = 0;
while (i < length) {
// No need to use the codePointAt API here, since we are looking for exact matches.
if (s1.charAt(i) != s2.charAt(i)) {
break;
}
i++;
}
return shortest.subSequence(0, i);
}
/**
* Returns the longest sequence of characters which is found at the end of the two given texts.
* If one of those texts is {@code null}, then the other text is returned.
* If there is no common suffix, then this method returns an empty string.
*
* @param s1 the first text, or {@code null}.
* @param s2 the second text, or {@code null}.
* @return the common suffix of both texts (may be empty), or {@code null} if both texts are null.
*/
public static CharSequence commonSuffix(final CharSequence s1, final CharSequence s2) {
if (s1 == null) return s2;
if (s2 == null) return s1;
final CharSequence shortest;
final int lg1 = s1.length();
final int lg2 = s2.length();
final int length;
if (lg1 <= lg2) {
shortest = s1;
length = lg1;
} else {
shortest = s2;
length = lg2;
}
int i = 0;
while (++i <= length) {
// No need to use the codePointAt API here, since we are looking for exact matches.
if (s1.charAt(lg1 - i) != s2.charAt(lg2 - i)) {
break;
}
}
i--;
return shortest.subSequence(length - i, shortest.length());
}
    /**
     * Returns the words found at the beginning and end of both texts.
     * The returned string is the concatenation of the {@linkplain #commonPrefix common prefix}
     * with the {@linkplain #commonSuffix common suffix}, with prefix and suffix eventually made
     * shorter for avoiding to cut in the middle of a word.
     *
     * <p>The purpose of this method is to create a global identifier from a list of component identifiers.
     * The latter are often eastward and northward components of a vector, in which case this method provides
     * an identifier for the vector as a whole.</p>
     *
     * <div class="note"><b>Example:</b>
     * given the following inputs:
     * <ul>
     *   <li>{@code "baroclinic_eastward_velocity"}</li>
     *   <li>{@code "baroclinic_northward_velocity"}</li>
     * </ul>
     * This method returns {@code "baroclinic_velocity"}. Note that the {@code "ward"} characters
     * are a common suffix of both texts but nevertheless omitted because they cut a word.</div>
     *
     * <p>If one of those texts is {@code null}, then the other text is returned.
     * If there is no common words, then this method returns an empty string.</p>
     *
     * <h4>Possible future evolution</h4>
     * Current implementation searches only for a common prefix and a common suffix, ignoring any common words
     * that may appear in the middle of the strings. A character is considered the beginning of a word if it is
     * {@linkplain Character#isLetterOrDigit(int) a letter or digit} which is not preceded by another letter or
     * digit (as leading "s" and "c" in "snake_case"), or if it is an {@linkplain Character#isUpperCase(int)
     * upper case} letter preceded by a {@linkplain Character#isLowerCase(int) lower case} letter or no letter
     * (as both "C" in "CamelCase").
     *
     * @param s1 the first text, or {@code null}.
     * @param s2 the second text, or {@code null}.
     * @return the common words of both texts (may be empty), or {@code null} if both texts are null.
     *
     * @since 1.1
     */
    public static CharSequence commonWords(final CharSequence s1, final CharSequence s2) {
        final int lg1 = length(s1);
        final int lg2 = length(s2);
        final int shortestLength = Math.min(lg1, lg2); // 0 if s1 or s2 is null, in which case prefix and suffix will have the other value.
        // Shortcut: if the raw common prefix (or suffix) covers the whole shortest input, it is the answer.
        final CharSequence prefix = commonPrefix(s1, s2); int prefixLength = length(prefix); if (prefixLength >= shortestLength) return prefix;
        final CharSequence suffix = commonSuffix(s1, s2); int suffixLength = length(suffix); if (suffixLength >= shortestLength) return suffix;
        final int length = prefixLength + suffixLength;
        if (length >= lg1) return s1; // Check if one of the strings is already equal to prefix + suffix.
        if (length >= lg2) return s2;
        /*
         * At this point `s1` and `s2` contain at least one character between the prefix and the suffix.
         * If the prefix or the suffix seems to stop in the middle of a word, skip the remaining of that word.
         * For example if `s1` and `s2` are "eastward_velocity" and "northward_velocity", the common suffix is
         * "ward_velocity" but we want to retain only "velocity".
         *
         * The first condition below (before the loop) checks the character after the common prefix (for example "e"
         * in "baroclinic_eastward_velocity" if the prefix is "baroclinic_"). The intent is to handle the case where
         * the word separator is not the same (e.g. "baroclinic_eastward_velocity" and "baroclinic northward velocity",
         * in which case the '_' or ' ' character would not appear in the prefix).
         */
        if (!isWordBoundary(s1, prefixLength, codePointAt(s1, prefixLength)) &&
            !isWordBoundary(s2, prefixLength, codePointAt(s2, prefixLength)))
        {
            // Walk the prefix backward until a word boundary is found.
            while (prefixLength > 0) {
                final int c = codePointBefore(prefix, prefixLength);
                final int n = charCount(c);
                prefixLength -= n;
                if (isWordBoundary(prefix, prefixLength, c)) {
                    if (!isLetterOrDigit(c)) prefixLength += n; // Keep separator character.
                    break;
                }
            }
        }
        /*
         * Same process than for the prefix above. The condition before the loop checks the character before suffix
         * for the same reason than above, but using only `isLetterOrDigit` ignoring camel-case. The reason is that
         * if the character before was a word separator according camel-case convention (i.e. an upper-case letter),
         * we would need to include it in the common suffix.
         */
        int suffixStart = 0;
        if (isLetterOrDigit(codePointBefore(s1, lg1 - suffixLength)) &&
            isLetterOrDigit(codePointBefore(s2, lg2 - suffixLength)))
        {
            // Walk the suffix forward until a word boundary is found.
            while (suffixStart < suffixLength) {
                final int c = codePointAt(suffix, suffixStart);
                if (isWordBoundary(suffix, suffixStart, c)) break;
                suffixStart += charCount(c);
            }
        }
        /*
         * At this point we got the final prefix and suffix to use. If the prefix or suffix is empty,
         * trim whitespaces or '_' character. For example if the suffix is "_velocity" and no prefix,
         * return "velocity" without leading "_" character.
         */
        if (prefixLength == 0) {
            while (suffixStart < suffixLength) {
                final int c = codePointAt(suffix, suffixStart);
                if (isLetterOrDigit(c)) {
                    return suffix.subSequence(suffixStart, suffixLength); // Skip leading ignorable characters in suffix.
                }
                suffixStart += charCount(c);
            }
            return "";
        }
        if (suffixStart >= suffixLength) {
            while (prefixLength > 0) {
                final int c = codePointBefore(prefix, prefixLength);
                if (isLetterOrDigit(c)) {
                    return prefix.subSequence(0, prefixLength); // Skip trailing ignorable characters in prefix.
                }
                prefixLength -= charCount(c);
            }
            return "";
        }
        /*
         * All special cases have been examined. Return the concatenation of (possibly shortened)
         * common prefix and suffix.
         */
        final StringBuilder buffer = new StringBuilder(prefixLength + suffixLength).append(prefix);
        final int c1 = codePointBefore(prefix, prefixLength);
        final int c2 = codePointAt(suffix, suffixStart);
        if (isLetterOrDigit(c1) && isLetterOrDigit(c2)) {
            if (!Character.isUpperCase(c2) || !isLowerCase(c1)) {
                buffer.append(' '); // Keep a separator between two words (except if CamelCase is used).
            }
        } else if (c1 == c2) {
            suffixStart += charCount(c2); // Avoid repeating '_' in e.g. "baroclinic_<removed>_velocity".
        }
        return buffer.append(suffix, suffixStart, suffixLength).toString();
    }
/**
* Returns {@code true} if the character {@code c} is the beginning of a word or a non-word character.
* For example this method returns {@code true} if {@code c} is {@code '_'} in {@code "snake_case"} or
* {@code "C"} in {@code "CamelCase"}.
*
* @param s the character sequence from which the {@code c} character has been obtained.
* @param i the index in {@code s} where the {@code c} character has been obtained.
* @param c the code point in {@code s} as index {@code i}.
* @return whether the given character is the beginning of a word or a non-word character.
*/
private static boolean isWordBoundary(final CharSequence s, final int i, final int c) {
if (!isLetterOrDigit(c)) return true;
if (!Character.isUpperCase(c)) return false;
return (i <= 0 || isLowerCase(codePointBefore(s, i)));
}
/**
* Returns the token starting at the given offset in the given text. For the purpose of this
* method, a "token" is any sequence of consecutive characters of the same type, as defined
* below.
*
* <p>Let define <var>c</var> as the first non-blank character located at an index equals or
* greater than the given offset. Then the characters that are considered of the same type
* are:</p>
*
* <ul>
* <li>If <var>c</var> is a
* {@linkplain Character#isUnicodeIdentifierStart(int) Unicode identifier start},
* then any following characters that are
* {@linkplain Character#isUnicodeIdentifierPart(int) Unicode identifier part}.</li>
* <li>Otherwise any character for which {@link Character#getType(int)} returns
* the same value than for <var>c</var>.</li>
* </ul>
*
* @param text the text for which to get the token.
* @param fromIndex index of the fist character to consider in the given text.
* @return a sub-sequence of {@code text} starting at the given offset, or an empty string
* if there is no non-blank character at or after the given offset.
* @throws NullPointerException if the {@code text} argument is null.
*/
public static CharSequence token(final CharSequence text, int fromIndex) {
final int length = text.length();
int upper = fromIndex;
/*
* Skip whitespaces. At the end of this loop,
* 'c' will be the first non-blank character.
*/
int c;
do {
if (upper >= length) return "";
c = codePointAt(text, upper);
fromIndex = upper;
upper += charCount(c);
}
while (isWhitespace(c));
/*
* Advance over all characters "of the same type".
*/
if (isUnicodeIdentifierStart(c)) {
while (upper<length && isUnicodeIdentifierPart(c = codePointAt(text, upper))) {
upper += charCount(c);
}
} else {
final int type = getType(codePointAt(text, fromIndex));
while (upper<length && getType(c = codePointAt(text, upper)) == type) {
upper += charCount(c);
}
}
return text.subSequence(fromIndex, upper);
}
/**
* Replaces all occurrences of a given string in the given character sequence. If no occurrence of
* {@code toSearch} is found in the given text or if {@code toSearch} is equal to {@code replaceBy},
* then this method returns the {@code text} unchanged.
* Otherwise this method returns a new character sequence with all occurrences replaced by {@code replaceBy}.
*
* <p>This method is similar to {@link String#replace(CharSequence, CharSequence)} except that is accepts
* arbitrary {@code CharSequence} objects. As of Java 10, another difference is that this method does not
* create a new {@code String} if {@code toSearch} is equals to {@code replaceBy}.</p>
*
* @param text the character sequence in which to perform the replacements, or {@code null}.
* @param toSearch the string to replace.
* @param replaceBy the replacement for the searched string.
* @return the given text with replacements applied, or {@code text} if no replacement has been applied,
* or {@code null} if the given text was null
*
* @see String#replace(char, char)
* @see StringBuilders#replace(StringBuilder, String, String)
* @see String#replace(CharSequence, CharSequence)
*
* @since 0.4
*/
public static CharSequence replace(final CharSequence text, final CharSequence toSearch, final CharSequence replaceBy) {
ArgumentChecks.ensureNonEmpty("toSearch", toSearch);
ArgumentChecks.ensureNonNull ("replaceBy", replaceBy);
if (text != null && !toSearch.equals(replaceBy)) {
if (text instanceof String) {
return ((String) text).replace(toSearch, replaceBy);
}
final int length = text.length();
int i = indexOf(text, toSearch, 0, length);
if (i >= 0) {
int p = 0;
final int sl = toSearch.length();
final StringBuilder buffer = new StringBuilder(length + (replaceBy.length() - sl));
do {
buffer.append(text, p, i).append(replaceBy);
i = indexOf(text, toSearch, p = i + sl, length);
} while (i >= 0);
return buffer.append(text, p, length);
}
}
return text;
}
/**
* Copies a sequence of characters in the given {@code char[]} array.
*
* @param src the characters sequence from which to copy characters.
* @param srcOffset index of the first character from {@code src} to copy.
* @param dst the array where to copy the characters.
* @param dstOffset index where to write the first character in {@code dst}.
* @param length number of characters to copy.
*
* @see String#getChars(int, int, char[], int)
* @see StringBuilder#getChars(int, int, char[], int)
* @see StringBuffer#getChars(int, int, char[], int)
* @see CharBuffer#get(char[], int, int)
* @see javax.swing.text.Segment#array
*/
public static void copyChars(final CharSequence src, int srcOffset,
final char[] dst, int dstOffset, int length)
{
ArgumentChecks.ensurePositive("length", length);
if (src instanceof String) {
((String) src).getChars(srcOffset, srcOffset + length, dst, dstOffset);
} else if (src instanceof StringBuilder) {
((StringBuilder) src).getChars(srcOffset, srcOffset + length, dst, dstOffset);
} else if (src instanceof StringBuffer) {
((StringBuffer) src).getChars(srcOffset, srcOffset + length, dst, dstOffset);
} else if (src instanceof CharBuffer) {
((CharBuffer) src).subSequence(srcOffset, srcOffset + length).get(dst, dstOffset, length);
} else {
/*
* Another candidate could be `javax.swing.text.Segment`, but it
* is probably not worth to introduce a Swing dependency for it.
*/
while (length != 0) {
dst[dstOffset++] = src.charAt(srcOffset++);
length--;
}
}
}
}
| apache-2.0 |
pfirmstone/JGDMS | qa/src/org/apache/river/test/spec/config/configurationprovider/ValidConfigurationWithoutDefaults.java | 2339 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.river.test.spec.config.configurationprovider;
import net.jini.config.Configuration;
import net.jini.config.ConfigurationProvider;
import net.jini.config.ConfigurationException;
import net.jini.config.ConfigurationNotFoundException;
/**
 * Some Configuration that can be instantiated but can not be really used.
 * This configuration provider doesn't have defaults for options.
 *
 * <p>The static {@link #wasCalled} flag lets tests verify that one of the
 * constructors has been invoked.</p>
 */
public class ValidConfigurationWithoutDefaults implements Configuration {

    /** Set to {@code true} the first time any constructor of this class is invoked. */
    public static boolean wasCalled = false;

    /**
     * Creates a configuration from the given options, without class loader.
     *
     * @param options the configuration options, or {@code null}.
     * @throws ConfigurationNotFoundException if {@code options} is {@code null}.
     */
    public ValidConfigurationWithoutDefaults(String[] options)
        throws ConfigurationException {
        // The delegated constructor already records the call in `wasCalled`,
        // so the formerly redundant assignment after this line was removed.
        this(options, null);
    }

    /**
     * Creates a configuration from the given options and class loader.
     *
     * @param options the configuration options, or {@code null}.
     * @param cl the class loader (ignored by this implementation).
     * @throws ConfigurationNotFoundException if {@code options} is {@code null}.
     */
    public ValidConfigurationWithoutDefaults(String[] options, ClassLoader cl)
        throws ConfigurationException {
        wasCalled = true;
        if (options == null) {
            throw new ConfigurationNotFoundException(
                    "default options are not supplied");
        }
    }

    /** Not supported: this configuration can not really be used. */
    public Object getEntry(String component, String name, Class type)
            throws ConfigurationException {
        throw new AssertionError();
    }

    /** Not supported: this configuration can not really be used. */
    public Object getEntry(String component, String name, Class type,
            Object defaultValue) throws ConfigurationException {
        throw new AssertionError();
    }

    /** Not supported: this configuration can not really be used. */
    public Object getEntry(String component, String name, Class type,
            Object defaultValue, Object data) throws ConfigurationException {
        throw new AssertionError();
    }
}
| apache-2.0 |
Fabryprog/camel | components/camel-netty4/src/main/java/org/apache/camel/component/netty4/NettyWorkerPoolBuilder.java | 3053 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty4;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import org.apache.camel.util.concurrent.CamelThreadFactory;
/**
 * A builder to create Netty {@link io.netty.channel.EventLoopGroup} which can be used for sharing worker pools
 * with multiple Netty {@link NettyServerBootstrapFactory} server bootstrap configurations.
 */
public final class NettyWorkerPoolBuilder {

    /** Base name used for the worker threads. */
    private String name = "NettyWorker";

    /** Thread name pattern passed to the thread factory. */
    private String pattern;

    /** Requested number of workers; when not positive, the Netty helper default is used. */
    private int workerCount;

    /** Whether to use the native epoll transport instead of NIO. */
    private boolean nativeTransport;

    /** The most recently built pool, kept so that {@link #destroy()} can shut it down. */
    private volatile EventLoopGroup workerPool;

    public void setName(String name) {
        this.name = name;
    }

    public void setPattern(String pattern) {
        this.pattern = pattern;
    }

    public void setWorkerCount(int workerCount) {
        this.workerCount = workerCount;
    }

    public void setNativeTransport(boolean nativeTransport) {
        this.nativeTransport = nativeTransport;
    }

    public NettyWorkerPoolBuilder withName(String name) {
        this.name = name;
        return this;
    }

    public NettyWorkerPoolBuilder withPattern(String pattern) {
        this.pattern = pattern;
        return this;
    }

    public NettyWorkerPoolBuilder withWorkerCount(int workerCount) {
        this.workerCount = workerCount;
        return this;
    }

    public NettyWorkerPoolBuilder withNativeTransport(boolean nativeTransport) {
        this.nativeTransport = nativeTransport;
        return this;
    }

    /**
     * Creates a new worker pool using the configured thread naming and transport.
     */
    public EventLoopGroup build() {
        final int threads = workerCount > 0 ? workerCount : NettyHelper.DEFAULT_IO_THREADS;
        final CamelThreadFactory factory = new CamelThreadFactory(pattern, name, false);
        workerPool = nativeTransport
                ? new EpollEventLoopGroup(threads, factory)
                : new NioEventLoopGroup(threads, factory);
        return workerPool;
    }

    /**
     * Shutdown the created worker pool, if any.
     */
    public void destroy() {
        if (workerPool == null) {
            return;
        }
        workerPool.shutdownGracefully();
        workerPool = null;
    }
}
| apache-2.0 |
apache/solr | solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java | 17854 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.common.params;
import java.util.EnumSet;
import java.util.Locale;
import org.apache.solr.common.SolrException;
/** Facet parameters */
public interface FacetParams {
/** Should facet counts be calculated? */
public static final String FACET = "facet";
/**
* Numeric option indicating the maximum number of threads to be used in counting facet field
* vales
*/
public static final String FACET_THREADS = FACET + ".threads";
/** What method should be used to do the faceting */
public static final String FACET_METHOD = FACET + ".method";
/**
* Value for FACET_METHOD param to indicate that Solr should enumerate over terms in a field to
* calculate the facet counts.
*/
public static final String FACET_METHOD_enum = "enum";
/**
* Value for FACET_METHOD param to indicate that Solr should enumerate over documents and count up
* terms by consulting an uninverted representation of the field values (such as the FieldCache
* used for sorting).
*/
public static final String FACET_METHOD_fc = "fc";
/** Value for FACET_METHOD param, like FACET_METHOD_fc but counts per-segment. */
public static final String FACET_METHOD_fcs = "fcs";
/** Value for FACET_METHOD param to indicate that Solr should use an UnInvertedField */
public static final String FACET_METHOD_uif = "uif";
/**
   * Any Lucene-formatted queries the user would like to use for Facet Constraint Counts
* (multi-value)
*/
public static final String FACET_QUERY = FACET + ".query";
/**
* Any field whose terms the user wants to enumerate over for Facet Constraint Counts
* (multi-value)
*/
public static final String FACET_FIELD = FACET + ".field";
/** The offset into the list of facets. Can be overridden on a per field basis. */
public static final String FACET_OFFSET = FACET + ".offset";
/**
* Numeric option indicating the maximum number of facet field counts be included in the response
* for each field - in descending order of count. Can be overridden on a per field basis.
*/
public static final String FACET_LIMIT = FACET + ".limit";
/**
* Numeric option indicating the minimum number of hits before a facet should be included in the
* response. Can be overridden on a per field basis.
*/
public static final String FACET_MINCOUNT = FACET + ".mincount";
/**
* Boolean option indicating whether facet field counts of "0" should be included in the response.
* Can be overridden on a per field basis.
*/
public static final String FACET_ZEROS = FACET + ".zeros";
/**
* Boolean option indicating whether the response should include a facet field count for all
* records which have no value for the facet field. Can be overridden on a per field basis.
*/
public static final String FACET_MISSING = FACET + ".missing";
static final String FACET_OVERREQUEST = FACET + ".overrequest";
/**
* The percentage to over-request by when performing initial distributed requests.
*
* <p>default value is 1.5
*/
public static final String FACET_OVERREQUEST_RATIO = FACET_OVERREQUEST + ".ratio";
/**
* An additional amount to over-request by when performing initial distributed requests. This
* value will be added after accounting for the over-request ratio.
*
* <p>default value is 10
*/
public static final String FACET_OVERREQUEST_COUNT = FACET_OVERREQUEST + ".count";
/**
* Comma separated list of fields to pivot
*
* <p>example: author,type (for types by author / types within author)
*/
public static final String FACET_PIVOT = FACET + ".pivot";
/**
* Minimum number of docs that need to match to be included in the sublist
*
* <p>default value is 1
*/
public static final String FACET_PIVOT_MINCOUNT = FACET_PIVOT + ".mincount";
/**
* String option: "count" causes facets to be sorted by the count, "index" results in index order.
*/
public static final String FACET_SORT = FACET + ".sort";
public static final String FACET_SORT_COUNT = "count";
public static final String FACET_SORT_COUNT_LEGACY = "true";
public static final String FACET_SORT_INDEX = "index";
public static final String FACET_SORT_INDEX_LEGACY = "false";
/** Only return constraints of a facet field with the given prefix. */
public static final String FACET_PREFIX = FACET + ".prefix";
/** Only return constraints of a facet field containing the given string. */
public static final String FACET_CONTAINS = FACET + ".contains";
/** Only return constraints of a facet field containing the given string. */
public static final String FACET_MATCHES = FACET + ".matches";
/** If using facet contains, ignore case when comparing values. */
public static final String FACET_CONTAINS_IGNORE_CASE = FACET_CONTAINS + ".ignoreCase";
/** Only return constraints of a facet field excluding the given string. */
public static final String FACET_EXCLUDETERMS = FACET + ".excludeTerms";
/**
* When faceting by enumerating the terms in a field, only use the filterCache for terms with a df
* >= to this parameter.
*/
public static final String FACET_ENUM_CACHE_MINDF = FACET + ".enum.cache.minDf";
/**
* A boolean parameter that caps the facet counts at 1. With this set, a returned count will only
* be 0 or 1. For apps that don't need the count, this should be an optimization
*/
public static final String FACET_EXISTS = FACET + ".exists";
/**
   * Any field whose terms the user wants to enumerate over for Facet Constraint Counts (multi-value)
*/
public static final String FACET_DATE = FACET + ".date";
/**
* Date string indicating the starting point for a date facet range. Can be overridden on a per
* field basis.
*/
public static final String FACET_DATE_START = FACET_DATE + ".start";
/**
* Date string indicating the ending point for a date facet range. Can be overridden on a per
* field basis.
*/
public static final String FACET_DATE_END = FACET_DATE + ".end";
/**
* Date Math string indicating the interval of sub-ranges for a date facet range. Can be
* overridden on a per field basis.
*/
public static final String FACET_DATE_GAP = FACET_DATE + ".gap";
/**
* Boolean indicating how counts should be computed if the range between 'start' and 'end' is not
* evenly divisible by 'gap'. If this value is true, then all counts of ranges involving the 'end'
* point will use the exact endpoint specified -- this includes the 'between' and 'after' counts
* as well as the last range computed using the 'gap'. If the value is false, then 'gap' is used
* to compute the effective endpoint closest to the 'end' param which results in the range between
* 'start' and 'end' being evenly divisible by 'gap'.
*
* <p>The default is false.
*
* <p>Can be overridden on a per field basis.
*/
public static final String FACET_DATE_HARD_END = FACET_DATE + ".hardend";
/**
* String indicating what "other" ranges should be computed for a date facet range (multi-value).
*
* <p>Can be overridden on a per field basis.
*
* @see FacetRangeOther
*/
public static final String FACET_DATE_OTHER = FACET_DATE + ".other";
/**
* Multivalued string indicating what rules should be applied to determine when the ranges
* generated for date faceting should be inclusive or exclusive of their end points.
*
* <p>The default value if none are specified is: [lower,upper,edge] <i>(NOTE: This is different
   * than FACET_RANGE_INCLUDE)</i>
*
* <p>Can be overridden on a per field basis.
*
* @see FacetRangeInclude
* @see #FACET_RANGE_INCLUDE
*/
public static final String FACET_DATE_INCLUDE = FACET_DATE + ".include";
/**
   * Any numerical field whose terms the user wants to enumerate over Facet Constraint Counts for
* selected ranges.
*/
public static final String FACET_RANGE = FACET + ".range";
/**
* Number indicating the starting point for a numerical range facet. Can be overridden on a per
* field basis.
*/
public static final String FACET_RANGE_START = FACET_RANGE + ".start";
/**
* Number indicating the ending point for a numerical range facet. Can be overridden on a per
* field basis.
*/
public static final String FACET_RANGE_END = FACET_RANGE + ".end";
/**
* Number indicating the interval of sub-ranges for a numerical facet range. Can be overridden on
* a per field basis.
*/
public static final String FACET_RANGE_GAP = FACET_RANGE + ".gap";
/**
* Boolean indicating how counts should be computed if the range between 'start' and 'end' is not
* evenly divisible by 'gap'. If this value is true, then all counts of ranges involving the 'end'
* point will use the exact endpoint specified -- this includes the 'between' and 'after' counts
* as well as the last range computed using the 'gap'. If the value is false, then 'gap' is used
* to compute the effective endpoint closest to the 'end' param which results in the range between
* 'start' and 'end' being evenly divisible by 'gap'.
*
* <p>The default is false.
*
* <p>Can be overridden on a per field basis.
*/
public static final String FACET_RANGE_HARD_END = FACET_RANGE + ".hardend";
/**
* String indicating what "other" ranges should be computed for a numerical range facet
* (multi-value). Can be overridden on a per field basis.
*/
public static final String FACET_RANGE_OTHER = FACET_RANGE + ".other";
/**
* Multivalued string indicating what rules should be applied to determine when the ranges
* generated for numeric faceting should be inclusive or exclusive of their end points.
*
* <p>The default value if none are specified is: lower
*
* <p>Can be overridden on a per field basis.
*
* @see FacetRangeInclude
*/
public static final String FACET_RANGE_INCLUDE = FACET_RANGE + ".include";
/**
* String indicating the method to use to resolve range facets.
*
* <p>Can be overridden on a per field basis.
*
* @see FacetRangeMethod
*/
public static final String FACET_RANGE_METHOD = FACET_RANGE + ".method";
/** Any field whose values the user wants to enumerate as explicit intervals of terms. */
public static final String FACET_INTERVAL = FACET + ".interval";
/** Set of terms for a single interval to facet on. */
public static final String FACET_INTERVAL_SET = FACET_INTERVAL + ".set";
/**
* A spatial RPT field to generate a 2D "heatmap" (grid of facet counts) on. Just like the other
* faceting types, this may include a 'key' or local-params to facet multiple times. All
* parameters with this suffix can be overridden on a per-field basis.
*/
public static final String FACET_HEATMAP = "facet.heatmap";
/** The format of the heatmap: either png or ints2D (default). */
public static final String FACET_HEATMAP_FORMAT = FACET_HEATMAP + ".format";
/**
* The region the heatmap should minimally enclose. It defaults to the world if not set. The
* format can either be a minimum to maximum point range format:
*
* <pre>["-150 10" TO "-100 30"]</pre>
*
* (the first is bottom-left and second is bottom-right, both of which are parsed as points are
* parsed). OR, any WKT can be provided and it's bounding box will be taken.
*/
public static final String FACET_HEATMAP_GEOM = FACET_HEATMAP + ".geom";
/**
* Specify the heatmap grid level explicitly, instead of deriving it via distErr or distErrPct.
*/
public static final String FACET_HEATMAP_LEVEL = FACET_HEATMAP + ".gridLevel";
/**
* Used to determine the heatmap grid level to compute, defaulting to 0.15. It has the same
* interpretation of distErrPct when searching on RPT, but relative to the shape in 'bbox'. It's a
* fraction (not a %) of the radius of the shape that grid squares must fit into without
* exceeding. > 0 and <= 0.5. Mutually exclusive with distErr & gridLevel.
*/
public static final String FACET_HEATMAP_DIST_ERR_PCT = FACET_HEATMAP + ".distErrPct";
/**
* Used to determine the heatmap grid level to compute (optional). It has the same interpretation
* of maxDistErr or distErr with RPT. It's an absolute distance (in units of what's specified on
* the field type) that a grid square must maximally fit into (width & height). It can be used
* to to more explicitly specify the maximum grid square size without knowledge of what particular
* grid levels translate to. This can in turn be used with knowledge of the size of 'bbox' to get
* a target minimum number of grid cells. Mutually exclusive with distErrPct & gridLevel.
*/
public static final String FACET_HEATMAP_DIST_ERR = FACET_HEATMAP + ".distErr";
/**
* The maximum number of cells (grid squares) the client is willing to handle. If this limit would
* be exceeded, we throw an error instead. Defaults to 100k.
*/
public static final String FACET_HEATMAP_MAX_CELLS = FACET_HEATMAP + ".maxCells";
/**
* An enumeration of the legal values for {@link #FACET_RANGE_OTHER} and {@link #FACET_DATE_OTHER}
* ...
*
* <ul>
* <li>before = the count of matches before the start
* <li>after = the count of matches after the end
* <li>between = the count of all matches between start and end
* <li>all = all of the above (default value)
* <li>none = no additional info requested
* </ul>
*
* @see #FACET_RANGE_OTHER
* @see #FACET_DATE_OTHER
*/
public enum FacetRangeOther {
BEFORE,
AFTER,
BETWEEN,
ALL,
NONE;
@Override
public String toString() {
return super.toString().toLowerCase(Locale.ROOT);
}
public static FacetRangeOther get(String label) {
try {
return valueOf(label.toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException e) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST,
label + " is not a valid type of 'other' range facet information",
e);
}
}
}
/**
* An enumeration of the legal values for {@link #FACET_DATE_INCLUDE} and {@link
* #FACET_RANGE_INCLUDE} <br>
*
* <ul>
* <li>lower = all gap based ranges include their lower bound
* <li>upper = all gap based ranges include their upper bound
* <li>edge = the first and last gap ranges include their edge bounds (ie: lower for the first
* one, upper for the last one) even if the corresponding upper/lower option is not
* specified
* <li>outer = the BEFORE and AFTER ranges should be inclusive of their bounds, even if the
* first or last ranges already include those boundaries.
* <li>all = shorthand for lower, upper, edge, and outer
* </ul>
*
* @see #FACET_DATE_INCLUDE
* @see #FACET_RANGE_INCLUDE
*/
public enum FacetRangeInclude {
ALL,
LOWER,
UPPER,
EDGE,
OUTER;
@Override
public String toString() {
return super.toString().toLowerCase(Locale.ROOT);
}
public static FacetRangeInclude get(String label) {
try {
return valueOf(label.toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException e) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST,
label + " is not a valid type of for range 'include' information",
e);
}
}
/**
* Convinience method for parsing the param value according to the correct semantics and
* applying the default of "LOWER"
*/
public static EnumSet<FacetRangeInclude> parseParam(final String[] param) {
// short circut for default behavior
if (null == param || 0 == param.length) return EnumSet.of(LOWER);
// build up set containing whatever is specified
final EnumSet<FacetRangeInclude> include = EnumSet.noneOf(FacetRangeInclude.class);
for (final String o : param) {
include.add(FacetRangeInclude.get(o));
}
// if set contains all, then we're back to short circuting
if (include.contains(FacetRangeInclude.ALL)) return EnumSet.allOf(FacetRangeInclude.class);
// use whatever we've got.
return include;
}
}
/**
* An enumeration of the legal values for {@link #FACET_RANGE_METHOD}
*
* <ul>
* <li>filter =
* <li>dv =
* </ul>
*
* @see #FACET_RANGE_METHOD
*/
public enum FacetRangeMethod {
FILTER,
DV;
@Override
public String toString() {
return super.toString().toLowerCase(Locale.ROOT);
}
public static FacetRangeMethod get(String label) {
try {
return valueOf(label.toUpperCase(Locale.ROOT));
} catch (IllegalArgumentException e) {
throw new SolrException(
SolrException.ErrorCode.BAD_REQUEST,
label + " is not a valid method for range faceting",
e);
}
}
public static FacetRangeMethod getDefault() {
return FILTER;
}
}
}
| apache-2.0 |
textocat/textokit-core | Textokit.PosTagger.API/src/main/java/com/textocat/textokit/morph/commons/TagUtils.java | 3235 | /*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.textocat.textokit.morph.commons;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.textocat.textokit.morph.dictionary.resource.GramModel;
import com.textocat.textokit.morph.fs.Word;
import com.textocat.textokit.morph.fs.Wordform;
import com.textocat.textokit.postagger.MorphCasUtils;
import java.util.BitSet;
import java.util.Set;
import static com.textocat.textokit.morph.commons.PunctuationUtils.punctuationTagMap;
import static com.textocat.textokit.morph.model.MorphConstants.*;
/**
* EXPERIMENTAL <br>
* EXPERIMENTAL <br>
* EXPERIMENTAL
*
* @author Rinat Gareev
*/
public class TagUtils {

    // POS grammemes (from MorphConstants) treated as closed classes: their
    // member word sets are fixed, so tags containing them can be special-cased.
    private static final Set<String> closedPosSet = ImmutableSet.of(NPRO, Apro, PREP, CONJ, PRCL);

    /**
     * Builds a predicate over grammeme bit sets.
     *
     * @param gm gram model used to resolve numeric ids of the closed-class grammemes
     * @return function that returns true if the given gram bits represents a
     * closed class tag
     */
    public static Function<BitSet, Boolean> getClosedClassIndicator(GramModel gm) {
        // initialize mask with the bit of each closed-class grammeme
        final BitSet closedClassTagsMask = new BitSet();
        for (String cpGram : closedPosSet) {
            closedClassTagsMask.set(gm.getGrammemNumId(cpGram));
        }
        //
        return new Function<BitSet, Boolean>() {
            @Override
            public Boolean apply(BitSet _wfBits) {
                // work on a clone: BitSet.and mutates its receiver
                BitSet wfBits = (BitSet) _wfBits.clone();
                wfBits.and(closedClassTagsMask);
                // non-empty intersection => at least one closed-class grammeme
                return !wfBits.isEmpty();
            }
        };
    }

    // FIXME refactor hard-coded dependency on a tag mapper implementation
    /**
     * @return true if the tag is one of the punctuation tags, or if parsing it
     * (via GramModelBasedTagMapper) yields at least one closed-class grammeme
     */
    public static boolean isClosedClassTag(String tag) {
        return closedClassPunctuationTags.contains(tag)
                || !Sets.intersection(
                GramModelBasedTagMapper.parseTag(tag), closedPosSet)
                .isEmpty();
    }

    /**
     * Maps a null reference or the literal string "null" to a real null;
     * any other tag is returned unchanged.
     */
    public static String postProcessExternalTag(String tag) {
        return !"null".equals(String.valueOf(tag)) ? tag : null;
    }

    // Tags used for punctuation tokens, taken from PunctuationUtils.punctuationTagMap.
    public static final Set<String> closedClassPunctuationTags = ImmutableSet
            .copyOf(punctuationTagMap.values());

    /**
     * @return function extracting the POS of a word's single wordform,
     * or null for a null word
     */
    public static final Function<Word, String> tagFunction() {
        return tagFunction;
    }

    private static final Function<Word, String> tagFunction = new Function<Word, String>() {
        @Override
        public String apply(Word word) {
            if (word == null) {
                return null;
            }
            Wordform wf = MorphCasUtils.requireOnlyWordform(word);
            return wf.getPos();
        }
    };

    // Non-instantiable utility class.
    private TagUtils() {
    }
}
| apache-2.0 |
thomasbecker/jetty-7 | jetty-server/src/test/java/org/eclipse/jetty/server/handler/AbstractConnectHandlerTest.java | 5030 | package org.eclipse.jetty.server.handler;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.EOFException;
import java.io.IOException;
import java.net.Socket;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.nio.SelectChannelConnector;
import org.junit.AfterClass;
/**
* @version $Revision$ $Date$
*/
public abstract class AbstractConnectHandlerTest
{
protected static Server server;
protected static Connector serverConnector;
protected static Server proxy;
protected static Connector proxyConnector;
    /**
     * Starts the target server with the given connector and handler.
     * Both are kept in static fields so tests and {@link #stop()} can reach them.
     */
    protected static void startServer(Connector connector, Handler handler) throws Exception
    {
        server = new Server();
        serverConnector = connector;
        server.addConnector(serverConnector);
        server.setHandler(handler);
        server.start();
    }
    /**
     * Starts the proxy server with a {@link ConnectHandler} on a
     * {@link SelectChannelConnector} (port chosen by the OS).
     */
    protected static void startProxy() throws Exception
    {
        proxy = new Server();
        proxyConnector = new SelectChannelConnector();
        proxy.addConnector(proxyConnector);
        proxy.setHandler(new ConnectHandler());
        proxy.start();
    }
    /** Stops the proxy and then the target server once the test class finishes. */
    @AfterClass
    public static void stop() throws Exception
    {
        stopProxy();
        stopServer();
    }
    /** Stops the target server and waits for its threads to terminate. */
    protected static void stopServer() throws Exception
    {
        server.stop();
        server.join();
    }
    /** Stops the proxy server and waits for its threads to terminate. */
    protected static void stopProxy() throws Exception
    {
        proxy.stop();
        proxy.join();
    }
protected Response readResponse(BufferedReader reader) throws IOException
{
// Simplified parser for HTTP responses
String line = reader.readLine();
if (line == null)
throw new EOFException();
Matcher responseLine = Pattern.compile("HTTP/1\\.1\\s+(\\d+)").matcher(line);
assertTrue(responseLine.lookingAt());
String code = responseLine.group(1);
Map<String, String> headers = new LinkedHashMap<String, String>();
while ((line = reader.readLine()) != null)
{
if (line.trim().length() == 0)
break;
Matcher header = Pattern.compile("([^:]+):\\s*(.*)").matcher(line);
assertTrue(header.lookingAt());
String headerName = header.group(1);
String headerValue = header.group(2);
headers.put(headerName.toLowerCase(), headerValue.toLowerCase());
}
StringBuilder body = new StringBuilder();
if (headers.containsKey("content-length"))
{
int length = Integer.parseInt(headers.get("content-length"));
for (int i = 0; i < length; ++i)
{
char c = (char)reader.read();
body.append(c);
}
}
else if ("chunked".equals(headers.get("transfer-encoding")))
{
while ((line = reader.readLine()) != null)
{
if ("0".equals(line))
{
line = reader.readLine();
assertEquals("", line);
break;
}
int length = Integer.parseInt(line, 16);
for (int i = 0; i < length; ++i)
{
char c = (char)reader.read();
body.append(c);
}
line = reader.readLine();
assertEquals("", line);
}
}
return new Response(code, headers, body.toString().trim());
}
    /**
     * Opens a client socket to the proxy's local port with a 5s read timeout.
     */
    protected Socket newSocket() throws IOException
    {
        Socket socket = new Socket("localhost", proxyConnector.getLocalPort());
        socket.setSoTimeout(5000);
        return socket;
    }
    /**
     * Immutable holder for a parsed HTTP response: status code, headers
     * (lower-cased, in arrival order) and the trimmed body.
     */
    protected class Response
    {
        private final String code;
        private final Map<String, String> headers;
        private final String body;
        private Response(String code, Map<String, String> headers, String body)
        {
            this.code = code;
            this.headers = headers;
            this.body = body;
        }
        /** @return the HTTP status code, e.g. "200" */
        public String getCode()
        {
            return code;
        }
        /** @return the headers, with names and values lower-cased */
        public Map<String, String> getHeaders()
        {
            return headers;
        }
        /** @return the trimmed response body */
        public String getBody()
        {
            return body;
        }
        @Override
        public String toString()
        {
            // Re-render in an HTTP-like textual form, mainly for diagnostics.
            StringBuilder builder = new StringBuilder();
            builder.append(code).append("\r\n");
            for (Map.Entry<String, String> entry : headers.entrySet())
                builder.append(entry.getKey()).append(": ").append(entry.getValue()).append("\r\n");
            builder.append("\r\n");
            builder.append(body);
            return builder.toString();
        }
    }
}
| apache-2.0 |
robertoschwald/cas | support/cas-server-support-memcached-core/src/test/java/org/apereo/cas/memcached/kryo/CasKryoTranscoderTests.java | 12137 | package org.apereo.cas.memcached.kryo;
import org.apereo.cas.authentication.AcceptUsersAuthenticationHandler;
import org.apereo.cas.authentication.AuthenticationBuilder;
import org.apereo.cas.authentication.BasicCredentialMetaData;
import org.apereo.cas.authentication.DefaultAuthenticationBuilder;
import org.apereo.cas.authentication.DefaultAuthenticationHandlerExecutionResult;
import org.apereo.cas.authentication.UsernamePasswordCredential;
import org.apereo.cas.authentication.principal.DefaultPrincipalFactory;
import org.apereo.cas.mock.MockServiceTicket;
import org.apereo.cas.mock.MockTicketGrantingTicket;
import org.apereo.cas.services.RegisteredServiceTestUtils;
import org.apereo.cas.ticket.TicketGrantingTicket;
import org.apereo.cas.ticket.TicketGrantingTicketImpl;
import org.apereo.cas.ticket.support.MultiTimeUseOrTimeoutExpirationPolicy;
import org.apereo.cas.ticket.support.NeverExpiresExpirationPolicy;
import com.esotericsoftware.kryo.KryoException;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.junit.Test;
import javax.security.auth.login.AccountNotFoundException;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import static org.junit.Assert.*;
/**
* Unit test for {@link CasKryoTranscoder} class.
*
* @author Marvin S. Addison
* @since 3.0.0
*/
@Slf4j
public class CasKryoTranscoderTests {
private static final String ST_ID = "ST-1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890ABCDEFGHIJK";
private static final String TGT_ID = "TGT-1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ01234567890ABCDEFGHIJK-cas1";
private static final String USERNAME = "handymanbob";
private static final String PASSWORD = "foo";
private static final String NICKNAME_KEY = "nickname";
private static final String NICKNAME_VALUE = "bob";
private final CasKryoTranscoder transcoder;
private final Map<String, Object> principalAttributes;
    public CasKryoTranscoderTests() {
        // Register the mock ticket classes so Kryo can serialize them.
        val classesToRegister = new ArrayList<Class>();
        classesToRegister.add(MockServiceTicket.class);
        classesToRegister.add(MockTicketGrantingTicket.class);
        this.transcoder = new CasKryoTranscoder(new CasKryoPool(classesToRegister));
        // Principal attributes reused by the individual tests.
        this.principalAttributes = new HashMap<>();
        this.principalAttributes.put(NICKNAME_KEY, NICKNAME_VALUE);
    }
@Test
public void verifyEncodeDecodeTGTImpl() {
val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
final AuthenticationBuilder bldr = new DefaultAuthenticationBuilder(new DefaultPrincipalFactory()
.createPrincipal("user", new HashMap<>(this.principalAttributes)));
bldr.setAttributes(new HashMap<>(this.principalAttributes));
bldr.setAuthenticationDate(ZonedDateTime.now());
bldr.addCredential(new BasicCredentialMetaData(userPassCredential));
bldr.addFailure("error", new AccountNotFoundException());
bldr.addSuccess("authn", new DefaultAuthenticationHandlerExecutionResult(
new AcceptUsersAuthenticationHandler(""),
new BasicCredentialMetaData(userPassCredential)));
final TicketGrantingTicket expectedTGT = new TicketGrantingTicketImpl(TGT_ID,
RegisteredServiceTestUtils.getService(),
null, bldr.build(),
new NeverExpiresExpirationPolicy());
val ticket = expectedTGT.grantServiceTicket(ST_ID,
RegisteredServiceTestUtils.getService(),
new NeverExpiresExpirationPolicy(), false, true);
val result1 = transcoder.encode(expectedTGT);
val resultTicket = transcoder.decode(result1);
assertEquals(expectedTGT, resultTicket);
val result2 = transcoder.encode(ticket);
val resultStTicket1 = transcoder.decode(result2);
assertEquals(ticket, resultStTicket1);
val resultStTicket2 = transcoder.decode(result2);
assertEquals(ticket, resultStTicket2);
}
@Test
public void verifyEncodeDecode() {
val tgt = new MockTicketGrantingTicket(USERNAME);
val expectedST = new MockServiceTicket(ST_ID, RegisteredServiceTestUtils.getService(), tgt);
assertEquals(expectedST, transcoder.decode(transcoder.encode(expectedST)));
val expectedTGT = new MockTicketGrantingTicket(USERNAME);
expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
val result = transcoder.encode(expectedTGT);
assertEquals(expectedTGT, transcoder.decode(result));
assertEquals(expectedTGT, transcoder.decode(result));
internalProxyTest();
}
private void internalProxyTest() {
val expectedTGT = new MockTicketGrantingTicket(USERNAME);
expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
val result = transcoder.encode(expectedTGT);
assertEquals(expectedTGT, transcoder.decode(result));
assertEquals(expectedTGT, transcoder.decode(result));
}
@Test
public void verifyEncodeDecodeTGTWithUnmodifiableMap() {
val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
final TicketGrantingTicket expectedTGT =
new MockTicketGrantingTicket(TGT_ID, userPassCredential, new HashMap<>(this.principalAttributes));
expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
val result = transcoder.encode(expectedTGT);
assertEquals(expectedTGT, transcoder.decode(result));
assertEquals(expectedTGT, transcoder.decode(result));
}
@Test
public void verifyEncodeDecodeTGTWithUnmodifiableList() {
val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
val values = new ArrayList<String>();
values.add(NICKNAME_VALUE);
val newAttributes = new HashMap<String, Object>();
newAttributes.put(NICKNAME_KEY, new ArrayList<>(values));
val expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
val result = transcoder.encode(expectedTGT);
assertEquals(expectedTGT, transcoder.decode(result));
assertEquals(expectedTGT, transcoder.decode(result));
}
@Test
public void verifyEncodeDecodeTGTWithLinkedHashMap() {
val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
final TicketGrantingTicket expectedTGT =
new MockTicketGrantingTicket(TGT_ID, userPassCredential, new LinkedHashMap<>(this.principalAttributes));
expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
val result = transcoder.encode(expectedTGT);
assertEquals(expectedTGT, transcoder.decode(result));
assertEquals(expectedTGT, transcoder.decode(result));
}
@Test
public void verifyEncodeDecodeTGTWithListOrderedMap() {
val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
final TicketGrantingTicket expectedTGT =
new MockTicketGrantingTicket(TGT_ID, userPassCredential, this.principalAttributes);
expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
val result = transcoder.encode(expectedTGT);
assertEquals(expectedTGT, transcoder.decode(result));
assertEquals(expectedTGT, transcoder.decode(result));
}
@Test
public void verifyEncodeDecodeTGTWithUnmodifiableSet() {
val newAttributes = new HashMap<String, Object>();
val values = new HashSet<String>();
values.add(NICKNAME_VALUE);
//CHECKSTYLE:OFF
newAttributes.put(NICKNAME_KEY, Collections.unmodifiableSet(values));
//CHECKSTYLE:ON
val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
val expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
val result = transcoder.encode(expectedTGT);
assertEquals(expectedTGT, transcoder.decode(result));
assertEquals(expectedTGT, transcoder.decode(result));
}
@Test
public void verifyEncodeDecodeTGTWithSingleton() {
val newAttributes = new HashMap<String, Object>();
newAttributes.put(NICKNAME_KEY, Collections.singleton(NICKNAME_VALUE));
val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
val expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
val result = transcoder.encode(expectedTGT);
assertEquals(expectedTGT, transcoder.decode(result));
assertEquals(expectedTGT, transcoder.decode(result));
}
@Test
public void verifyEncodeDecodeTGTWithSingletonMap() {
val newAttributes = Collections.<String, Object>singletonMap(NICKNAME_KEY, NICKNAME_VALUE);
val userPassCredential = new UsernamePasswordCredential(USERNAME, PASSWORD);
val expectedTGT = new MockTicketGrantingTicket(TGT_ID, userPassCredential, newAttributes);
expectedTGT.grantServiceTicket(ST_ID, null, null, false, true);
val result = transcoder.encode(expectedTGT);
assertEquals(expectedTGT, transcoder.decode(result));
assertEquals(expectedTGT, transcoder.decode(result));
}
@Test
public void verifyEncodeDecodeRegisteredService() {
val service = RegisteredServiceTestUtils.getRegisteredService("helloworld");
val result = transcoder.encode(service);
assertEquals(service, transcoder.decode(result));
assertEquals(service, transcoder.decode(result));
}
@Test
public void verifySTWithServiceTicketExpirationPolicy() {
// ServiceTicketExpirationPolicy is not registered with Kryo...
transcoder.getKryo().getClassResolver().reset();
val tgt = new MockTicketGrantingTicket(USERNAME);
val expectedST = new MockServiceTicket(ST_ID, RegisteredServiceTestUtils.getService(), tgt);
val step
= new MultiTimeUseOrTimeoutExpirationPolicy.ServiceTicketExpirationPolicy(1, 600);
expectedST.setExpiration(step);
val result = transcoder.encode(expectedST);
assertEquals(expectedST, transcoder.decode(result));
// Test it a second time - Ensure there's no problem with subsequent de-serializations.
assertEquals(expectedST, transcoder.decode(result));
}
    @Test
    public void verifyEncodeDecodeNonRegisteredClass() {
        val tgt = new MockTicketGrantingTicket(USERNAME);
        val expectedST = new MockServiceTicket(ST_ID, RegisteredServiceTestUtils.getService(), tgt);
        // This class is not registered with Kryo
        val step = new UnregisteredServiceTicketExpirationPolicy(1, 600);
        expectedST.setExpiration(step);
        try {
            transcoder.encode(expectedST);
            // Reaching this line means encoding did NOT fail as required.
            throw new AssertionError("Unregistered class is not allowed by Kryo");
        } catch (final KryoException e) {
            // Expected path: Kryo rejects the unregistered class.
            LOGGER.trace(e.getMessage(), e);
        } catch (final Exception e) {
            throw new AssertionError("Unexpected exception due to not resetting Kryo between de-serializations with unregistered class.");
        }
    }
    /**
     * Class for testing Kryo unregistered class handling.
     * Deliberately NOT registered with the transcoder's Kryo instance so
     * that encoding a ticket carrying it must fail.
     */
    private static class UnregisteredServiceTicketExpirationPolicy extends MultiTimeUseOrTimeoutExpirationPolicy {
        private static final long serialVersionUID = -1704993954986738308L;
        /**
         * Instantiates a new Service ticket expiration policy.
         *
         * @param numberOfUses the number of uses
         * @param timeToKillInSeconds the time to kill in seconds
         */
        UnregisteredServiceTicketExpirationPolicy(final int numberOfUses, final long timeToKillInSeconds) {
            super(numberOfUses, timeToKillInSeconds);
        }
    }
}
| apache-2.0 |
fluidinfo/velocity-tools-packaging | src/main/java/org/apache/velocity/tools/view/DataInfo.java | 3790 | package org.apache.velocity.tools.view;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* <p>ToolInfo implementation to handle "primitive" data types.
* It currently supports String, Number, and Boolean data.</p>
*
* <p>An example of data elements specified in your toolbox.xml
* might be:
* <pre>
* <data type="string">
* <key>app_name</key>
* <value>FooWeb Deluxe</value>
* </data>
* <data type="number">
* <key>app_version</key>
* <value>4.2</value>
* </data>
* <data type="boolean">
* <key>debug</key>
* <value>true</value>
* </data>
* <data type="number">
* <key>screen_width</key>
* <value>400</value>
* </data>
* </pre></p>
*
* @author Nathan Bubna
* @deprecated Use {@link org.apache.velocity.tools.config.Data}
* @version $Id: DataInfo.java 651469 2008-04-25 00:46:13Z nbubna $
*/
@Deprecated
public class DataInfo implements ToolInfo
{
    public static final String TYPE_STRING = "string";
    public static final String TYPE_NUMBER = "number";
    public static final String TYPE_BOOLEAN = "boolean";

    private static final int TYPE_ID_STRING = 0;
    private static final int TYPE_ID_NUMBER = 1;
    private static final int TYPE_ID_BOOLEAN = 2;

    // Key under which this constant value is exposed.
    private String key = null;
    // One of the TYPE_ID_* constants; defaults to string.
    private int typeId = TYPE_ID_STRING;
    // The converted value: a String, Number, or Boolean depending on typeId.
    private Object data = null;

    public DataInfo() {}

    /*********************** Mutators *************************/

    public void setKey(String key)
    {
        this.key = key;
    }

    /**
     * Sets the target type for subsequent setValue(String) calls.
     * Unrecognized or null types fall back to string.
     */
    public void setType(String type)
    {
        if (TYPE_BOOLEAN.equalsIgnoreCase(type))
        {
            this.typeId = TYPE_ID_BOOLEAN;
        }
        else if (TYPE_NUMBER.equalsIgnoreCase(type))
        {
            this.typeId = TYPE_ID_NUMBER;
        }
        else /* if no type or type="string" */
        {
            this.typeId = TYPE_ID_STRING;
        }
    }

    /**
     * Converts and stores the raw string according to the configured type;
     * call setType(String) first. A number containing '.' becomes a Double,
     * otherwise an Integer.
     */
    public void setValue(String value)
    {
        if (typeId == TYPE_ID_BOOLEAN)
        {
            this.data = Boolean.valueOf(value);
        }
        else if (typeId == TYPE_ID_NUMBER)
        {
            // valueOf instead of the deprecated boxed constructors; may
            // also reuse cached Integer instances.
            if (value.indexOf('.') >= 0)
            {
                this.data = Double.valueOf(value);
            }
            else
            {
                this.data = Integer.valueOf(value);
            }
        }
        else /* type is "string" */
        {
            this.data = value;
        }
    }

    /*********************** Accessors *************************/

    public String getKey()
    {
        return key;
    }

    /** @return the runtime class name of the converted value, or null if unset */
    public String getClassname()
    {
        return data != null ? data.getClass().getName() : null;
    }

    /**
     * Returns the data. Always returns the same
     * object since the data is a constant. Initialization
     * data is ignored.
     */
    public Object getInstance(Object initData)
    {
        return data;
    }
}
| apache-2.0 |
diegosalvi/docetproject | docet-core/src/main/java/docet/engine/DocetDocFormat.java | 1574 | /*
* Licensed to Diennea S.r.l. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Diennea S.r.l. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package docet.engine;
import java.util.Arrays;
/**
*
*
*/
public enum DocetDocFormat {
TYPE_HTML("html", false),
TYPE_PDF("pdf", true);
private String name;
private boolean includeResources;
    /**
     * @param name format name returned by toString() and used for lookups
     * @param includeResources presumably whether exported documents must
     *        bundle their resources (true for pdf) — confirm with callers
     */
    private DocetDocFormat(final String name, final boolean includeResources) {
        this.name = name;
        this.includeResources = includeResources;
    }
    /** Returns the format name, e.g. "html" or "pdf". */
    @Override
    public String toString() {
        return this.name;
    }
    /** @return true if this format carries the includeResources flag */
    public boolean isIncludeResources() {
        return this.includeResources;
    }
public static DocetDocFormat parseDocetRequestByName(final String name) {
return Arrays.asList(DocetDocFormat.values())
.stream()
.filter(req -> req.toString().equals(name)).findFirst().orElse(null);
}
} | apache-2.0 |
apache/tomcat | test/org/apache/coyote/http2/TestHttp2Section_6_1.java | 6884 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.coyote.http2;
import java.util.logging.Level;
import java.util.logging.LogManager;
import org.junit.Assert;
import org.junit.Test;
/**
* Unit tests for Section 6.1 of
* <a href="https://tools.ietf.org/html/rfc7540">RFC 7540</a>.
* <br>
* The order of tests in this class is aligned with the order of the
* requirements in the RFC.
*/
public class TestHttp2Section_6_1 extends Http2TestBase {

    @Test
    public void testDataFrame() throws Exception {
        http2Connect();
        // Disable overhead protection for window update as it breaks the test
        http2Protocol.setOverheadWindowUpdateThreshold(0);
        sendSimplePostRequest(3, null);
        readSimplePostResponse(false);
        // The 128-byte request body is echoed back on stream 3.
        Assert.assertEquals("0-WindowSize-[128]\n" +
                "3-WindowSize-[128]\n" +
                "3-HeadersStart\n" +
                "3-Header-[:status]-[200]\n" +
                "3-Header-[content-length]-[128]\n" +
                "3-Header-[date]-[Wed, 11 Nov 2015 19:18:42 GMT]\n" +
                "3-HeadersEnd\n" +
                "3-Body-128\n" +
                "3-EndOfStream\n", output.getTrace());
    }

    @Test
    public void testDataFrameWithPadding() throws Exception {
        // Extra logging to help diagnose intermittent failures of this test.
        LogManager.getLogManager().getLogger("org.apache.coyote").setLevel(Level.ALL);
        LogManager.getLogManager().getLogger("org.apache.tomcat.util.net").setLevel(Level.ALL);
        try {
            http2Connect();
            // Disable overhead protection for window update as it breaks the
            // test
            http2Protocol.setOverheadWindowUpdateThreshold(0);
            byte[] padding = new byte[8];
            sendSimplePostRequest(3, padding);
            readSimplePostResponse(true);
            // The window updates for padding could occur anywhere since they
            // happen on a different thread to the response.
            // The connection window update is always present if there is
            // padding.
            String trace = output.getTrace();
            // 9 = 8 padding bytes + 1 pad-length byte.
            String paddingWindowUpdate = "0-WindowSize-[9]\n";
            Assert.assertTrue(trace, trace.contains(paddingWindowUpdate));
            trace = trace.replace(paddingWindowUpdate, "");
            // The stream window update may or may not be present depending on
            // timing. Remove it if present.
            if (trace.contains("3-WindowSize-[9]\n")) {
                trace = trace.replace("3-WindowSize-[9]\n", "");
            }
            // 119 = 128-byte body minus 8 padding bytes and 1 pad-length byte.
            Assert.assertEquals("0-WindowSize-[119]\n" +
                    "3-WindowSize-[119]\n" +
                    "3-HeadersStart\n" +
                    "3-Header-[:status]-[200]\n" +
                    "3-Header-[content-length]-[119]\n" +
                    "3-Header-[date]-[Wed, 11 Nov 2015 19:18:42 GMT]\n" +
                    "3-HeadersEnd\n" +
                    "3-Body-119\n" +
                    "3-EndOfStream\n", trace);
        } finally {
            // Restore default log levels regardless of test outcome.
            LogManager.getLogManager().getLogger("org.apache.coyote").setLevel(Level.INFO);
            LogManager.getLogManager().getLogger("org.apache.tomcat.util.net").setLevel(Level.INFO);
        }
    }

    @Test
    public void testDataFrameWithNonZeroPadding() throws Exception {
        http2Connect();
        byte[] padding = new byte[8];
        // Non-zero padding byte: the server is expected to reject the frame
        // with a connection error (GOAWAY).
        padding[4] = 0x01;
        sendSimplePostRequest(3, padding);
        // May see Window updates depending on timing
        skipWindowSizeFrames();
        String trace = output.getTrace();
        Assert.assertTrue(trace, trace.startsWith("0-Goaway-[3]-[1]-["));
    }

    @Test
    public void testDataFrameOnStreamZero() throws Exception {
        http2Connect();
        // 10 bytes = 9-byte frame header + 1 payload byte (length set below).
        byte[] dataFrame = new byte[10];
        // Header
        // length
        ByteUtil.setThreeBytes(dataFrame, 0, 1);
        // type (0 for data)
        // flags (0)
        // stream (0)
        // payload (0)
        os.write(dataFrame);
        os.flush();
        // DATA on stream 0 is a connection error; expect GOAWAY.
        handleGoAwayResponse(1);
    }

    @Test
    public void testDataFrameTooMuchPadding() throws Exception {
        http2Connect();
        byte[] dataFrame = new byte[10];
        // Header
        // length
        ByteUtil.setThreeBytes(dataFrame, 0, 1);
        // type 0 (data)
        // flags 8 (padded)
        dataFrame[4] = 0x08;
        // stream 3
        ByteUtil.set31Bits(dataFrame, 5, 3);
        // payload (pad length of 1)
        // Declared pad length (1) leaves no room for any data in a 1-byte
        // payload, so the server must treat this as a connection error.
        dataFrame[9] = 1;
        os.write(dataFrame);
        os.flush();
        handleGoAwayResponse(1);
    }

    @Test
    public void testDataFrameWithZeroLengthPadding() throws Exception {
        http2Connect();
        // Disable overhead protection for window update as it breaks the test
        http2Protocol.setOverheadWindowUpdateThreshold(0);
        byte[] padding = new byte[0];
        sendSimplePostRequest(3, padding);
        readSimplePostResponse(true);
        // The window updates for padding could occur anywhere since they
        // happen on a different thread to the response.
        // The connection window update is always present if there is
        // padding.
        String trace = output.getTrace();
        // With zero-length padding only the pad-length byte is overhead.
        String paddingWindowUpdate = "0-WindowSize-[1]\n";
        Assert.assertTrue(trace, trace.contains(paddingWindowUpdate));
        trace = trace.replace(paddingWindowUpdate, "");
        // The stream window update may or may not be present depending on
        // timing. Remove it if present.
        paddingWindowUpdate = "3-WindowSize-[1]\n";
        if (trace.contains(paddingWindowUpdate)) {
            trace = trace.replace(paddingWindowUpdate, "");
        }
        Assert.assertEquals("0-WindowSize-[127]\n" +
                "3-WindowSize-[127]\n" +
                "3-HeadersStart\n" +
                "3-Header-[:status]-[200]\n" +
                "3-Header-[content-length]-[127]\n" +
                "3-Header-[date]-[Wed, 11 Nov 2015 19:18:42 GMT]\n" +
                "3-HeadersEnd\n" +
                "3-Body-127\n" +
                "3-EndOfStream\n", trace);
    }
}
| apache-2.0 |
mariusj/org.openntf.domino | domino/externals/javolution/src/main/java/javolution/xml/XMLSerializable.java | 1977 | /*
* Javolution - Java(TM) Solution for Real-Time and Embedded Systems
* Copyright (C) 2007 - Javolution (http://javolution.org/)
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software is
* freely granted, provided that this notice is preserved.
*/
package javolution.xml;
import java.io.Serializable;
/**
* <p> This interface identifies classes supporting XML serialization
* (XML serialization is still possible for classes not implementing this
* interface through dynamic {@link XMLBinding} though).</p>
*
* <p> Typically, classes implementing this interface have a protected static
* {@link XMLFormat} holding their default XML representation.
* For example:[code]
* public final class Complex implements XMLSerializable {
*
* // Use the cartesien form for the default XML representation.
* protected static final XMLFormat<Complex> XML = new XMLFormat<Complex>(Complex.class) {
* public Complex newInstance(Class<Complex> cls, InputElement xml) throws XMLStreamException {
* return Complex.valueOf(xml.getAttribute("real", 0.0),
* xml.getAttribute("imaginary", 0.0));
* }
* public void write(Complex complex, OutputElement xml) throws XMLStreamException {
* xml.setAttribute("real", complex.getReal());
* xml.setAttribute("imaginary", complex.getImaginary());
* }
* public void read(InputElement xml, Complex complex) {
* // Immutable, deserialization occurs at creation, ref. newIntance(...)
* }
* };
* ...
* }[/code]</p>
*
* @author <a href="mailto:jean-marie@dautelle.com">Jean-Marie Dautelle</a>
* @version 4.2, April 15, 2007
*/
public interface XMLSerializable extends Serializable {

    // No method. Tagging interface: implementors advertise that an XMLFormat
    // representation exists for them (see the class-level javadoc above).
}
| apache-2.0 |
jexp/idea2 | plugins/InspectionGadgets/testsrc/com/siyeh/ig/assignment/AssignmentToMethodParameterInspectionTest.java | 489 | package com.siyeh.ig.assignment;
import com.IGInspectionTestCase;
public class AssignmentToMethodParameterInspectionTest extends IGInspectionTestCase {
public void test() throws Exception {
final AssignmentToMethodParameterInspection inspection =
new AssignmentToMethodParameterInspection();
inspection.ignoreTransformationOfOriginalParameter = true;
doTest("com/siyeh/igtest/assignment/method_parameter",
inspection);
}
} | apache-2.0 |
cping/LGame | Java/old/OpenGL-1.0(old_ver)/Loon-backend-JavaSE/src/loon/core/graphics/component/table/TableLayout.java | 4975 | /**
* Copyright 2014
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loon
* @author cping
* @email:javachenpeng@yahoo.com
* @version 0.4.2
*/
package loon.core.graphics.component.table;
import loon.core.graphics.LComponent;
import loon.core.graphics.LContainer;
import loon.core.graphics.device.LColor;
import loon.core.graphics.opengl.GLEx;
import loon.core.graphics.opengl.LTexture;
import loon.utils.collection.ArrayList;
public class TableLayout extends LContainer {

    // Rows of the table; each row manages its own columns.
    private TableLayoutRow[] tableRows;

    // When true, row grid lines and the outer border are painted.
    private boolean grid = true;

    /** Creates a 4x4 table covering the given bounds. */
    public TableLayout(int x, int y, int w, int h) {
        this(x, y, w, h, 4, 4);
    }

    public TableLayout(int x, int y, int w, int h, int cols, int rows) {
        super(x, y, w, h);
        prepareTable(cols, rows);
    }

    protected void renderComponents(GLEx g) {
        for (int i = 0; i < getComponentCount(); i++) {
            getComponents()[i].createUI(g);
        }
        if (grid) {
            for (int i = 0; i < tableRows.length; i++) {
                tableRows[i].paint(g);
            }
            g.drawRect(getX(), getY(), getWidth(), getHeight(), LColor.gray);
        }
    }

    // Intentionally empty: children are drawn by renderComponents instead.
    @Override
    public void createUI(GLEx g, int x, int y, LComponent component,
            LTexture[] buttonImage) {
    }

    // Builds `rows` equal-height rows with `cols` columns each. Integer
    // division means leftover pixels (height % rows) are not distributed.
    private void prepareTable(int cols, int rows) {
        tableRows = new TableLayoutRow[rows];
        if (rows > 0 && cols > 0) {
            int rowHeight = getHeight() / rows;
            for (int i = 0; i < rows; i++) {
                tableRows[i] = new TableLayoutRow(x(), y() + (i * rowHeight),
                        getWidth(), rowHeight, cols);
            }
        }
    }

    /** Places a component in the given cell, replacing any previous occupant. */
    public void setComponent(LComponent component, int col, int row) {
        add(component);
        remove(tableRows[row].getComponent(col));
        tableRows[row].setComponent(component, col);
    }

    public void removeComponent(int col, int row) {
        remove(tableRows[row].getComponent(col));
        tableRows[row].setComponent(null, col);
    }

    /**
     * Inserts a new row with `column` columns at `position`, resizing all
     * rows to equal heights, but only if every existing row can shrink.
     * NOTE(review): uses loon's own (raw/untyped) ArrayList from
     * loon.utils.collection, not java.util — the unchecked casts below rely
    * on its toArray() contract; confirm before changing.
     */
    public void addRow(int column, int position) {
        ArrayList newRows = new ArrayList();
        int newRowHeight = getHeight() / (tableRows.length + 1);
        if (canAddRow(newRowHeight)) {
            // Insert at head, in the middle, or at the tail respectively.
            if (position == 0) {
                newRows.add(new TableLayoutRow(x(), y(), getWidth(),
                        newRowHeight, column));
            }
            for (int i = 0; i < tableRows.length; i++) {
                if (i == position && position != 0) {
                    newRows.add(new TableLayoutRow(x(), y(), getWidth(),
                            newRowHeight, column));
                }
                newRows.add(tableRows[i]);
            }
            if (position == tableRows.length && position != 0) {
                newRows.add(new TableLayoutRow(x(), y(), getWidth(),
                        newRowHeight, column));
            }
            // Re-stack all rows top-to-bottom with the new uniform height.
            for (int i = 0; i < newRows.size(); i++) {
                ((TableLayoutRow) newRows.get(i))
                        .setY(y() + (i * newRowHeight));
                ((TableLayoutRow) newRows.get(i)).setHeight(newRowHeight);
            }
            tableRows = (TableLayoutRow[]) newRows.toArray();
        }
    }

    /** Appends a new row with the given column count. */
    public void addRow(int column) {
        addRow(column, tableRows.length);
    }

    // All rows share a height, so checking the first row is sufficient.
    private boolean canAddRow(int newRowHeight) {
        if (tableRows != null && tableRows.length > 0) {
            return tableRows[0].canSetHeight(newRowHeight);
        }
        return true;
    }

    public boolean setColumnWidth(int width, int col, int row) {
        return tableRows[row].setColumnWidth(width, col);
    }

    /** @return false (and leaves the row untouched) if the row rejects the height */
    public boolean setColumnHeight(int height, int row) {
        if (!tableRows[row].canSetHeight(height)) {
            return false;
        }
        tableRows[row].setHeight(height);
        return true;
    }

    public void setMargin(int leftMargin, int rightMargin, int topMargin,
            int bottomMargin, int col, int row) {
        tableRows[row].getColumn(col).setMargin(leftMargin, rightMargin,
                topMargin, bottomMargin);
    }

    public void setAlignment(int horizontalAlignment, int verticalAlignment,
            int col, int row) {
        tableRows[row].getColumn(col).setHorizontalAlignment(
                horizontalAlignment);
        tableRows[row].getColumn(col).setVerticalAlignment(verticalAlignment);
    }

    public int getRows() {
        return tableRows.length;
    }

    public int getColumns(int row) {
        return tableRows[row].getCoulumnSize();
    }

    // Only shrinks the container if every row accepted the new width.
    @Override
    public void setWidth(int width) {
        boolean couldShrink = true;
        for (int i = 0; i < tableRows.length; i++) {
            if (!tableRows[i].setWidth(width)) {
                couldShrink = false;
            }
        }
        if (couldShrink) {
            super.setWidth(width);
        }
    }

    // NOTE(review): unlike setWidth, this sets the container height first and
    // passes the full height (not height / rows) to every row — confirm that
    // TableLayoutRow.setHeight expects this.
    @Override
    public void setHeight(int height) {
        super.setHeight(height);
        for (int i = 0; i < tableRows.length; i++) {
            tableRows[i].setHeight(height);
        }
    }

    public boolean isGrid() {
        return grid;
    }

    public void setGrid(boolean grid) {
        this.grid = grid;
    }

    @Override
    public String getUIName() {
        return "TableLayout";
    }
}
| apache-2.0 |
mglukhikh/intellij-community | java/debugger/impl/src/com/intellij/debugger/engine/evaluation/expression/CodeFragmentEvaluator.java | 4502 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.engine.evaluation.expression;
import com.intellij.debugger.DebuggerBundle;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluateExceptionUtil;
import com.intellij.debugger.engine.evaluation.EvaluateRuntimeException;
import com.intellij.debugger.jdi.VirtualMachineProxyImpl;
import com.intellij.openapi.diagnostic.Logger;
import java.util.HashMap;
import com.sun.jdi.Value;
import java.util.Map;
/**
* @author lex
*/
public class CodeFragmentEvaluator extends BlockStatementEvaluator{
    private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.engine.evaluation.expression.CodeFragmentEvaluator");

    // Enclosing fragment scope, or null for the outermost fragment. Lookups
    // and assignments for names not declared here are delegated upwards.
    private final CodeFragmentEvaluator myParentFragmentEvaluator;
    // Variables declared inside this fragment. A value is either a JDI Value
    // or a plain Java object (Boolean, Integer, String, ...) supplied as a
    // default initializer; plain objects are mirrored lazily in getValue().
    private final Map<String, Object> mySyntheticLocals = new HashMap<>();

    public CodeFragmentEvaluator(CodeFragmentEvaluator parentFragmentEvaluator) {
        super(null);
        myParentFragmentEvaluator = parentFragmentEvaluator;
    }

    public void setStatements(Evaluator[] evaluators) {
        myStatements = evaluators;
    }

    /**
     * Returns the JDI value of the synthetic local {@code localName},
     * converting a plain-Java default initializer into a VM mirror on demand.
     * Delegates to the parent fragment when the name is not declared here.
     *
     * @throws EvaluateException if the variable is not declared in this or
     *                           any enclosing fragment
     */
    public Value getValue(String localName, VirtualMachineProxyImpl vm) throws EvaluateException {
        if(!mySyntheticLocals.containsKey(localName)) {
            if(myParentFragmentEvaluator != null){
                return myParentFragmentEvaluator.getValue(localName, vm);
            } else {
                throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.variable.not.declared", localName));
            }
        }
        Object value = mySyntheticLocals.get(localName);
        if(value instanceof Value) {
            // Already a JDI value - no conversion required.
            return (Value)value;
        }
        else if(value == null) {
            return null;
        }
        // Explicit unboxing picks the matching primitive mirrorOf overload.
        else if(value instanceof Boolean) {
            return vm.mirrorOf(((Boolean)value).booleanValue());
        }
        else if(value instanceof Byte) {
            return vm.mirrorOf(((Byte)value).byteValue());
        }
        else if(value instanceof Character) {
            return vm.mirrorOf(((Character)value).charValue());
        }
        else if(value instanceof Short) {
            return vm.mirrorOf(((Short)value).shortValue());
        }
        else if(value instanceof Integer) {
            return vm.mirrorOf(((Integer)value).intValue());
        }
        else if(value instanceof Long) {
            return vm.mirrorOf(((Long)value).longValue());
        }
        else if(value instanceof Float) {
            return vm.mirrorOf(((Float)value).floatValue());
        }
        else if(value instanceof Double) {
            return vm.mirrorOf(((Double)value).doubleValue());
        }
        else if(value instanceof String) {
            return vm.mirrorOf((String)value);
        }
        else {
            LOG.error("unknown default initializer type " + value.getClass().getName());
            return null;
        }
    }

    // True if the variable is declared in this fragment or any ancestor.
    private boolean hasValue(String localName) {
        if(!mySyntheticLocals.containsKey(localName)) {
            if(myParentFragmentEvaluator != null){
                return myParentFragmentEvaluator.hasValue(localName);
            } else {
                return false;
            }
        } else {
            return true;
        }
    }

    /**
     * Declares {@code localName} in this fragment with a plain-Java default
     * initializer (not a JDI Value - use setValue for those).
     *
     * @throws EvaluateRuntimeException if the name is already declared here
     *                                  or in an enclosing fragment
     */
    public void setInitialValue(String localName, Object value) {
        LOG.assertTrue(!(value instanceof Value), "use setValue for jdi values");
        if(hasValue(localName)) {
            throw new EvaluateRuntimeException(
                EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.variable.already.declared", localName)));
        }
        mySyntheticLocals.put(localName, value);
    }

    /**
     * Assigns a JDI value to an already-declared variable, walking up the
     * fragment chain to find the declaring scope.
     *
     * @throws EvaluateException if no enclosing fragment declares the name
     */
    public void setValue(String localName, Value value) throws EvaluateException {
        if(!mySyntheticLocals.containsKey(localName)) {
            if(myParentFragmentEvaluator != null){
                myParentFragmentEvaluator.setValue(localName, value);
            } else {
                throw EvaluateExceptionUtil.createEvaluateException(DebuggerBundle.message("evaluation.error.variable.not.declared", localName));
            }
        }
        else {
            mySyntheticLocals.put(localName, value);
        }
    }
}
| apache-2.0 |
BigAppOS/BigApp_Discuz_Android | libs/ImageLib/src/com/kit/imagelib/imagelooker/OnPageSelectedListener.java | 126 | package com.kit.imagelib.imagelooker;
public interface OnPageSelectedListener {
    // Callback fired when a page becomes selected; carries no page index,
    // so implementors must query the pager themselves.
    public void onPageSelected();
}
| apache-2.0 |
ccooper1/OpenDash | src/main/java/org/apereo/lai/Institution.java | 967 | /*******************************************************************************
* Copyright 2015 Unicon (R) Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*******************************************************************************/
/**
*
*/
package org.apereo.lai;
import java.io.Serializable;
/**
* @author ggilbert
*
*/
public interface Institution extends Serializable {
    /** @return display name of the institution */
    String getName();
    /** @return key identifying the institution — presumably an API/OAuth key; confirm with implementations */
    String getKey();
    /** @return secret paired with the key; must never be logged */
    String getSecret();
}
| apache-2.0 |
sunshine-life/GoodsManagement | src/main/java/org/goodsManagement/service/impl/PoiUtils/GetGoodsToExcel.java | 2120 | package org.goodsManagement.service.impl.PoiUtils;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.goodsManagement.po.GetGoodsDto;
import org.goodsManagement.vo.GetGoodsVO;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
/**
* Created by lifei on 2015/9/23.
*/
@Component
public class GetGoodsToExcel {

    /** Columns in the exported sheet: name, item name, item type, quantity. */
    private static final int COLUMN_COUNT = 4;

    /**
     * Exports goods-collection records to an Excel (.xls) workbook.
     *
     * @param list collection records to export (one sheet row per record)
     * @param path destination file path for the generated workbook
     */
    public void addtoExcel(List<GetGoodsVO> list, String path) {
        HSSFWorkbook wb = new HSSFWorkbook();
        HSSFSheet sheet = wb.createSheet("Outgoods");
        // Header row followed by one row per record.
        String[] headers = { "姓名", "物品名称号", "物品型号", "物品数量" };
        Object[][] value = new Object[list.size() + 1][COLUMN_COUNT];
        for (int m = 0; m < headers.length; m++) {
            value[0][m] = headers[m];
        }
        for (int i = 0; i < list.size(); i++) {
            // No cast needed: list is already List<GetGoodsVO>.
            GetGoodsVO record = list.get(i);
            value[i + 1][0] = record.getStaffname();
            value[i + 1][1] = record.getGoodname();
            value[i + 1][2] = record.getGoodtype();
            value[i + 1][3] = record.getGetnumber();
        }
        ExcelUtils.writeArrayToExcel(wb, sheet, list.size() + 1, COLUMN_COUNT, value);
        ExcelUtils.writeWorkbook(wb, path);
    }
}
| apache-2.0 |
kevinearls/camel | platforms/spring-boot/components-starter/camel-bean-validator-starter/src/main/java/org/apache/camel/component/bean/validator/springboot/BeanValidatorComponentAutoConfiguration.java | 6273 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.bean.validator.springboot;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import org.apache.camel.CamelContext;
import org.apache.camel.component.bean.validator.BeanValidatorComponent;
import org.apache.camel.spi.ComponentCustomizer;
import org.apache.camel.spi.HasId;
import org.apache.camel.spring.boot.CamelAutoConfiguration;
import org.apache.camel.spring.boot.ComponentConfigurationProperties;
import org.apache.camel.spring.boot.util.CamelPropertiesHelper;
import org.apache.camel.spring.boot.util.ConditionalOnCamelContextAndAutoConfigurationBeans;
import org.apache.camel.spring.boot.util.GroupCondition;
import org.apache.camel.spring.boot.util.HierarchicalPropertiesEvaluator;
import org.apache.camel.support.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.AutoConfigureAfter;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Conditional;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
/**
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@Configuration
@Conditional({ConditionalOnCamelContextAndAutoConfigurationBeans.class,
BeanValidatorComponentAutoConfiguration.GroupConditions.class})
@AutoConfigureAfter(CamelAutoConfiguration.class)
@EnableConfigurationProperties({ComponentConfigurationProperties.class,
BeanValidatorComponentConfiguration.class})
public class BeanValidatorComponentAutoConfiguration {
private static final Logger LOGGER = LoggerFactory
.getLogger(BeanValidatorComponentAutoConfiguration.class);
@Autowired
private ApplicationContext applicationContext;
@Autowired
private CamelContext camelContext;
@Autowired
private BeanValidatorComponentConfiguration configuration;
@Autowired(required = false)
private List<ComponentCustomizer<BeanValidatorComponent>> customizers;
    // Activates this auto-configuration only when the camel.component /
    // camel.component.bean-validator property groups are enabled.
    static class GroupConditions extends GroupCondition {
        public GroupConditions() {
            super("camel.component", "camel.component.bean-validator");
        }
    }
    @Lazy
    @Bean(name = "bean-validator-component")
    @ConditionalOnMissingBean(BeanValidatorComponent.class)
    public BeanValidatorComponent configureBeanValidatorComponent()
            throws Exception {
        // NOTE: this file is generated by camel-package-maven-plugin — do not
        // hand-edit; regenerate instead (see the class-level comment).
        BeanValidatorComponent component = new BeanValidatorComponent();
        component.setCamelContext(camelContext);
        // Flatten the configuration bean's properties into a map.
        Map<String, Object> parameters = new HashMap<>();
        IntrospectionSupport.getProperties(configuration, parameters, null,
                false);
        // Expand generated *NestedConfiguration holders into instances of the
        // real nested class before applying them to the component.
        for (Map.Entry<String, Object> entry : parameters.entrySet()) {
            Object value = entry.getValue();
            Class<?> paramClass = value.getClass();
            if (paramClass.getName().endsWith("NestedConfiguration")) {
                Class nestedClass = null;
                try {
                    nestedClass = (Class) paramClass.getDeclaredField(
                            "CAMEL_NESTED_CLASS").get(null);
                    HashMap<String, Object> nestedParameters = new HashMap<>();
                    IntrospectionSupport.getProperties(value, nestedParameters,
                            null, false);
                    Object nestedProperty = nestedClass.newInstance();
                    CamelPropertiesHelper.setCamelProperties(camelContext,
                            nestedProperty, nestedParameters, false);
                    entry.setValue(nestedProperty);
                } catch (NoSuchFieldException e) {
                    // Ignored: nested configuration without the marker field.
                }
            }
        }
        CamelPropertiesHelper.setCamelProperties(camelContext, component,
                parameters, false);
        // Apply user-supplied customizers whose property group is enabled.
        if (ObjectHelper.isNotEmpty(customizers)) {
            for (ComponentCustomizer<BeanValidatorComponent> customizer : customizers) {
                boolean useCustomizer = (customizer instanceof HasId)
                        ? HierarchicalPropertiesEvaluator.evaluate(
                                applicationContext.getEnvironment(),
                                "camel.component.customizer",
                                "camel.component.bean-validator.customizer",
                                ((HasId) customizer).getId())
                        : HierarchicalPropertiesEvaluator.evaluate(
                                applicationContext.getEnvironment(),
                                "camel.component.customizer",
                                "camel.component.bean-validator.customizer");
                if (useCustomizer) {
                    LOGGER.debug("Configure component {}, with customizer {}",
                            component, customizer);
                    customizer.customize(component);
                }
            }
        }
        return component;
    }
} | apache-2.0 |
ddebrunner/streamsx.topology | java/src/com/ibm/streamsx/topology/internal/logging/Logging.java | 800 | /*
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2019
*/
package com.ibm.streamsx.topology.internal.logging;
import java.util.logging.Level;
import java.util.logging.Logger;
public interface Logging {

    /**
     * Sets the root logger's level from a Python logging integer level
     * (DEBUG=10, INFO=20, WARNING=30, ERROR=40, CRITICAL=50).
     *
     * <p>Mapping: &gt;=40 -&gt; SEVERE, &gt;=30 -&gt; WARNING,
     * &gt;=20 -&gt; CONFIG, anything lower -&gt; FINE.</p>
     *
     * @param levelS decimal string holding the Python logging level
     * @throws NumberFormatException if {@code levelS} is not a valid integer
     */
    public static void setRootLevels(String levelS) {
        // parseInt avoids the needless boxing of Integer.valueOf.
        final int loggingLevel = Integer.parseInt(levelS);
        final Level level;
        if (loggingLevel >= 40) {
            level = Level.SEVERE;
        } else if (loggingLevel >= 30) {
            level = Level.WARNING;
        } else if (loggingLevel >= 20) {
            level = Level.CONFIG;
        } else {
            level = Level.FINE;
        }
        Logger.getLogger("").setLevel(level);
    }
}
| apache-2.0 |
componavt/wikokit | android/common_wiki_android/src/wikokit/base/wikt/db/Decompressor.java | 1533 | package wikokit.base.wikt.db;
import android.util.Log;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/** Decompress ziped file.
*
* @see http://www.jondev.net/articles/Unzipping_Files_with_Android_%28Programmatically%29
*/
public class Decompressor {
private String _zipFile;
private String _location;
public Decompressor(String zipFile, String location) {
_zipFile = zipFile;
_location = location;
_dirChecker("");
}
public void unzip() {
try {
FileInputStream fin = new FileInputStream(_zipFile);
ZipInputStream zin = new ZipInputStream(fin);
ZipEntry ze = null;
while ((ze = zin.getNextEntry()) != null) {
Log.v("Decompress", "Unzipping " + ze.getName());
if(ze.isDirectory()) {
_dirChecker(ze.getName());
} else {
FileOutputStream fout = new FileOutputStream(_location + ze.getName());
for (int c = zin.read(); c != -1; c = zin.read()) {
fout.write(c);
}
zin.closeEntry();
fout.close();
}
}
zin.close();
} catch(Exception e) {
Log.e("Decompress", "unzip", e);
}
}
private void _dirChecker(String dir) {
File f = new File(_location + dir);
if(!f.isDirectory()) {
f.mkdirs();
}
}
} | apache-2.0 |
Fabryprog/camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/tx/JMSTransactionalClientWithRollbackTest.java | 2206 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms.tx;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.spring.CamelSpringTestSupport;
import org.junit.Test;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
* Simple unit test for transaction client EIP pattern and JMS.
*/
public class JMSTransactionalClientWithRollbackTest extends CamelSpringTestSupport {
protected ClassPathXmlApplicationContext createApplicationContext() {
return new ClassPathXmlApplicationContext(
"/org/apache/camel/component/jms/tx/JMSTransactionalClientWithRollbackTest.xml");
}
@Test
public void testTransactionSuccess() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:result");
mock.expectedMessageCount(1);
mock.expectedBodiesReceived("Bye World");
// success at 3rd attempt
mock.message(0).header("count").isEqualTo(3);
template.sendBody("activemq:queue:okay", "Hello World");
mock.assertIsSatisfied();
}
public static class MyProcessor implements Processor {
private int count;
public void process(Exchange exchange) throws Exception {
exchange.getIn().setBody("Bye World");
exchange.getIn().setHeader("count", ++count);
}
}
} | apache-2.0 |
albdum/uprove | src/main/java/com/microsoft/uprove/UProveToken.java | 6515 | //*********************************************************
//
// Copyright (c) Microsoft. All rights reserved.
// This code is licensed under the Apache License Version 2.0.
// THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
// ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
// IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
// PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
//
//*********************************************************
package com.microsoft.uprove;
import java.util.Arrays;
/**
 * Specifies a U-Prove token: the issuer parameters it was issued under,
 * its public key, token/prover information fields, the issuer signature
 * components (sigmaZ, sigmaC, sigmaR) and the Device-protection flag.
 */
public class UProveToken {

    private byte[] issuerParametersUID;
    private byte[] publicKey;
    private byte[] tokenInformation;
    private byte[] proverInformation;
    private byte[] sigmaZ;
    private byte[] sigmaC;
    private byte[] sigmaR;
    private boolean isDeviceProtected = false;

    /**
     * Constructs a new, empty U-Prove token.
     */
    public UProveToken() {
        super();
    }

    /**
     * Constructs a fully populated U-Prove token.
     *
     * @param issuerParametersUID an issuer parameters UID.
     * @param publicKey a public key.
     * @param tokenInformation a token information value.
     * @param proverInformation a prover information value.
     * @param sigmaZ a sigmaZ value.
     * @param sigmaC a sigmaC value.
     * @param sigmaR a sigmaR value.
     * @param isDeviceProtected indicates if the token is Device-protected.
     */
    public UProveToken(byte[] issuerParametersUID, byte[] publicKey,
                       byte[] tokenInformation, byte[] proverInformation,
                       byte[] sigmaZ, byte[] sigmaC,
                       byte[] sigmaR, boolean isDeviceProtected) {
        // NOTE: arrays are stored by reference, not defensively copied,
        // matching the long-standing contract of this class.
        this.issuerParametersUID = issuerParametersUID;
        this.publicKey = publicKey;
        this.tokenInformation = tokenInformation;
        this.proverInformation = proverInformation;
        this.sigmaZ = sigmaZ;
        this.sigmaC = sigmaC;
        this.sigmaR = sigmaR;
        this.isDeviceProtected = isDeviceProtected;
    }

    /** @return the issuer parameters UID value. */
    public byte[] getIssuerParametersUID() {
        return issuerParametersUID;
    }

    /** @param issuerParametersUID the issuer parameters UID value to set. */
    public void setIssuerParametersUID(byte[] issuerParametersUID) {
        this.issuerParametersUID = issuerParametersUID;
    }

    /** @return the public key value. */
    public byte[] getPublicKey() {
        return publicKey;
    }

    /** @param publicKey the public key value to set. */
    public void setPublicKey(byte[] publicKey) {
        this.publicKey = publicKey;
    }

    /** @return the token information value. */
    public byte[] getTokenInformation() {
        return tokenInformation;
    }

    /** @param tokenInformation the token information value to set. */
    public void setTokenInformation(byte[] tokenInformation) {
        this.tokenInformation = tokenInformation;
    }

    /** @return the prover information value. */
    public byte[] getProverInformation() {
        return proverInformation;
    }

    /** @param proverInformation the prover information value to set. */
    public void setProverInformation(byte[] proverInformation) {
        this.proverInformation = proverInformation;
    }

    /** @return the sigmaZ value. */
    public byte[] getSigmaZ() {
        return sigmaZ;
    }

    /** @param sigmaZ the sigmaZ value to set. */
    public void setSigmaZ(byte[] sigmaZ) {
        this.sigmaZ = sigmaZ;
    }

    /** @return the sigmaC value. */
    public byte[] getSigmaC() {
        return sigmaC;
    }

    /** @param sigmaC the sigmaC value to set. */
    public void setSigmaC(byte[] sigmaC) {
        this.sigmaC = sigmaC;
    }

    /** @return the sigmaR value. */
    public byte[] getSigmaR() {
        return sigmaR;
    }

    /** @param sigmaR the sigmaR value to set. */
    public void setSigmaR(byte[] sigmaR) {
        this.sigmaR = sigmaR;
    }

    /**
     * Returns true if the token is Device-protected, false otherwise.
     * @return the Device-protected boolean.
     */
    boolean isDeviceProtected() {
        return isDeviceProtected;
    }

    /**
     * Sets the boolean indicating if the token is Device-protected.
     * @param isDeviceProtected true if the token is Device-protected.
     */
    void setIsDeviceProtected(boolean isDeviceProtected) {
        this.isDeviceProtected = isDeviceProtected;
    }

    /**
     * Indicates whether some other object is "equal to" this one.
     * Two tokens are equal when every field is element-wise equal.
     *
     * @param o the reference object with which to compare.
     * @return <code>true</code> if this object is the same as the
     *         <code>o</code> argument; <code>false</code> otherwise.
     */
    @Override
    public boolean equals(final Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof UProveToken)) {
            return false;
        }
        final UProveToken other = (UProveToken) o;
        return this.isDeviceProtected == other.isDeviceProtected
            && Arrays.equals(issuerParametersUID, other.issuerParametersUID)
            && Arrays.equals(publicKey, other.publicKey)
            && Arrays.equals(tokenInformation, other.tokenInformation)
            && Arrays.equals(proverInformation, other.proverInformation)
            && Arrays.equals(sigmaZ, other.sigmaZ)
            && Arrays.equals(sigmaC, other.sigmaC)
            && Arrays.equals(sigmaR, other.sigmaR);
    }

    /**
     * Returns a hash code value for the object, consistent with
     * {@link #equals(Object)}.
     *
     * @return a hash code value for the object.
     */
    @Override
    public int hashCode() {
        // Same accumulation as the historical implementation:
        // seed 237, multiplier 201, fields folded in declaration order.
        final byte[][] fields = {
            issuerParametersUID, publicKey, tokenInformation,
            proverInformation, sigmaZ, sigmaC, sigmaR
        };
        int result = 237;
        for (final byte[] field : fields) {
            result = 201 * result + Arrays.hashCode(field);
        }
        return result + (isDeviceProtected ? 201 : 0);
    }
}
| apache-2.0 |
shanti/olio | webapp/java/trunk/ws/apps/webapp/src/java/org/apache/olio/webapp/cache/Cache.java | 2298 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.olio.webapp.cache;
/**
* The cache interface provides all operations necessary for the cache.
* We could have extended java.util.Map but that would make a lot of
* unnecessary work for the scope of this project. We can always implement that
* interface later if desired.
*/
public interface Cache {

    /**
     * Gets the cached value based on a key.
     *
     * @param key The key
     * @return The cached object, or null if none is available
     */
    Object get(String key);

    /**
     * Sets a cached item using a key.
     *
     * @param key The key
     * @param value The object to cache.
     */
    void put(String key, Object value);

    /**
     * Sets a cached item using a key, with an explicit expiry.
     *
     * @param key The key
     * @param value The object to cache.
     * @param timeToLive Time to cache this object in seconds
     */
    void put(String key, Object value, long timeToLive);

    /**
     * Invalidates a cached item using a key.
     *
     * @param key The key of the entry to invalidate
     * @return true on success
     */
    boolean invalidate(String key);

    /**
     * Checks if the cache needs a refresh based on the existence of the cached
     * object and of its refresh semaphore.
     *
     * @param cacheObjPresent false if the cache object for this key exists
     * @param key The key
     * @return true if the cache object needs a refresh
     */
    boolean needsRefresh (boolean cacheObjPresent, String key);

    /**
     * Signals that the refresh for {@code key} has completed.
     * NOTE(review): semantics inferred from the method name and the
     * needsRefresh() contract above — confirm against the implementations.
     *
     * @param key The key whose refresh has finished
     * @param timeToNextRefresh time until the next refresh is due
     * @throws CacheException if the refresh completion cannot be recorded
     */
    void doneRefresh (String key, long timeToNextRefresh) throws CacheException;

    /**
     * @return true if this cache stores entries locally in this JVM
     *         (NOTE(review): inferred from the name — confirm against implementations)
     */
    boolean isLocal();
}
| apache-2.0 |
apache/tapestry3 | tapestry-examples/VlibBeans/src/org/apache/tapestry/vlib/ejb/IBookQuery.java | 2080 | // Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry.vlib.ejb;
import java.rmi.RemoteException;
import javax.ejb.EJBObject;
/**
 * Remote interface for the BookQuery stateless session bean.
 * Queries populate a server-side result set; clients page through it
 * with {@link #get(int, int)} after issuing one of the query methods.
 *
 * @version $Id$
 * @author Howard Lewis Ship
 *
 **/

public interface IBookQuery extends EJBObject
{
    /**
     * Returns the total number of results rows in the query.
     *
     * @throws RemoteException on remote-invocation failure
     **/

    public int getResultCount() throws RemoteException;

    /**
     * Returns a selected subset of the results.
     *
     * @param offset index of the first result row to return
     * @param length maximum number of rows to return
     * @throws RemoteException on remote-invocation failure
     **/

    public Book[] get(int offset, int length) throws RemoteException;

    /**
     * Performs a query of books with the matching title and (optionally) publisher.
     *
     * @param parameters defines subset of books to return.
     * @param sortOrdering order of items in result set.
     * @return presumably the number of matching rows (cf. getResultCount()) —
     *         NOTE(review): confirm with the bean implementation
     * @throws RemoteException on remote-invocation failure
     **/

    public int masterQuery(MasterQueryParameters parameters, SortOrdering sortOrdering) throws RemoteException;

    /**
     * Queries on books owned by a given person.
     *
     * @param ownerPK primary key of the owner
     * @param sortOrdering order of items in result set.
     * @throws RemoteException on remote-invocation failure
     **/

    public int ownerQuery(Integer ownerPK, SortOrdering sortOrdering) throws RemoteException;

    /**
     * Queries on books held by a given person.
     *
     * @param holderPK primary key of the holder
     * @param sortOrdering order of items in result set.
     * @throws RemoteException on remote-invocation failure
     **/

    public int holderQuery(Integer holderPK, SortOrdering sortOrdering) throws RemoteException;

    /**
     * Queries the list of books held by the borrower but not owned by the borrower.
     *
     * @param borrowerPK primary key of the borrower
     * @param sortOrdering order of items in result set.
     * @throws RemoteException on remote-invocation failure
     **/

    public int borrowerQuery(Integer borrowerPK, SortOrdering sortOrdering) throws RemoteException;
}
apixandru/intellij-community | platform/structuralsearch/source/com/intellij/structuralsearch/impl/matcher/compiler/PatternCompiler.java | 20077 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.structuralsearch.impl.matcher.compiler;
import com.intellij.codeInsight.template.Template;
import com.intellij.codeInsight.template.TemplateManager;
import com.intellij.dupLocator.util.NodeFilter;
import com.intellij.lang.Language;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiErrorElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiRecursiveElementWalkingVisitor;
import com.intellij.psi.impl.source.tree.LeafElement;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.structuralsearch.*;
import com.intellij.structuralsearch.impl.matcher.CompiledPattern;
import com.intellij.structuralsearch.impl.matcher.MatcherImplUtil;
import com.intellij.structuralsearch.impl.matcher.PatternTreeContext;
import com.intellij.structuralsearch.impl.matcher.filters.LexicalNodesFilter;
import com.intellij.structuralsearch.impl.matcher.handlers.MatchingHandler;
import com.intellij.structuralsearch.impl.matcher.handlers.SubstitutionHandler;
import com.intellij.structuralsearch.impl.matcher.predicates.*;
import com.intellij.structuralsearch.plugin.ui.Configuration;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.SmartList;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntHashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Compiles the handlers for usability: turns the textual structural-search
 * pattern from {@link MatchOptions} into a {@link CompiledPattern} whose
 * typed variables are backed by {@link SubstitutionHandler}s with the
 * configured predicates attached.
 */
public class PatternCompiler {
  // Captured by compilePattern() in unit-test mode only, so tests can
  // inspect the generated search plan via getLastFindPlan().
  private static CompileContext lastTestingContext;

  /**
   * Compiles the pattern described by {@code options} for the given project.
   *
   * @param project the current project
   * @param options pattern text, file type/dialect and per-variable constraints
   * @return the compiled pattern, possibly narrowed to a {@link LocalSearchScope}
   *         of candidate files found by the optimizing search helper
   * @throws MalformedPatternException if the pattern cannot be parsed or
   *         references unknown variables
   * @throws NoMatchFoundException if optimization proves no file in scope can match
   * @throws UnsupportedOperationException if no profile supports the language
   */
  public static CompiledPattern compilePattern(final Project project, final MatchOptions options)
    throws MalformedPatternException, NoMatchFoundException, UnsupportedOperationException {
    FileType fileType = options.getFileType();
    assert fileType instanceof LanguageFileType;
    Language language = ((LanguageFileType)fileType).getLanguage();
    StructuralSearchProfile profile = StructuralSearchUtil.getProfileByLanguage(language);
    assert profile != null;
    CompiledPattern result = profile.createCompiledPattern();
    final String[] prefixes = result.getTypedVarPrefixes();
    assert prefixes.length > 0;
    final CompileContext context = new CompileContext(result, options, project);
    if (ApplicationManager.getApplication().isUnitTestMode()) lastTestingContext = context;
    try {
      List<PsiElement> elements = compileByAllPrefixes(project, options, result, context, prefixes);
      final CompiledPattern pattern = context.getPattern();
      checkForUnknownVariables(pattern, elements);
      pattern.setNodes(elements);
      // If the optimizing search helper scanned indices, restrict the scope to the
      // candidate files it found; an empty candidate set means nothing can match.
      if (context.getSearchHelper().doOptimizing() && context.getSearchHelper().isScannedSomething()) {
        final Set<PsiFile> set = context.getSearchHelper().getFilesSetToScan();
        final List<PsiFile> filesToScan = new SmartList<>();
        final GlobalSearchScope scope = (GlobalSearchScope)options.getScope();
        for (final PsiFile file : set) {
          if (!scope.contains(file.getVirtualFile())) {
            continue;
          }
          filesToScan.add(file);
        }
        if (filesToScan.size() == 0) {
          throw new NoMatchFoundException(SSRBundle.message("ssr.will.not.find.anything", scope.getDisplayName()));
        }
        result.setScope(new LocalSearchScope(PsiUtilCore.toPsiElementArray(filesToScan)));
      }
    } finally {
      context.clear();
    }
    return result;
  }

  /**
   * Walks the compiled pattern nodes and throws {@link MalformedPatternException}
   * if a typed-variable leaf has no registered handler (i.e. the pattern text
   * mentions a variable that was never defined).
   */
  private static void checkForUnknownVariables(final CompiledPattern pattern, List<PsiElement> elements) {
    for (PsiElement element : elements) {
      element.accept(new PsiRecursiveElementWalkingVisitor() {
        @Override
        public void visitElement(PsiElement element) {
          // Subtrees that already carry a handler were produced by the compiler
          // itself and do not need to be validated again.
          if (element.getUserData(CompiledPattern.HANDLER_KEY) != null) {
            return;
          }
          super.visitElement(element);
          if (!(element instanceof LeafElement) || !pattern.isTypedVar(element)) {
            return;
          }
          final MatchingHandler handler = pattern.getHandler(pattern.getTypedVarString(element));
          if (handler == null) {
            throw new MalformedPatternException();
          }
        }
      });
    }
  }

  /** Test-only accessor: returns the search plan built for the last compiled pattern. */
  public static String getLastFindPlan() {
    return ((TestModeOptimizingSearchHelper)lastTestingContext.getSearchHelper()).getSearchPlan();
  }

  /**
   * Compiles the pattern, first trying a single prefix for every typed variable;
   * if the resulting tree contains parse errors it retries with per-variable
   * prefix combinations (see {@link #compileByPrefixes}).
   */
  @NotNull
  private static List<PsiElement> compileByAllPrefixes(Project project,
                                                       MatchOptions options,
                                                       CompiledPattern pattern,
                                                       CompileContext context,
                                                       String[] applicablePrefixes) throws MalformedPatternException {
    if (applicablePrefixes.length == 0) {
      return Collections.emptyList();
    }
    List<PsiElement> elements = doCompile(project, options, pattern, new ConstantPrefixProvider(applicablePrefixes[0]), context);
    if (elements.isEmpty()) {
      return elements;
    }
    final PsiFile file = elements.get(0).getContainingFile();
    if (file == null) {
      return elements;
    }
    final PsiElement last = elements.get(elements.size() - 1);
    final Pattern[] patterns = new Pattern[applicablePrefixes.length];
    for (int i = 0; i < applicablePrefixes.length; i++) {
      // Each regex locates occurrences of "<prefix><identifier>" in the parsed text.
      patterns[i] = Pattern.compile(StructuralSearchUtil.shieldRegExpMetaChars(applicablePrefixes[i]) + "\\w+\\b");
    }
    final int[] varEndOffsets = findAllTypedVarOffsets(file, patterns);
    final int patternEndOffset = last.getTextRange().getEndOffset();
    // If the tree parsed cleanly with the single prefix, keep it; otherwise try
    // a per-variable combination of prefixes.
    if (elements.size() == 0 ||
        checkErrorElements(file, patternEndOffset, patternEndOffset, varEndOffsets, true) != Boolean.TRUE) {
      return elements;
    }
    final int varCount = varEndOffsets.length;
    final String[] prefixSequence = new String[varCount];
    for (int i = 0; i < varCount; i++) {
      prefixSequence[i] = applicablePrefixes[0];
    }
    final List<PsiElement> finalElements =
      compileByPrefixes(project, options, pattern, context, applicablePrefixes, patterns, prefixSequence, 0);
    return finalElements != null
           ? finalElements
           : doCompile(project, options, pattern, new ConstantPrefixProvider(applicablePrefixes[0]), context);
  }

  /**
   * Recursively searches for a prefix assignment (one prefix per typed variable,
   * chosen from {@code applicablePrefixes}) that lets the pattern parse without
   * errors. Returns the compiled elements on success, or null when no assignment
   * starting from {@code index} works.
   */
  @Nullable
  private static List<PsiElement> compileByPrefixes(Project project,
                                                    MatchOptions options,
                                                    CompiledPattern pattern,
                                                    CompileContext context,
                                                    String[] applicablePrefixes,
                                                    Pattern[] substitutionPatterns,
                                                    String[] prefixSequence,
                                                    int index) throws MalformedPatternException {
    if (index >= prefixSequence.length) {
      // All variables have a prefix assigned: compile and accept only if the
      // tree has no disqualifying error elements.
      final List<PsiElement> elements = doCompile(project, options, pattern, new ArrayPrefixProvider(prefixSequence), context);
      if (elements.isEmpty()) {
        return elements;
      }
      final PsiElement parent = elements.get(0).getParent();
      final PsiElement last = elements.get(elements.size() - 1);
      final int[] varEndOffsets = findAllTypedVarOffsets(parent.getContainingFile(), substitutionPatterns);
      final int patternEndOffset = last.getTextRange().getEndOffset();
      return checkErrorElements(parent, patternEndOffset, patternEndOffset, varEndOffsets, false) != Boolean.TRUE
             ? elements
             : null;
    }
    String[] alternativeVariant = null;
    for (String applicablePrefix : applicablePrefixes) {
      prefixSequence[index] = applicablePrefix;
      List<PsiElement> elements = doCompile(project, options, pattern, new ArrayPrefixProvider(prefixSequence), context);
      if (elements.isEmpty()) {
        return elements;
      }
      final PsiFile file = elements.get(0).getContainingFile();
      if (file == null) {
        return elements;
      }
      final int[] varEndOffsets = findAllTypedVarOffsets(file, substitutionPatterns);
      final int offset = varEndOffsets[index];
      final int patternEndOffset = elements.get(elements.size() - 1).getTextRange().getEndOffset();
      final Boolean result = checkErrorElements(file, offset, patternEndOffset, varEndOffsets, false);
      if (result == Boolean.TRUE) {
        // This prefix makes the current variable's position a hard error; try the next one.
        continue;
      }
      if (result == Boolean.FALSE || (result == null && alternativeVariant == null)) {
        // FALSE: clean so far — commit to this prefix and recurse.
        // null: ambiguous — remember it as a fallback variant but keep looking.
        final List<PsiElement> finalElements =
          compileByPrefixes(project, options, pattern, context, applicablePrefixes, substitutionPatterns, prefixSequence, index + 1);
        if (finalElements != null) {
          if (result == Boolean.FALSE) {
            return finalElements;
          }
          alternativeVariant = new String[prefixSequence.length];
          System.arraycopy(prefixSequence, 0, alternativeVariant, 0, prefixSequence.length);
        }
      }
    }
    return alternativeVariant != null ?
           compileByPrefixes(project, options, pattern, context, applicablePrefixes, substitutionPatterns, alternativeVariant, index + 1) :
           null;
  }

  /**
   * Returns the sorted end offsets of every typed-variable occurrence in
   * {@code file}, located by matching the given prefix regexes inside leaf text.
   */
  @NotNull
  private static int[] findAllTypedVarOffsets(final PsiFile file, final Pattern[] substitutionPatterns) {
    final TIntHashSet result = new TIntHashSet();
    file.accept(new PsiRecursiveElementWalkingVisitor() {
      @Override
      public void visitElement(PsiElement element) {
        super.visitElement(element);
        if (element instanceof LeafElement) {
          final String text = element.getText();
          for (Pattern pattern : substitutionPatterns) {
            final Matcher matcher = pattern.matcher(text);
            while (matcher.find()) {
              result.add(element.getTextRange().getStartOffset() + matcher.end());
            }
          }
        }
      }
    });
    final int[] resultArray = result.toArray();
    Arrays.sort(resultArray);
    return resultArray;
  }

  /**
   * False: there are no error elements before offset, except patternEndOffset
   * Null: there are only error elements located exactly after template variables or at the end of the pattern
   * True: otherwise
   */
  @Nullable
  private static Boolean checkErrorElements(PsiElement element,
                                            final int offset,
                                            final int patternEndOffset,
                                            final int[] varEndOffsets,
                                            final boolean strict) {
    final TIntArrayList errorOffsets = new TIntArrayList();
    final boolean[] containsErrorTail = {false};
    final TIntHashSet varEndOffsetsSet = new TIntHashSet(varEndOffsets);
    element.accept(new PsiRecursiveElementWalkingVisitor() {
      @Override
      public void visitErrorElement(PsiErrorElement element) {
        super.visitErrorElement(element);
        final int startOffset = element.getTextRange().getStartOffset();
        // In strict mode an error right after a variable still counts as an error.
        if ((strict || !varEndOffsetsSet.contains(startOffset)) && startOffset != patternEndOffset) {
          errorOffsets.add(startOffset);
        }
        if (startOffset == offset) {
          containsErrorTail[0] = true;
        }
      }
    });
    for (int i = 0; i < errorOffsets.size(); i++) {
      final int errorOffset = errorOffsets.get(i);
      if (errorOffset <= offset) {
        return true;
      }
    }
    return containsErrorTail[0] ? null : false;
  }

  /** Supplies, per typed-variable index, the prefix to prepend to its name. */
  private interface PrefixProvider {
    String getPrefix(int varIndex);
  }

  /** Uses the same prefix for every typed variable. */
  private static class ConstantPrefixProvider implements PrefixProvider {
    private final String myPrefix;

    ConstantPrefixProvider(String prefix) {
      myPrefix = prefix;
    }

    @Override
    public String getPrefix(int varIndex) {
      return myPrefix;
    }
  }

  /** Uses a fixed per-variable prefix sequence; null past the end of the array. */
  private static class ArrayPrefixProvider implements PrefixProvider {
    private final String[] myPrefixes;

    ArrayPrefixProvider(String[] prefixes) {
      myPrefixes = prefixes;
    }

    @Override
    public String getPrefix(int varIndex) {
      if (varIndex >= myPrefixes.length) return null;
      return myPrefixes[varIndex];
    }
  }

  /**
   * Builds the pattern text (replacing each template variable with
   * prefix + name), creates a SubstitutionHandler plus predicates for every
   * variable, parses the text into a PSI tree and returns the non-lexical
   * top-level nodes.
   */
  private static List<PsiElement> doCompile(Project project,
                                            MatchOptions options,
                                            CompiledPattern result,
                                            PrefixProvider prefixProvider,
                                            CompileContext context) throws MalformedPatternException {
    result.clearHandlers();
    final StringBuilder buf = new StringBuilder();
    // Reuse the live-template engine to find $var$ segments inside the pattern text.
    Template template = TemplateManager.getInstance(project).createTemplate("","",options.getSearchPattern());
    int segmentsCount = template.getSegmentsCount();
    String text = template.getTemplateText();
    int prevOffset = 0;
    for(int i=0;i<segmentsCount;++i) {
      final int offset = template.getSegmentOffset(i);
      final String name = template.getSegmentName(i);
      final String prefix = prefixProvider.getPrefix(i);
      if (prefix == null) {
        throw new MalformedPatternException();
      }
      buf.append(text.substring(prevOffset,offset));
      buf.append(prefix);
      buf.append(name);
      MatchVariableConstraint constraint = options.getVariableConstraint(name);
      if (constraint==null) {
        // no constraint was edited for this variable; register defaults
        constraint = new MatchVariableConstraint();
        constraint.setName( name );
        options.addVariableConstraint(constraint);
      }
      SubstitutionHandler handler = result.createSubstitutionHandler(
        name,
        prefix + name,
        constraint.isPartOfSearchResults(),
        constraint.getMinCount(),
        constraint.getMaxCount(),
        constraint.isGreedy()
      );
      if(constraint.isWithinHierarchy()) {
        handler.setSubtype(true);
      }
      if(constraint.isStrictlyWithinHierarchy()) {
        handler.setStrictSubtype(true);
      }
      MatchPredicate predicate;
      if (!StringUtil.isEmptyOrSpaces(constraint.getRegExp())) {
        predicate = new RegExpPredicate(
          constraint.getRegExp(),
          options.isCaseSensitiveMatch(),
          name,
          constraint.isWholeWordsOnly(),
          constraint.isPartOfSearchResults()
        );
        if (constraint.isInvertRegExp()) {
          predicate = new NotPredicate(predicate);
        }
        addPredicate(handler,predicate);
      }
      if (constraint.isReference()) {
        predicate = new ReferencePredicate( constraint.getNameOfReferenceVar() );
        if (constraint.isInvertReference()) {
          predicate = new NotPredicate(predicate);
        }
        addPredicate(handler,predicate);
      }
      addExtensionPredicates(options, constraint, handler);
      addScriptConstraint(project, name, constraint, handler);
      if (!StringUtil.isEmptyOrSpaces(constraint.getContainsConstraint())) {
        predicate = new ContainsPredicate(name, constraint.getContainsConstraint());
        if (constraint.isInvertContainsConstraint()) {
          predicate = new NotPredicate(predicate);
        }
        addPredicate(handler,predicate);
      }
      if (!StringUtil.isEmptyOrSpaces(constraint.getWithinConstraint())) {
        // "within" is only legal on the special context variable handled below
        assert false;
      }
      prevOffset = offset;
    }
    // The context variable is not part of the pattern text but may still carry
    // "within" and extension/script constraints.
    MatchVariableConstraint constraint = options.getVariableConstraint(Configuration.CONTEXT_VAR_NAME);
    if (constraint != null) {
      SubstitutionHandler handler = result.createSubstitutionHandler(
        Configuration.CONTEXT_VAR_NAME,
        Configuration.CONTEXT_VAR_NAME,
        constraint.isPartOfSearchResults(),
        constraint.getMinCount(),
        constraint.getMaxCount(),
        constraint.isGreedy()
      );
      if (!StringUtil.isEmptyOrSpaces(constraint.getWithinConstraint())) {
        MatchPredicate predicate = new WithinPredicate(constraint.getWithinConstraint(), options.getFileType(), project);
        if (constraint.isInvertWithinConstraint()) {
          predicate = new NotPredicate(predicate);
        }
        addPredicate(handler,predicate);
      }
      addExtensionPredicates(options, constraint, handler);
      addScriptConstraint(project, Configuration.CONTEXT_VAR_NAME, constraint, handler);
    }
    buf.append(text.substring(prevOffset,text.length()));
    PsiElement[] matchStatements;
    try {
      matchStatements = MatcherImplUtil.createTreeFromText(buf.toString(), PatternTreeContext.Block, options.getFileType(),
                                                           options.getDialect(), options.getPatternContext(), project, false);
      if (matchStatements.length==0) throw new MalformedPatternException();
    } catch (IncorrectOperationException e) {
      throw new MalformedPatternException(e.getMessage());
    }
    NodeFilter filter = LexicalNodesFilter.getInstance();
    GlobalCompilingVisitor compilingVisitor = new GlobalCompilingVisitor();
    compilingVisitor.compile(matchStatements,context);
    List<PsiElement> elements = new SmartList<>();
    for (PsiElement matchStatement : matchStatements) {
      if (!filter.accepts(matchStatement)) {
        elements.add(matchStatement);
      }
    }
    new DeleteNodesAction(compilingVisitor.getLexicalNodes()).run();
    return elements;
  }

  /** Collects predicates contributed by MatchPredicateProvider extensions and attaches them. */
  private static void addExtensionPredicates(MatchOptions options, MatchVariableConstraint constraint, SubstitutionHandler handler) {
    Set<MatchPredicate> predicates = new LinkedHashSet<>();
    for (MatchPredicateProvider matchPredicateProvider : Extensions.getExtensions(MatchPredicateProvider.EP_NAME)) {
      matchPredicateProvider.collectPredicates(constraint, handler.getName(), options, predicates);
    }
    for (MatchPredicate matchPredicate : predicates) {
      addPredicate(handler, matchPredicate);
    }
  }

  /**
   * Validates and attaches the variable's script constraint, if any.
   * The constraint is stored quoted, hence the length > 2 check.
   */
  private static void addScriptConstraint(Project project, String name, MatchVariableConstraint constraint, SubstitutionHandler handler)
    throws MalformedPatternException {
    if (constraint.getScriptCodeConstraint()!= null && constraint.getScriptCodeConstraint().length() > 2) {
      final String script = StringUtil.unquoteString(constraint.getScriptCodeConstraint());
      final String problem = ScriptSupport.checkValidScript(script);
      if (problem != null) {
        throw new MalformedPatternException("Script constraint for " + constraint.getName() + " has problem " + problem);
      }
      addPredicate(handler, new ScriptPredicate(project, name, script));
    }
  }

  /** Attaches a predicate to the handler, AND-combining it with any existing one. */
  private static void addPredicate(SubstitutionHandler handler, MatchPredicate predicate) {
    if (handler.getPredicate()==null) {
      handler.setPredicate(predicate);
    } else {
      handler.setPredicate(new AndPredicate(handler.getPredicate(), predicate));
    }
  }
}
velmuruganvelayutham/jpa | examples/Chapter7/02-namedQueryExample/src/model/examples/model/Department.java | 759 | package examples.model;
import java.util.ArrayList;
import java.util.Collection;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.OneToMany;
@Entity
public class Department {

    /** Primary key of the department. */
    @Id
    private int id;

    /** Human-readable department name. */
    private String name;

    /** Employees assigned to this department; inverse side of Employee.department. */
    @OneToMany(mappedBy="department")
    private Collection<Employee> employees;

    public Department() {
        this.employees = new ArrayList<Employee>();
    }

    public int getId() {
        return this.id;
    }

    public String getName() {
        return this.name;
    }

    public Collection<Employee> getEmployees() {
        return this.employees;
    }

    @Override
    public String toString() {
        return "Department no: " + getId() + ", name: " + getName();
    }
}
| apache-2.0 |
michalkurka/h2o-3 | h2o-core/src/test/java/water/jdbc/SQLManagerKeyOverwiteTest.java | 1246 | package water.jdbc;
import org.junit.Test;
import org.junit.runner.RunWith;
import water.Key;
import water.Keyed;
import water.fvec.Frame;
import water.runner.CloudSize;
import water.runner.H2ORunner;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.junit.Assert.*;
@RunWith(H2ORunner.class)
@CloudSize(1)
public class SQLManagerKeyOverwiteTest {

  /** Generated keys must keep the caller-supplied prefix and postfix. */
  @Test public void nextKeyHasRightPrefixAndPostfix() {
    final String prefix = "foo";
    final String postfix = "bar";
    final Key<Frame> key = SQLManager.nextTableKey(prefix, postfix);

    assertTrue(key.toString().startsWith(prefix));
    assertTrue(key.toString().endsWith(postfix));
  }

  /** Whitespace in prefix/postfix must not survive into the generated key. */
  @Test public void nextKeyKeyHasNoWhitechars() {
    final Key<Frame> key = SQLManager.nextTableKey("f o o ", "b a r");
    // The original assertion used String.contains("\\W"), which looks for the
    // literal two-character substring "\W" and therefore could never fail.
    // Check for actual whitespace characters instead.
    assertFalse(key.toString().matches(".*\\s.*"));
  }

  /** Concurrently generated keys must all be distinct. */
  @Test public void makeRandomKeyCreatesUniqueKeys() {
    final int count = 1000;
    // The original pipeline counted the mapped elements without deduplication,
    // so it always equaled `count` even with duplicate keys. Deduplicate with
    // distinct() before counting to actually test uniqueness.
    final long distinctCount = IntStream.range(0, count)
      .boxed()
      .parallel()
      .map(i -> SQLManager.nextTableKey("foo", "bar"))
      .map(Key::toString)
      .distinct()
      .count();

    assertEquals(count, distinctCount);
  }
}
| apache-2.0 |
GunoH/intellij-community | platform/execution-impl/src/com/intellij/execution/impl/statistics/RunConfigurationUsageTriggerCollector.java | 7712 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.execution.impl.statistics;
import com.intellij.execution.Executor;
import com.intellij.execution.configurations.ConfigurationFactory;
import com.intellij.execution.configurations.ConfigurationType;
import com.intellij.execution.configurations.RunConfiguration;
import com.intellij.execution.executors.ExecutorGroup;
import com.intellij.execution.target.TargetEnvironmentAwareRunProfile;
import com.intellij.execution.target.TargetEnvironmentConfiguration;
import com.intellij.execution.target.TargetEnvironmentType;
import com.intellij.execution.target.TargetEnvironmentsManager;
import com.intellij.internal.statistic.IdeActivityDefinition;
import com.intellij.internal.statistic.StructuredIdeActivity;
import com.intellij.internal.statistic.eventLog.EventLogGroup;
import com.intellij.internal.statistic.eventLog.events.*;
import com.intellij.internal.statistic.eventLog.validator.ValidationResultType;
import com.intellij.internal.statistic.eventLog.validator.rules.EventContext;
import com.intellij.internal.statistic.eventLog.validator.rules.impl.CustomValidationRule;
import com.intellij.internal.statistic.service.fus.collectors.CounterUsagesCollector;
import com.intellij.internal.statistic.utils.PluginInfo;
import com.intellij.internal.statistic.utils.PluginInfoDetectorKt;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.concurrency.NonUrgentExecutor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
import static com.intellij.execution.impl.statistics.RunConfigurationTypeUsagesCollector.createFeatureUsageData;
/**
 * Feature-usage (FUS) counter collector that reports run-configuration executions
 * as an IDE activity: started (with executor, target and configuration-type
 * context), an optional "ui.shown" stage, and finished (with a
 * {@link RunConfigurationFinishType}).
 */
public final class RunConfigurationUsageTriggerCollector extends CounterUsagesCollector {
  public static final String GROUP_NAME = "run.configuration.exec";
  private static final EventLogGroup GROUP = new EventLogGroup(GROUP_NAME, 62);
  // Carrier for extra data contributed by FusAwareRunConfiguration implementations.
  private static final ObjectEventField ADDITIONAL_FIELD = EventFields.createAdditionalDataField(GROUP_NAME, "started");
  // Executor id, validated by RunConfigurationExecutorUtilValidator below.
  private static final StringEventField EXECUTOR = EventFields.StringValidatedByCustomRule("executor", "run_config_executor");
  // Target-environment type id, validated by RunTargetValidator below.
  private static final StringEventField TARGET =
    EventFields.StringValidatedByCustomRule("target", RunConfigurationUsageTriggerCollector.RunTargetValidator.RULE_ID);
  private static final EnumEventField<RunConfigurationFinishType> FINISH_TYPE =
    EventFields.Enum("finish_type", RunConfigurationFinishType.class);
  // NOTE: must be declared after the fields above — the initializer reads them.
  private static final IdeActivityDefinition ACTIVITY_GROUP = GROUP.registerIdeActivity(null,
                                                                                       new EventField<?>[]{ADDITIONAL_FIELD, EXECUTOR,
                                                                                         TARGET,
                                                                                         RunConfigurationTypeUsagesCollector.FACTORY_FIELD,
                                                                                         RunConfigurationTypeUsagesCollector.ID_FIELD,
                                                                                         EventFields.PluginInfo},
                                                                                       new EventField<?>[]{FINISH_TYPE});
  public static final VarargEventId UI_SHOWN_STAGE = ACTIVITY_GROUP.registerStage("ui.shown");

  @Override
  public EventLogGroup getGroup() {
    return GROUP;
  }

  /**
   * Starts the "run configuration executed" activity. Context is built in a
   * non-blocking read action off the EDT and expires with the project.
   */
  @NotNull
  public static StructuredIdeActivity trigger(@NotNull Project project,
                                              @NotNull ConfigurationFactory factory,
                                              @NotNull Executor executor,
                                              @Nullable RunConfiguration runConfiguration) {
    return ACTIVITY_GROUP
      .startedAsync(project, () -> ReadAction.nonBlocking(() -> buildContext(project, factory, executor, runConfiguration))
        .expireWith(project)
        .submit(NonUrgentExecutor.getInstance()));
  }

  /**
   * Assembles the event pairs reported on activity start: configuration type/factory,
   * executor id (group id when the executor is a group proxy), optional additional
   * data, and the target type when a run target is configured.
   */
  private static @NotNull List<EventPair<?>> buildContext(@NotNull Project project,
                                                          @NotNull ConfigurationFactory factory,
                                                          @NotNull Executor executor,
                                                          @Nullable RunConfiguration runConfiguration) {
    final ConfigurationType configurationType = factory.getType();
    List<EventPair<?>> eventPairs = createFeatureUsageData(configurationType, factory);
    // Report the group id rather than the per-group proxy executor's id.
    ExecutorGroup<?> group = ExecutorGroup.getGroupIfProxy(executor);
    eventPairs.add(EXECUTOR.with(group != null ? group.getId() : executor.getId()));
    if (runConfiguration instanceof FusAwareRunConfiguration) {
      List<EventPair<?>> additionalData = ((FusAwareRunConfiguration)runConfiguration).getAdditionalUsageData();
      ObjectEventData objectEventData = new ObjectEventData(additionalData);
      eventPairs.add(ADDITIONAL_FIELD.with(objectEventData));
    }
    if (runConfiguration instanceof TargetEnvironmentAwareRunProfile) {
      String defaultTargetName = ((TargetEnvironmentAwareRunProfile)runConfiguration).getDefaultTargetName();
      if (defaultTargetName != null) {
        TargetEnvironmentConfiguration target = TargetEnvironmentsManager.getInstance(project).getTargets().findByName(defaultTargetName);
        if (target != null) {
          // Only the target *type* id is reported, never the user-defined target name.
          eventPairs.add(TARGET.with(target.getTypeId()));
        }
      }
    }
    return eventPairs;
  }

  /** Finishes the activity with the given finish type; no-op when activity is null. */
  public static void logProcessFinished(@Nullable StructuredIdeActivity activity,
                                        RunConfigurationFinishType finishType) {
    if (activity != null) {
      activity.finished(() -> Collections.singletonList(FINISH_TYPE.with(finishType)));
    }
  }

  /**
   * Accepts executor ids only when they belong to an executor registered via the
   * extension point and coming from a plugin that is safe to report.
   */
  public static class RunConfigurationExecutorUtilValidator extends CustomValidationRule {
    @Override
    public boolean acceptRuleId(@Nullable String ruleId) {
      return "run_config_executor".equals(ruleId);
    }

    @NotNull
    @Override
    protected ValidationResultType doValidate(@NotNull String data, @NotNull EventContext context) {
      for (Executor executor : Executor.EXECUTOR_EXTENSION_NAME.getExtensions()) {
        if (StringUtil.equals(executor.getId(), data)) {
          final PluginInfo info = PluginInfoDetectorKt.getPluginInfo(executor.getClass());
          return info.isSafeToReport() ? ValidationResultType.ACCEPTED : ValidationResultType.THIRD_PARTY;
        }
      }
      return ValidationResultType.REJECTED;
    }
  }

  /**
   * Accepts target type ids only when they belong to a registered
   * {@link TargetEnvironmentType} from a plugin that is safe to report.
   */
  public static class RunTargetValidator extends CustomValidationRule {
    public static final String RULE_ID = "run_target";

    @Override
    public boolean acceptRuleId(@Nullable String ruleId) {
      return RULE_ID.equals(ruleId);
    }

    @NotNull
    @Override
    protected ValidationResultType doValidate(@NotNull String data, @NotNull EventContext context) {
      for (TargetEnvironmentType<?> type : TargetEnvironmentType.EXTENSION_NAME.getExtensions()) {
        if (StringUtil.equals(type.getId(), data)) {
          final PluginInfo info = PluginInfoDetectorKt.getPluginInfo(type.getClass());
          return info.isSafeToReport() ? ValidationResultType.ACCEPTED : ValidationResultType.THIRD_PARTY;
        }
      }
      return ValidationResultType.REJECTED;
    }
  }

  // How the run finished, as far as the collector can tell.
  public enum RunConfigurationFinishType {FAILED_TO_START, UNKNOWN}
}
| apache-2.0 |
walterDurin/stickycode | net.stickycode.deploy.samples/sticky-deploy-sample-helloworld/src/main/java/net/stickycode/deploy/sample/helloworld/HelloWorld.java | 1097 | /**
* Copyright (c) 2010 RedEngine Ltd, http://www.redengine.co.nz. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package net.stickycode.deploy.sample.helloworld;
/**
 * Minimal deployable sample: prints a greeting directly via {@link #hello()},
 * or — when run as a thread — prints an "embedded" greeting and pauses for
 * five seconds so the deployment can be observed.
 */
public class HelloWorld implements Runnable {

  private static final String GREETING = "Hello World!";
  private static final String EMBEDDED_GREETING = "Hello Embedded World!";
  private static final long PAUSE_MILLIS = 5000L;

  /** Prints the classic greeting to stdout. */
  public void hello() {
    System.out.println(GREETING);
  }

  @Override
  public void run() {
    System.out.println(EMBEDDED_GREETING);
    pause(PAUSE_MILLIS);
  }

  /** Sleeps for the given time; interruption is rethrown as a RuntimeException. */
  private static void pause(long millis) {
    try {
      Thread.sleep(millis);
    }
    catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
  }
}
| apache-2.0 |
apache/incubator-shardingsphere | shardingsphere-kernel/shardingsphere-parser/shardingsphere-parser-core/src/main/java/org/apache/shardingsphere/parser/rule/SQLParserRule.java | 1720 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.parser.rule;
import lombok.Getter;
import org.apache.shardingsphere.infra.rule.identifier.scope.GlobalRule;
import org.apache.shardingsphere.parser.config.SQLParserRuleConfiguration;
import org.apache.shardingsphere.sql.parser.api.CacheOption;
/**
 * Global rule holding SQL parser settings: whether SQL comments are parsed, plus
 * the cache options for parsed statements and parse trees.
 */
@Getter
public final class SQLParserRule implements GlobalRule {

    private final boolean sqlCommentParseEnabled;

    private final CacheOption sqlStatementCache;

    private final CacheOption parseTreeCache;

    public SQLParserRule(final SQLParserRuleConfiguration ruleConfig) {
        this.sqlCommentParseEnabled = ruleConfig.isSqlCommentParseEnabled();
        this.sqlStatementCache = ruleConfig.getSqlStatementCache();
        this.parseTreeCache = ruleConfig.getParseTreeCache();
    }

    @Override
    public String getType() {
        return SQLParserRule.class.getSimpleName();
    }
}
| apache-2.0 |
apache/incubator-asterixdb | hyracks-fullstack/algebricks/algebricks-runtime/src/main/java/org/apache/hyracks/algebricks/runtime/operators/sort/MicroSortRuntimeFactory.java | 8262 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.runtime.operators.sort;
import java.nio.ByteBuffer;
import java.util.List;
import org.apache.hyracks.algebricks.common.exceptions.NotImplementedException;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputPushRuntime;
import org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputRuntimeFactory;
import org.apache.hyracks.api.comm.IFrameWriter;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.IBinaryComparator;
import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputer;
import org.apache.hyracks.api.dataflow.value.INormalizedKeyComputerFactory;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.api.resources.IDeallocatable;
import org.apache.hyracks.api.util.CleanupUtils;
import org.apache.hyracks.dataflow.common.io.GeneratedRunFileReader;
import org.apache.hyracks.dataflow.std.buffermanager.EnumFreeSlotPolicy;
import org.apache.hyracks.dataflow.std.sort.Algorithm;
import org.apache.hyracks.dataflow.std.sort.ExternalSortRunGenerator;
import org.apache.hyracks.dataflow.std.sort.ExternalSortRunMerger;
/**
 * Runtime factory for a "micro" sort operator embedded in an Algebricks
 * meta-runtime. Sorting is delegated to {@code ExternalSortRunGenerator}/
 * {@code ExternalSortRunMerger}: input that exceeds {@code framesLimit} frames
 * spills to run files, which are merged when the operator is closed.
 */
public class MicroSortRuntimeFactory extends AbstractOneInputOneOutputRuntimeFactory {

    private static final long serialVersionUID = 1L;

    // Frame budget for the in-memory sorter before runs are spilled.
    private final int framesLimit;
    // Field indexes to sort on.
    private final int[] sortFields;
    // Optional normalized-key computers (may be null) to speed up comparisons.
    private final INormalizedKeyComputerFactory[] keyNormalizerFactories;
    private final IBinaryComparatorFactory[] comparatorFactories;

    /**
     * Convenience constructor wrapping a single first-key normalizer factory
     * (or none when {@code firstKeyNormalizerFactory} is null).
     */
    public MicroSortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory firstKeyNormalizerFactory,
            IBinaryComparatorFactory[] comparatorFactories, int[] projectionList, int framesLimit) {
        this(sortFields, firstKeyNormalizerFactory != null
                ? new INormalizedKeyComputerFactory[] { firstKeyNormalizerFactory } : null, comparatorFactories,
                projectionList, framesLimit);
    }

    /**
     * @param projectionList must be null — projection push-down is not supported here.
     * @throws NotImplementedException if a projection list is supplied
     */
    public MicroSortRuntimeFactory(int[] sortFields, INormalizedKeyComputerFactory[] keyNormalizerFactories,
            IBinaryComparatorFactory[] comparatorFactories, int[] projectionList, int framesLimit) {
        super(projectionList);
        // Obs: the projection list is currently ignored.
        if (projectionList != null) {
            throw new NotImplementedException("Cannot push projection into InMemorySortRuntime.");
        }
        this.sortFields = sortFields;
        this.keyNormalizerFactories = keyNormalizerFactories;
        this.comparatorFactories = comparatorFactories;
        this.framesLimit = framesLimit;
    }

    @Override
    public AbstractOneInputOneOutputPushRuntime createOneOutputPushRuntime(final IHyracksTaskContext ctx)
            throws HyracksDataException {
        InMemorySortPushRuntime pushRuntime = new InMemorySortPushRuntime(ctx);
        // Registered so the task can reclaim sorter memory even on abnormal teardown.
        ctx.registerDeallocatable(pushRuntime);
        return pushRuntime;
    }

    /**
     * Push runtime: buffers/sorts frames in open()/nextFrame(), then on close()
     * either flushes the in-memory sorter directly (no spilled runs) or merges
     * the generated runs into the downstream writer.
     */
    private class InMemorySortPushRuntime extends AbstractOneInputOneOutputPushRuntime implements IDeallocatable {
        final IHyracksTaskContext ctx;
        ExternalSortRunGenerator runsGenerator = null;
        ExternalSortRunMerger runsMerger = null;
        IFrameWriter wrappingWriter = null;

        private InMemorySortPushRuntime(IHyracksTaskContext ctx) {
            this.ctx = ctx;
        }

        @Override
        public void open() throws HyracksDataException {
            // Lazily created once, then reset on each re-open of the runtime.
            if (runsGenerator == null) {
                runsGenerator = new ExternalSortRunGenerator(ctx, sortFields, keyNormalizerFactories,
                        comparatorFactories, outputRecordDesc, Algorithm.MERGE_SORT, EnumFreeSlotPolicy.LAST_FIT,
                        framesLimit, Integer.MAX_VALUE);
            }
            // next writer will be opened later when preparing the merger
            isOpen = true;
            runsGenerator.open();
            runsGenerator.getSorter().reset();
        }

        @Override
        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
            runsGenerator.nextFrame(buffer);
        }

        @Override
        public void close() throws HyracksDataException {
            Throwable failure = null;
            if (isOpen) {
                try {
                    if (!failed) {
                        runsGenerator.close();
                        createOrResetRunsMerger();
                        if (runsGenerator.getRuns().isEmpty()) {
                            // Everything fit in memory: flush the sorter straight through.
                            wrappingWriter = runsMerger.prepareSkipMergingFinalResultWriter(writer);
                            wrappingWriter.open();
                            if (runsGenerator.getSorter().hasRemaining()) {
                                runsGenerator.getSorter().flush(wrappingWriter);
                            }
                        } else {
                            // Spilled: merge the run files into the final writer.
                            wrappingWriter = runsMerger.prepareFinalMergeResultWriter(writer);
                            wrappingWriter.open();
                            runsMerger.process(wrappingWriter);
                        }
                    }
                } catch (Throwable th) {
                    failure = th;
                    fail(th);
                } finally {
                    // Close the wrapper in all cases, chaining any close failure.
                    failure = CleanupUtils.close(wrappingWriter, failure);
                    wrappingWriter = null;
                }
            }
            isOpen = false;
            if (failure != null) {
                throw HyracksDataException.create(failure);
            }
        }

        @Override
        public void fail() throws HyracksDataException {
            failed = true;
            // clean up the runs if some have been generated. double close should be idempotent.
            if (runsGenerator != null) {
                List<GeneratedRunFileReader> runs = runsGenerator.getRuns();
                for (int i = 0, size = runs.size(); i < size; i++) {
                    try {
                        runs.get(i).close();
                    } catch (Throwable th) {
                        // ignore
                    }
                }
            }
            if (wrappingWriter != null) {
                wrappingWriter.fail();
            }
        }

        @Override
        public void deallocate() {
            // Best-effort release of sorter memory; errors are deliberately swallowed.
            if (runsGenerator != null) {
                try {
                    runsGenerator.getSorter().close();
                } catch (Exception e) {
                    // ignore
                }
            }
        }

        /** Builds the merger on first use, otherwise just points it at the new runs. */
        private void createOrResetRunsMerger() {
            if (runsMerger == null) {
                IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
                for (int i = 0; i < comparatorFactories.length; ++i) {
                    comparators[i] = comparatorFactories[i].createBinaryComparator();
                }
                INormalizedKeyComputer nmkComputer =
                        keyNormalizerFactories == null ? null : keyNormalizerFactories[0].createNormalizedKeyComputer();
                runsMerger = new ExternalSortRunMerger(ctx, runsGenerator.getRuns(), sortFields, comparators,
                        nmkComputer, outputRecordDesc, framesLimit, Integer.MAX_VALUE);
            } else {
                runsMerger.reset(runsGenerator.getRuns());
            }
        }
    }
}
| apache-2.0 |
visouza/solr-5.0.0 | solr/core/src/java/org/apache/solr/core/CoreContainer.java | 30728 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.handler.RequestHandlerBase;
import org.apache.solr.handler.admin.CollectionsHandler;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.admin.InfoHandler;
import org.apache.solr.handler.component.ShardHandlerFactory;
import org.apache.solr.logging.LogWatcher;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.update.UpdateShardHandler;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.FileUtils;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import static com.google.common.base.Preconditions.checkNotNull;
/**
*
* @since solr 1.3
*/
public class CoreContainer {
protected static final Logger log = LoggerFactory.getLogger(CoreContainer.class);
final SolrCores solrCores = new SolrCores(this);
/** Immutable pairing of a core descriptor with the exception that kept it from loading. */
public static class CoreLoadFailure {
  public final CoreDescriptor cd;
  public final Exception exception;

  public CoreLoadFailure(CoreDescriptor descriptor, Exception cause) {
    this.cd = descriptor;
    this.exception = cause;
  }
}
protected final Map<String, CoreLoadFailure> coreInitFailures = new ConcurrentHashMap<>();
protected CoreAdminHandler coreAdminHandler = null;
protected CollectionsHandler collectionsHandler = null;
private InfoHandler infoHandler;
protected Properties containerProperties;
private ConfigSetService coreConfigService;
protected ZkContainer zkSys = new ZkContainer();
protected ShardHandlerFactory shardHandlerFactory;
private UpdateShardHandler updateShardHandler;
protected LogWatcher logging = null;
private CloserThread backgroundCloser = null;
protected final ConfigSolr cfg;
protected final SolrResourceLoader loader;
protected final String solrHome;
protected final CoresLocator coresLocator;
private String hostName;
private final JarRepository jarRepository = new JarRepository(this);
public static final String CORES_HANDLER_PATH = "/admin/cores";
public static final String COLLECTIONS_HANDLER_PATH = "/admin/collections";
public static final String INFO_HANDLER_PATH = "/admin/info";
private Map<String, SolrRequestHandler> containerHandlers = new HashMap<>();
/** Resolves the container-level handler registered for the given path, or null. */
public SolrRequestHandler getRequestHandler(String path) {
  return RequestHandlerBase.getRequestHandler(path, containerHandlers);
}

/** Exposes the live map of container-level handlers (path -> handler). */
public Map<String, SolrRequestHandler> getRequestHandlers(){
  return this.containerHandlers;
}
// private ClientConnectionManager clientConnectionManager = new PoolingClientConnectionManager();

// Instance initializer: log each container creation with its identity hash so
// multiple containers in one JVM can be told apart in the logs.
{
  log.info("New CoreContainer " + System.identityHashCode(this));
}
/**
 * Create a new CoreContainer using system properties to detect the solr home
 * directory. The container's cores are not loaded.
 * @see #load()
 */
public CoreContainer() {
  this(new SolrResourceLoader(SolrResourceLoader.locateSolrHome()));
}

/**
 * Create a new CoreContainer using the given SolrResourceLoader. The container's
 * cores are not loaded.
 * @param loader the SolrResourceLoader
 * @see #load()
 */
public CoreContainer(SolrResourceLoader loader) {
  this(ConfigSolr.fromSolrHome(loader, loader.getInstanceDir()));
}

/**
 * Create a new CoreContainer using the given solr home directory. The container's
 * cores are not loaded.
 * @param solrHome a String containing the path to the solr home directory
 * @see #load()
 */
public CoreContainer(String solrHome) {
  this(new SolrResourceLoader(solrHome));
}

/**
 * Create a new CoreContainer using the given SolrResourceLoader,
 * configuration and CoresLocator. The container's cores are
 * not loaded.
 * @param config a ConfigSolr representation of this container's configuration
 * @see #load()
 */
public CoreContainer(ConfigSolr config) {
  this(config, config.getCoresLocator());
}

/**
 * Primary constructor: wires the loader, solr home and cores locator from the
 * given config. Cores are not loaded until {@link #load()} is called.
 */
public CoreContainer(ConfigSolr config, CoresLocator locator) {
  this.loader = config.getSolrResourceLoader();
  this.solrHome = loader.getInstanceDir();
  this.cfg = checkNotNull(config); // fail fast on a null config
  this.coresLocator = locator;
}

/**
 * This method allows subclasses to construct a CoreContainer
 * without any default init behavior.
 *
 * @param testConstructor pass (Object)null.
 * @lucene.experimental
 */
protected CoreContainer(Object testConstructor) {
  solrHome = null;
  loader = null;
  coresLocator = null;
  cfg = null;
}
/**
 * Builds a CoreContainer from the given config file and loads its cores.
 * If loading fails the container is shut down before the exception is
 * rethrown, so no resources are leaked.
 *
 * @param solrHome the solr home directory
 * @param configFile the file containing this container's configuration
 * @return a loaded CoreContainer
 */
public static CoreContainer createAndLoad(String solrHome, File configFile) {
  SolrResourceLoader resourceLoader = new SolrResourceLoader(solrHome);
  CoreContainer container = new CoreContainer(ConfigSolr.fromFile(resourceLoader, configFile));
  try {
    container.load();
  } catch (Exception e) {
    container.shutdown();
    throw e;
  }
  return container;
}
/** Container-wide properties from solr.xml; populated during {@link #load()}. */
public Properties getContainerProperties() {
  return containerProperties;
}

//-------------------------------------------------------------------
// Initialization / Cleanup
//-------------------------------------------------------------------
/**
 * Load the cores defined for this CoreContainer.
 * <p>
 * Sequence: add the shared lib to the class loader, create the shard/update
 * handlers and log watcher, start ZooKeeper integration (if configured),
 * register the admin handlers, then discover and create cores in parallel.
 * Finally, in cloud mode, register the loaded cores in ZK.
 */
public void load() {
  log.info("Loading cores into CoreContainer [instanceDir={}]", loader.getInstanceDir());

  // add the sharedLib to the shared resource loader before initializing cfg based plugins
  String libDir = cfg.getSharedLibDirectory();
  if (libDir != null) {
    File f = FileUtils.resolvePath(new File(solrHome), libDir);
    log.info("loading shared library: " + f.getAbsolutePath());
    loader.addToClassLoader(libDir, null, false);
    loader.reloadLuceneSPI();
  }

  shardHandlerFactory = ShardHandlerFactory.newInstance(cfg.getShardHandlerFactoryPluginInfo(), loader);
  updateShardHandler = new UpdateShardHandler(cfg);
  solrCores.allocateLazyCores(cfg.getTransientCacheSize(), loader);
  logging = LogWatcher.newRegisteredLogWatcher(cfg.getLogWatcherConfig(), loader);
  hostName = cfg.getHost();
  log.info("Host Name: " + hostName);

  // No-op when ZooKeeper is not configured.
  zkSys.initZooKeeper(this, solrHome, cfg);

  // Register the container-level admin handlers.
  collectionsHandler = createHandler(cfg.getCollectionsHandlerClass(), CollectionsHandler.class);
  containerHandlers.put(COLLECTIONS_HANDLER_PATH, collectionsHandler);
  infoHandler = createHandler(cfg.getInfoHandlerClass(), InfoHandler.class);
  containerHandlers.put(INFO_HANDLER_PATH, infoHandler);
  coreAdminHandler = createHandler(cfg.getCoreAdminHandlerClass(), CoreAdminHandler.class);
  containerHandlers.put(CORES_HANDLER_PATH, coreAdminHandler);

  coreConfigService = cfg.createCoreConfigService(loader, zkSys.getZkController());
  containerProperties = cfg.getSolrProperties();

  // setup executor to load cores in parallel
  // do not limit the size of the executor in zk mode since cores may try and wait for each other.
  ExecutorService coreLoadExecutor = Executors.newFixedThreadPool(
      ( zkSys.getZkController() == null ? cfg.getCoreLoadThreadCount() : Integer.MAX_VALUE ),
      new DefaultSolrThreadFactory("coreLoadExecutor") );
  try {
    List<CoreDescriptor> cds = coresLocator.discover(this);
    checkForDuplicateCoreNames(cds);

    List<Callable<SolrCore>> creators = new ArrayList<>();
    for (final CoreDescriptor cd : cds) {
      // Transient / not-load-on-startup cores are remembered so they can be
      // created lazily on first request.
      if (cd.isTransient() || !cd.isLoadOnStartup()) {
        solrCores.putDynamicDescriptor(cd.getName(), cd);
      }
      if (cd.isLoadOnStartup()) {
        creators.add(new Callable<SolrCore>() {
          @Override
          public SolrCore call() throws Exception {
            if (zkSys.getZkController() != null) {
              zkSys.getZkController().throwErrorIfReplicaReplaced(cd);
            }
            // ZK registration is deferred (false) and performed in bulk below.
            return create(cd, false);
          }
        });
      }
    }

    try {
      coreLoadExecutor.invokeAll(creators);
    }
    catch (InterruptedException e) {
      throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "Interrupted while loading cores");
    }

    // Start the background thread
    backgroundCloser = new CloserThread(this, solrCores, cfg);
    backgroundCloser.start();

  } finally {
    ExecutorUtil.shutdownNowAndAwaitTermination(coreLoadExecutor);
  }

  if (isZooKeeperAware()) {
    // register in zk in background threads
    Collection<SolrCore> cores = getCores();
    if (cores != null) {
      for (SolrCore core : cores) {
        try {
          zkSys.registerInZk(core, true);
        } catch (Throwable t) {
          // Best-effort: one failed registration must not abort container startup.
          SolrException.log(log, "Error registering SolrCore", t);
        }
      }
    }
    zkSys.getZkController().checkOverseerDesignate();
  }
}
/**
 * Fails fast when two discovered cores share a name, reporting both instance
 * directories in the error.
 *
 * @throws SolrException on the first duplicate name encountered
 */
private static void checkForDuplicateCoreNames(List<CoreDescriptor> cds) {
  Map<String, String> seenDirsByName = new HashMap<>();
  for (CoreDescriptor descriptor : cds) {
    String name = descriptor.getName();
    // put() returns the previously stored instance dir, if any.
    String previousDir = seenDirsByName.put(name, descriptor.getInstanceDir());
    if (previousDir != null) {
      throw new SolrException(ErrorCode.SERVER_ERROR,
          String.format(Locale.ROOT, "Found multiple cores with the name [%s], with instancedirs [%s] and [%s]",
              name, previousDir, descriptor.getInstanceDir()));
    }
  }
}
// volatile: read by request threads, written by the shutdown thread.
private volatile boolean isShutDown = false;

/** True once {@link #shutdown()} has begun. */
public boolean isShutDown() {
  return isShutDown;
}
/**
 * Stops all cores.
 * <p>
 * Order matters: publish cores as down in ZK, stop the admin handler, wake and
 * join the background closer thread, close all cores, then close the shard and
 * update handlers, and finally ZooKeeper itself.
 */
public void shutdown() {
  log.info("Shutting down CoreContainer instance="
      + System.identityHashCode(this));

  isShutDown = true;

  if (isZooKeeperAware()) {
    cancelCoreRecoveries();
    zkSys.publishCoresAsDown(solrCores.getCores());
  }

  try {
    if (coreAdminHandler != null) coreAdminHandler.shutdown();
  } catch (Exception e) {
    log.warn("Error shutting down CoreAdminHandler. Continuing to close CoreContainer.", e);
  }

  try {
    // First wake up the closer thread, it'll terminate almost immediately since it checks isShutDown.
    synchronized (solrCores.getModifyLock()) {
      solrCores.getModifyLock().notifyAll(); // wake up anyone waiting
    }
    if (backgroundCloser != null) { // Doesn't seem right, but tests get in here without initializing the core.
      try {
        backgroundCloser.join();
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        if (log.isDebugEnabled()) {
          log.debug("backgroundCloser thread was interrupted before finishing");
        }
      }
    }
    // Now clear all the cores that are being operated upon.
    solrCores.close();

    // It's still possible that one of the pending dynamic load operation is waiting, so wake it up if so.
    // Since all the pending operations queues have been drained, there should be nothing to do.
    synchronized (solrCores.getModifyLock()) {
      solrCores.getModifyLock().notifyAll(); // wake up the thread
    }

  } finally {
    // Nested finally blocks guarantee each handler is closed even if an
    // earlier close throws; ZK must always be closed last.
    try {
      if (shardHandlerFactory != null) {
        shardHandlerFactory.close();
      }
    } finally {
      try {
        if (updateShardHandler != null) {
          updateShardHandler.close();
        }
      } finally {
        // we want to close zk stuff last
        zkSys.close();
      }
    }
  }
  org.apache.lucene.util.IOUtils.closeWhileHandlingException(loader); // best effort
}
/**
 * Asks every loaded core to cancel any in-flight recovery. Must run without
 * holding the cores sync; failures are logged and do not stop the loop.
 */
public void cancelCoreRecoveries() {
  // we must cancel without holding the cores sync
  // make sure we wait for any recoveries to stop
  for (SolrCore core : solrCores.getCores()) {
    try {
      core.getSolrCoreState().cancelRecovery();
    } catch (Exception e) {
      SolrException.log(log, "Error canceling recovery for core", e);
    }
  }
}
/**
 * Safety net only: reaching finalize() on a container that was never shut down
 * indicates a bug in the caller (leaked executors, watchers and index locks),
 * so it is logged loudly. Normal cleanup happens in {@link #shutdown()}.
 */
@Override
protected void finalize() throws Throwable {
  try {
    if (!isShutDown) {
      // Fixed message grammar: "was not close" -> "was not closed".
      log.error("CoreContainer was not closed prior to finalize(), indicates a bug -- POSSIBLE RESOURCE LEAK!!! instance=" + System.identityHashCode(this));
    }
  } finally {
    super.finalize();
  }
}
/** The locator used to discover and persist core definitions. */
public CoresLocator getCoresLocator() {
  return coresLocator;
}
/**
 * Registers a core under the given name, replacing (and closing) any core
 * previously registered under that name.
 *
 * @param name the registration name; must not be null or contain '/' or '\'
 * @param core the core to register; must not be null
 * @param registerInZk whether to also publish the core in ZooKeeper
 * @return the previously registered core that was replaced, or null if none
 */
protected SolrCore registerCore(String name, SolrCore core, boolean registerInZk) {
  if (core == null) {
    throw new RuntimeException( "Can not register a null core." );
  }
  if (name == null ||
      name.indexOf( '/' ) >= 0 ||
      name.indexOf( '\\' ) >= 0 ) {
    throw new RuntimeException( "Invalid core name: " + name );
  }
  // We can register a core when creating them via the admin UI, so we need to insure that the dynamic descriptors
  // are up to date
  CoreDescriptor cd = core.getCoreDescriptor();
  if ((cd.isTransient() || !cd.isLoadOnStartup())
      && solrCores.getDynamicDescriptor(name) == null) {
    // Store it away for later use. includes non-transient but not
    // loaded at startup cores.
    solrCores.putDynamicDescriptor(name, cd);
  }

  SolrCore old = null;

  if (isShutDown) {
    core.close();
    // Fixed message grammar: "has been close" -> "has been closed".
    throw new IllegalStateException("This CoreContainer has been closed");
  }
  if (cd.isTransient()) {
    old = solrCores.putTransientCore(cfg, name, core, loader);
  } else {
    old = solrCores.putCore(name, core);
  }
  /*
   * set both the name of the descriptor and the name of the
   * core, since the descriptors name is used for persisting.
   */
  core.setName(name);

  coreInitFailures.remove(name);

  if (old == null || old == core) {
    log.info( "registering core: " + name );
    if (registerInZk) {
      zkSys.registerInZk(core, false);
    }
    return null;
  }
  else {
    log.info( "replacing core: " + name );
    old.close();
    if (registerInZk) {
      zkSys.registerInZk(core, false);
    }
    return old;
  }
}
/**
 * Creates a new core based on a CoreDescriptor, publishing the core state to the cluster.
 * @param cd the CoreDescriptor
 * @return the newly created core
 */
public SolrCore create(CoreDescriptor cd) {
  // Delegates with publishState=true so cloud state is updated.
  return create(cd, true);
}
/**
 * Creates a new core based on a CoreDescriptor.
 * <p>
 * Failures are recorded in {@code coreInitFailures} (keyed by core name) before
 * being rethrown, so the admin API can report them.
 *
 * @param dcore a core descriptor
 * @param publishState publish core state to the cluster if true
 * @return the newly created core
 * @throws SolrException if the container is shut down or core creation fails
 */
public SolrCore create(CoreDescriptor dcore, boolean publishState) {
  if (isShutDown) {
    // Fixed garbled message: "Solr has close." -> "Solr has been shut down."
    throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE, "Solr has been shut down.");
  }

  try {
    if (zkSys.getZkController() != null) {
      zkSys.getZkController().preRegister(dcore);
    }

    ConfigSet coreConfig = coreConfigService.getConfig(dcore);
    log.info("Creating SolrCore '{}' using configuration from {}", dcore.getName(), coreConfig.getName());
    SolrCore core = new SolrCore(dcore, coreConfig);
    solrCores.addCreated(core);

    // always kick off recovery if we are in non-Cloud mode
    if (!isZooKeeperAware() && core.getUpdateHandler().getUpdateLog() != null) {
      core.getUpdateHandler().getUpdateLog().recoverFromLog();
    }

    registerCore(dcore.getName(), core, publishState);

    return core;
  } catch (Exception e) {
    coreInitFailures.put(dcore.getName(), new CoreLoadFailure(dcore, e));
    log.error("Error creating core [{}]: {}", dcore.getName(), e.getMessage(), e);
    throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to create core [" + dcore.getName() + "]", e);
  } catch (Throwable t) {
    // JVM errors (OOM etc.): record the failure, then rethrow the original Throwable.
    SolrException e = new SolrException(ErrorCode.SERVER_ERROR, "JVM Error creating core [" + dcore.getName() + "]: " + t.getMessage(), t);
    log.error("Error creating core [{}]: {}", dcore.getName(), t.getMessage(), t);
    coreInitFailures.put(dcore.getName(), new CoreLoadFailure(dcore, e));
    throw t;
  }
}
/**
 * @return a Collection of registered SolrCores
 */
public Collection<SolrCore> getCores() {
  return solrCores.getCores();
}

/**
 * @return a Collection of the names that cores are mapped to
 */
public Collection<String> getCoreNames() {
  return solrCores.getCoreNames();
}

/** This method is currently experimental.
 * @return a Collection of the names that a specific core is mapped to.
 */
public Collection<String> getCoreNames(SolrCore core) {
  return solrCores.getCoreNames(core);
}

/**
 * get a list of all the cores that are currently loaded
 * @return a list of all the available core names in either permanent or transient core lists.
 */
public Collection<String> getAllCoreNames() {
  return solrCores.getAllCoreNames();
}
/**
 * Returns an immutable Map of Exceptions that occurred when initializing
 * SolrCores (either at startup, or due to runtime requests to create cores)
 * keyed off of the name (String) of the SolrCore that had the Exception
 * during initialization.
 * <p>
 * While the Map returned by this method is immutable and will not change
 * once returned to the client, the source data used to generate this Map
 * can be changed as various SolrCore operations are performed:
 * </p>
 * <ul>
 *  <li>Failed attempts to create new SolrCores will add new Exceptions.</li>
 *  <li>Failed attempts to re-create a SolrCore using a name already contained in this Map will replace the Exception.</li>
 *  <li>Failed attempts to reload a SolrCore will cause an Exception to be added to this list -- even though the existing SolrCore with that name will continue to be available.</li>
 *  <li>Successful attempts to re-create a SolrCore using a name already contained in this Map will remove the Exception.</li>
 *  <li>Registering an existing SolrCore with a name already contained in this Map (ie: ALIAS or SWAP) will remove the Exception.</li>
 * </ul>
 */
public Map<String, CoreLoadFailure> getCoreInitFailures() {
  // copyOf takes a point-in-time immutable snapshot of the mutable source map.
  return ImmutableMap.copyOf(coreInitFailures);
}
// ---------------- Core name related methods ---------------
/**
 * Recreates a SolrCore.
 * While the new core is loading, requests will continue to be dispatched to
 * and processed by the old core
 *
 * @param name the name of the SolrCore to reload
 */
public void reload(String name) {
  SolrCore core = solrCores.getCoreFromAnyList(name, false);
  if (core == null)
    throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "No such core: " + name );

  CoreDescriptor cd = core.getCoreDescriptor();
  try {
    // Flag this name as having a pending core operation so concurrent
    // operations on the same core wait until we are done.
    solrCores.waitAddPendingCoreOps(name);
    ConfigSet coreConfig = coreConfigService.getConfig(cd);
    log.info("Reloading SolrCore '{}' using configuration from {}", cd.getName(), coreConfig.getName());
    SolrCore newCore = core.reload(coreConfig);
    // registerInZk = false: the reloaded core replaces an already-registered
    // one, so it is not re-registered in ZK (see registerCore).
    registerCore(name, newCore, false);
  }
  catch (Exception e) {
    // Record the failure for getCore() reporting; the old core stays available.
    coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e));
    throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to reload core [" + cd.getName() + "]", e);
  }
  finally {
    solrCores.removeFromPendingOps(name);
  }
}
/**
 * Swaps two SolrCore descriptors.
 *
 * @param n0 name of the first core; must be non-null
 * @param n1 name of the second core; must be non-null
 * @throws SolrException if either name is null
 */
public void swap(String n0, String n1) {
  if (n0 == null || n1 == null) {
    throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "Can not swap unnamed cores." );
  }
  solrCores.swap(n0, n1);
  // Persist the swap via the locator so it survives a restart.
  coresLocator.swap(this, solrCores.getCoreDescriptor(n0), solrCores.getCoreDescriptor(n1));
  // Parameterized logging, consistent with the other log calls in this class.
  log.info("swapped: {} with {}", n0, n1);
}
/**
 * Unload a core from this container, leaving all files on disk
 * @param name the name of the core to unload
 */
public void unload(String name) {
  // Delegates with all delete flags off: index, data and instance dirs are kept.
  unload(name, false, false, false);
}
/**
 * Unload a core from this container, optionally removing the core's data and configuration
 *
 * @param name the name of the core to unload
 * @param deleteIndexDir if true, delete the core's index on close
 * @param deleteDataDir if true, delete the core's data directory on close
 * @param deleteInstanceDir if true, delete the core's instance directory on close
 */
public void unload(String name, boolean deleteIndexDir, boolean deleteDataDir, boolean deleteInstanceDir) {

  // check for core-init errors first
  CoreLoadFailure loadFailure = coreInitFailures.remove(name);
  if (loadFailure != null) {
    // getting the index directory requires opening a DirectoryFactory with a SolrConfig, etc,
    // which we may not be able to do because of the init error. So we just go with what we
    // can glean from the CoreDescriptor - datadir and instancedir
    SolrCore.deleteUnloadedCore(loadFailure.cd, deleteDataDir, deleteInstanceDir);
    return;
  }

  CoreDescriptor cd = solrCores.getCoreDescriptor(name);
  if (cd == null)
    throw new SolrException(ErrorCode.BAD_REQUEST, "Cannot unload non-existent core [" + name + "]");

  // Only close below if the core is actually loaded and not already pending close.
  boolean close = solrCores.isLoadedNotPendingClose(name);
  SolrCore core = solrCores.remove(name);
  coresLocator.delete(this, cd);
  if (core == null) {
    // transient core
    SolrCore.deleteUnloadedCore(cd, deleteDataDir, deleteInstanceDir);
    return;
  }

  if (zkSys.getZkController() != null) {
    // cancel recovery in cloud mode
    core.getSolrCoreState().cancelRecovery();
  }

  // Capture the ZK configset path before closing, for the unregister call below.
  String configSetZkPath = core.getResourceLoader() instanceof ZkSolrResourceLoader ? ((ZkSolrResourceLoader)core.getResourceLoader()).getConfigSetZkPath() : null;
  core.unloadOnClose(deleteIndexDir, deleteDataDir, deleteInstanceDir);
  if (close)
    core.close();

  if (zkSys.getZkController() != null) {
    try {
      zkSys.getZkController().unregister(name, cd, configSetZkPath);
    } catch (InterruptedException e) {
      // Restore the interrupt flag before surfacing the failure.
      Thread.currentThread().interrupt();
      throw new SolrException(ErrorCode.SERVER_ERROR, "Interrupted while unregistering core [" + name + "] from cloud state");
    } catch (KeeperException e) {
      throw new SolrException(ErrorCode.SERVER_ERROR, "Error unregistering core [" + name + "] from cloud state", e);
    }
  }
}
/**
 * Renames a core: registers it under the new name, then removes the old mapping
 * and persists the rename through the cores locator. No-op if no core has the
 * given name.
 */
public void rename(String name, String toName) {
  // try-with-resources: getCore() increments the core's refcount and close()
  // releases it once the rename bookkeeping is done.
  try (SolrCore core = getCore(name)) {
    if (core != null) {
      registerCore(toName, core, true);
      SolrCore old = solrCores.remove(name);
      coresLocator.rename(this, old.getCoreDescriptor(), core.getCoreDescriptor());
    }
  }
}
/**
 * Get the CoreDescriptors for all cores managed by this container
 * @return a List of CoreDescriptors
 */
public List<CoreDescriptor> getCoreDescriptors() {
  return solrCores.getCoreDescriptors();
}

/**
 * Looks up the CoreDescriptor with the given name, or null if none matches.
 */
public CoreDescriptor getCoreDescriptor(String coreName) {
  // TODO make this less hideous!
  // Linear scan over every descriptor; fine for the small number of cores expected.
  for (CoreDescriptor cd : getCoreDescriptors()) {
    if (cd.getName().equals(coreName))
      return cd;
  }
  return null;
}

public String getCoreRootDirectory() {
  return cfg.getCoreRootDirectory();
}
/**
 * Gets a core by name and increase its refcount.
 *
 * @see SolrCore#close()
 * @param name the core name
 * @return the core if found, null if a SolrCore by this name does not exist
 * @exception SolrException if a SolrCore with this name failed to be initialized
 */
public SolrCore getCore(String name) {

  // Do this in two phases since we don't want to lock access to the cores over a load.
  SolrCore core = solrCores.getCoreFromAnyList(name, true);

  if (core != null) {
    // Already loaded; refcount handling is done inside getCoreFromAnyList
    // (the 'true' argument presumably increments it -- see SolrCores).
    return core;
  }

  // OK, it's not presently in any list, is it in the list of dynamic cores but not loaded yet? If so, load it.
  CoreDescriptor desc = solrCores.getDynamicDescriptor(name);
  if (desc == null) { //Nope, no transient core with this name

    // if there was an error initalizing this core, throw a 500
    // error with the details for clients attempting to access it.
    CoreLoadFailure loadFailure = getCoreInitFailures().get(name);
    if (null != loadFailure) {
      throw new SolrException(ErrorCode.SERVER_ERROR, "SolrCore '" + name +
                              "' is not available due to init failure: " +
                              loadFailure.exception.getMessage(), loadFailure.exception);
    }
    // otherwise the user is simply asking for something that doesn't exist.
    return null;
  }

  // This will put an entry in pending core ops if the core isn't loaded
  core = solrCores.waitAddPendingCoreOps(name);

  if (isShutDown) return null; // We're quitting, so stop. This needs to be after the wait above since we may come off
                               // the wait as a consequence of shutting down.
  try {
    if (core == null) {
      if (zkSys.getZkController() != null) {
        zkSys.getZkController().throwErrorIfReplicaReplaced(desc);
      }
      core = create(desc); // This should throw an error if it fails.
    }
    core.open();
  }
  finally {
    // Always clear the pending-op entry added by waitAddPendingCoreOps above.
    solrCores.removeFromPendingOps(name);
  }

  return core;
}
public JarRepository getJarRepository(){
  return jarRepository;
}

// ---------------- CoreContainer request handlers --------------

/**
 * Reflectively instantiates a handler of the given class, passing this
 * CoreContainer as the single constructor argument.
 */
protected <T> T createHandler(String handlerClass, Class<T> clazz) {
  return loader.newInstance(handlerClass, clazz, null, new Class[] { CoreContainer.class }, new Object[] { this });
}

public CoreAdminHandler getMultiCoreHandler() {
  return coreAdminHandler;
}

public CollectionsHandler getCollectionsHandler() {
  return collectionsHandler;
}

public InfoHandler getInfoHandler() {
  return infoHandler;
}

public String getHostName() {
  return this.hostName;
}

/**
 * Gets the alternate path for multicore handling:
 * This is used in case there is a registered unnamed core (aka name is "") to
 * declare an alternate way of accessing named cores.
 * This can also be used in a pseudo single-core environment so admins can prepare
 * a new version before swapping.
 */
public String getManagementPath() {
  return cfg.getManagementPath();
}

public LogWatcher getLogging() {
  return logging;
}

/**
 * Determines whether the core is already loaded or not but does NOT load the core
 *
 */
public boolean isLoaded(String name) {
  return solrCores.isLoaded(name);
}

public boolean isLoadedNotPendingClose(String name) {
  return solrCores.isLoadedNotPendingClose(name);
}

/**
 * Gets a solr core descriptor for a core that is not loaded. Note that if the caller calls this on a
 * loaded core, the unloaded descriptor will be returned.
 *
 * @param cname - name of the unloaded core descriptor to load. NOTE:
 * @return a coreDescriptor. May return null
 */
public CoreDescriptor getUnloadedCoreDescriptor(String cname) {
  return solrCores.getUnloadedCoreDescriptor(cname);
}

public String getSolrHome() {
  return solrHome;
}

public boolean isZooKeeperAware() {
  // Cloud mode is detected by the presence of a ZkController.
  return zkSys.getZkController() != null;
}

public ZkController getZkController() {
  return zkSys.getZkController();
}

public ConfigSolr getConfig() {
  return cfg;
}

/** The default ShardHandlerFactory used to communicate with other solr instances */
public ShardHandlerFactory getShardHandlerFactory() {
  return shardHandlerFactory;
}

public UpdateShardHandler getUpdateShardHandler() {
  return updateShardHandler;
}

public SolrResourceLoader getResourceLoader() {
  return loader;
}
}
/**
 * Background thread that closes SolrCores queued for closing. It waits on the
 * SolrCores modify lock until notified, then drains cores returned by
 * {@code getCoreToClose()} until none remain or the container shuts down.
 */
class CloserThread extends Thread {
  CoreContainer container;
  SolrCores solrCores;
  ConfigSolr cfg;


  CloserThread(CoreContainer container, SolrCores solrCores, ConfigSolr cfg) {
    this.container = container;
    this.solrCores = solrCores;
    this.cfg = cfg;
  }

  // It's important that this be the _only_ thread removing things from pendingDynamicCloses!
  // This is single-threaded, but I tried a multi-threaded approach and didn't see any performance gains, so
  // there's no good justification for the complexity. I suspect that the locking on things like DefaultSolrCoreState
  // essentially create a single-threaded process anyway.
  @Override
  public void run() {
    while (! container.isShutDown()) {
      synchronized (solrCores.getModifyLock()) { // need this so we can wait and be awoken.
        try {
          solrCores.getModifyLock().wait();
        } catch (InterruptedException e) {
          // Well, if we've been told to stop, we will. Otherwise, continue on and check to see if there are
          // any cores to close.
          // NOTE(review): the interrupt status is deliberately not restored here;
          // loop termination is driven by container.isShutDown(), and re-setting
          // the flag would make the next wait() throw immediately.
        }
      }
      // Drain every queued core unless the container starts shutting down mid-drain.
      for (SolrCore removeMe = solrCores.getCoreToClose();
           removeMe != null && !container.isShutDown();
           removeMe = solrCores.getCoreToClose()) {
        try {
          removeMe.close();
        } finally {
          solrCores.removeFromPendingOps(removeMe.getName());
        }
      }
    }
  }
}
| apache-2.0 |
VHAINNOVATIONS/TheDailyPlan | LegacyApp/tdpWeb/src/main/java/gov/va/medora/mdws/emrsvc/GetDischargeSummaries.java | 2528 |
package gov.va.medora.mdws.emrsvc;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="fromDate" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="toDate" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="nNotes" type="{http://www.w3.org/2001/XMLSchema}int"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// JAXB-generated request wrapper; see the schema fragment in the class javadoc above.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "fromDate",
    "toDate",
    "nNotes"
})
@XmlRootElement(name = "getDischargeSummaries")
public class GetDischargeSummaries {

    // <fromDate> element value; optional per the schema (minOccurs="0").
    protected String fromDate;
    // <toDate> element value; optional per the schema (minOccurs="0").
    protected String toDate;
    // <nNotes> element value; required int per the schema.
    protected int nNotes;

    /**
     * Gets the value of the fromDate property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getFromDate() {
        return fromDate;
    }

    /**
     * Sets the value of the fromDate property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setFromDate(String value) {
        this.fromDate = value;
    }

    /**
     * Gets the value of the toDate property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getToDate() {
        return toDate;
    }

    /**
     * Sets the value of the toDate property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setToDate(String value) {
        this.toDate = value;
    }

    /**
     * Gets the value of the nNotes property.
     *
     */
    public int getNNotes() {
        return nNotes;
    }

    /**
     * Sets the value of the nNotes property.
     *
     */
    public void setNNotes(int value) {
        this.nNotes = value;
    }

}
| apache-2.0 |
apache/pig | test/org/apache/pig/test/Util.java | 58248 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import static java.util.regex.Matcher.quoteReplacement;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.log4j.Appender;
import org.apache.log4j.FileAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.SimpleLayout;
import org.apache.log4j.WriterAppender;
import org.apache.pig.ExecType;
import org.apache.pig.ExecTypeProvider;
import org.apache.pig.LoadCaster;
import org.apache.pig.PigException;
import org.apache.pig.PigServer;
import org.apache.pig.ResourceSchema.ResourceFieldSchema;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRCompiler;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.tez.TezResourceManager;
import org.apache.pig.backend.hadoop.executionengine.util.MapRedUtil;
import org.apache.pig.builtin.Utf8StorageConverter;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.DefaultBagFactory;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
import org.apache.pig.impl.util.LogUtils;
import org.apache.pig.newplan.logical.optimizer.LogicalPlanPrinter;
import org.apache.pig.newplan.logical.optimizer.SchemaResetter;
import org.apache.pig.newplan.logical.optimizer.UidResetter;
import org.apache.pig.newplan.logical.relational.LogToPhyTranslationVisitor;
import org.apache.pig.newplan.logical.relational.LogicalPlan;
import org.apache.pig.newplan.logical.relational.LogicalSchema;
import org.apache.pig.newplan.logical.relational.LogicalSchema.LogicalFieldSchema;
import org.apache.pig.newplan.logical.visitor.DanglingNestedNodeRemover;
import org.apache.pig.newplan.logical.visitor.SortInfoSetter;
import org.apache.pig.newplan.logical.visitor.StoreAliasSetter;
import org.apache.pig.parser.ParserException;
import org.apache.pig.parser.QueryParserDriver;
import org.apache.pig.tools.grunt.GruntParser;
import org.apache.pig.tools.pigstats.ScriptState;
import org.apache.spark.package$;
import org.junit.Assert;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
public class Util {
// Factories shared by the helper methods below for building test tuples and bags.
private static BagFactory mBagFactory = BagFactory.getInstance();
private static TupleFactory mTupleFactory = TupleFactory.getInstance();

// Commonly-checked system state
// =================

public static final boolean WINDOWS /* borrowed from Path.WINDOWS, Shell.WINDOWS */
    = System.getProperty("os.name").startsWith("Windows");

// Test build output directory; overridable via -Dtest.build.dir.
public static final String TEST_DIR = System.getProperty("test.build.dir", "build/test");

// Helper Functions
// =================
/**
 * Fills tuple t with the ints from input, one field per array element.
 *
 * @param t tuple to populate; must already have at least input.length fields
 * @param input values to store
 * @return the same tuple t, for call chaining
 * @throws ExecException if a field index is out of range
 */
static public Tuple loadFlatTuple(Tuple t, int[] input) throws ExecException {
    for (int i = 0; i < input.length; i++) {
        // Integer.valueOf uses the small-value cache; new Integer(...) is deprecated.
        t.set(i, Integer.valueOf(input[i]));
    }
    return t;
}
/**
 * Copies each string of input into the corresponding field of t.
 *
 * @param t tuple to populate; must already have at least input.length fields
 * @param input strings to store
 * @return the same tuple t, for call chaining
 * @throws ExecException if a field index is out of range
 */
static public Tuple loadTuple(Tuple t, String[] input) throws ExecException {
    int field = 0;
    for (String value : input) {
        t.set(field++, value);
    }
    return t;
}
/**
 * Copies each DataByteArray of input into the corresponding field of t.
 *
 * @param t tuple to populate; must already have at least input.length fields
 * @param input byte-array values to store
 * @return the same tuple t, for call chaining
 * @throws ExecException if a field index is out of range
 */
static public Tuple loadTuple(Tuple t, DataByteArray[] input) throws ExecException {
    int field = 0;
    for (DataByteArray value : input) {
        t.set(field++, value);
    }
    return t;
}
/**
 * Wraps every int of input in a single-field tuple, collects those tuples into
 * one bag, and stores the bag in field 0 of t.
 *
 * @param t tuple whose field 0 receives the bag
 * @param input values to nest
 * @return the same tuple t, for call chaining
 * @throws ExecException if field 0 cannot be set
 */
static public Tuple loadNestTuple(Tuple t, int[] input) throws ExecException {
    DataBag bag = BagFactory.getInstance().newDefaultBag();
    for (int value : input) {
        Tuple inner = TupleFactory.getInstance().newTuple(1);
        inner.set(0, value);
        bag.add(inner);
    }
    t.set(0, bag);
    return t;
}
/**
 * Wraps every long of input in a single-field tuple, collects those tuples into
 * one bag, and stores the bag in field 0 of t.
 *
 * @param t tuple whose field 0 receives the bag
 * @param input values to nest
 * @return the same tuple t, for call chaining
 * @throws ExecException if field 0 cannot be set
 */
static public Tuple loadNestTuple(Tuple t, long[] input) throws ExecException {
    DataBag bag = BagFactory.getInstance().newDefaultBag();
    for(int i = 0; i < input.length; i++) {
        Tuple f = TupleFactory.getInstance().newTuple(1);
        // Long.valueOf replaces the deprecated new Long(...) constructor.
        f.set(0, Long.valueOf(input[i]));
        bag.add(f);
    }
    t.set(0, bag);
    return t;
}
// this one should handle String, DataByteArray, Long, Integer etc..
/**
 * Generic variant: wraps every element of input in a single-field tuple,
 * collects those tuples into one bag, and stores the bag in field 0 of t.
 *
 * @param t tuple whose field 0 receives the bag
 * @param input values to nest (String, DataByteArray, Long, Integer, ...)
 * @return the same tuple t, for call chaining
 * @throws ExecException if field 0 cannot be set
 */
static public <T> Tuple loadNestTuple(Tuple t, T[] input) throws ExecException {
    DataBag bag = BagFactory.getInstance().newDefaultBag();
    for (T value : input) {
        Tuple inner = TupleFactory.getInstance().newTuple(1);
        inner.set(0, value);
        bag.add(inner);
    }
    t.set(0, bag);
    return t;
}
/**
 * Create an array of tuple bags with specified size created by splitting
 * the input array of primitive types
 *
 * @param input Array of primitive types
 * @param bagSize The number of tuples to be split and copied into each bag
 *
 * @return an array of tuple bags with each bag containing bagSize tuples split from the input
 */
static public <T> Tuple[] splitCreateBagOfTuples(T[] input, int bagSize)
        throws ExecException {
    List<Tuple> result = new ArrayList<Tuple>();
    for (int from = 0; from < input.length; from += bagSize) {
        // The last slice may be shorter than bagSize.
        int to = Math.min(from + bagSize, input.length);
        T[] slice = Arrays.copyOfRange(input, from, to);
        Tuple holder = TupleFactory.getInstance().newTuple(1);
        result.add(loadNestTuple(holder, slice));
    }
    return result.toArray(new Tuple[0]);
}
/**
 * Appends every element of b to tuple t (t grows by b.length fields).
 */
static public <T>void addToTuple(Tuple t, T[] b)
{
    for(int i = 0; i < b.length; i++)
        t.append(b[i]);
}

/**
 * Builds a tuple directly over the given values (newTupleNoCopy does not copy
 * the backing list).
 */
static public Tuple buildTuple(Object... args) throws ExecException {
    return TupleFactory.getInstance().newTupleNoCopy(Lists.newArrayList(args));
}

/**
 * Builds a tuple whose fields are the serialized (DataByteArray) forms of args.
 * Null elements stay null; elements whose serialization throws ExecException
 * also become null -- the exception is deliberately swallowed.
 */
static public Tuple buildBinTuple(final Object... args) throws IOException {
    return TupleFactory.getInstance().newTuple(Lists.transform(
            Lists.newArrayList(args), new Function<Object, DataByteArray>() {
                @Override
                public DataByteArray apply(Object o) {
                    if (o == null) {
                        return null;
                    }
                    try {
                        return new DataByteArray(DataType.toBytes(o));
                    } catch (ExecException e) {
                        // best-effort: unconvertible values become null fields
                        return null;
                    }
                }
            }));
}
/**
 * Builds a new tuple containing every element of s, in order.
 */
static public <T>Tuple createTuple(T[] s)
{
    // Start from an empty tuple and append each element via addToTuple.
    Tuple result = mTupleFactory.newTuple();
    addToTuple(result, s);
    return result;
}
/**
 * Builds a new default bag containing every tuple of t.
 */
static public DataBag createBag(Tuple[] t)
{
    DataBag bag = mBagFactory.newDefaultBag();
    for (Tuple tuple : t) {
        bag.add(tuple);
    }
    return bag;
}
/**
 * Builds a bag of single-field tuples, one per element of input.
 *
 * @throws ExecException if a tuple field cannot be set
 */
static public<T> DataBag createBagOfOneColumn(T[] input) throws ExecException {
    DataBag result = mBagFactory.newDefaultBag();
    for (T value : input) {
        Tuple row = mTupleFactory.newTuple(1);
        row.set(0, value);
        result.add(row);
    }
    return result;
}
/**
 * Builds a map from a flat array of alternating keys and values:
 * {k1, v1, k2, v2, ...}.
 *
 * @param contents alternating key/value strings; a trailing key with no value
 *                 is ignored (the original code threw ArrayIndexOutOfBounds)
 * @return map of key -> value
 */
static public Map<String, Object> createMap(String[] contents)
{
    Map<String, Object> m = new HashMap<String, Object>();
    // Step by two: even indices are keys, odd indices are values. The i+1 bound
    // guards against an odd-length array.
    for (int i = 0; i + 1 < contents.length; i += 2) {
        m.put(contents[i], contents[i + 1]);
    }
    return m;
}
/**
 * Converts each element of input to a DataByteArray built from its toString()
 * bytes; null elements map to null.
 */
static public<T> DataByteArray[] toDataByteArrays(T[] input) {
    DataByteArray[] result = new DataByteArray[input.length];
    for (int i = 0; i < input.length; i++) {
        if (input[i] == null) {
            result[i] = null;
        } else {
            result[i] = new DataByteArray(input[i].toString().getBytes());
        }
    }
    return result;
}
/**
 * For each row of input, builds a bag holding one flat tuple of that row's ints
 * and stores the bag in the corresponding field of t.
 *
 * @throws ExecException if a field cannot be set
 */
static public Tuple loadNestTuple(Tuple t, int[][] input) throws ExecException {
    int field = 0;
    for (int[] row : input) {
        DataBag bag = BagFactory.getInstance().newDefaultBag();
        bag.add(loadFlatTuple(TupleFactory.getInstance().newTuple(row.length), row));
        t.set(field++, bag);
    }
    return t;
}
/**
 * For each row of input, builds a bag holding one tuple of that row's strings
 * and stores the bag in the corresponding field of t.
 *
 * @throws ExecException if a field cannot be set
 */
static public Tuple loadTuple(Tuple t, String[][] input) throws ExecException {
    int field = 0;
    for (String[] row : input) {
        DataBag bag = BagFactory.getInstance().newDefaultBag();
        bag.add(loadTuple(TupleFactory.getInstance().newTuple(row.length), row));
        t.set(field++, bag);
    }
    return t;
}
/**
 * Helper to remove colons (if any exist) from paths to sanitize them for
 * consumption by hdfs.
 *
 * @param origPath original path name
 * @return the path with every ':' character removed
 *         (the old javadoc wrongly claimed everything before the colon was removed)
 */
static public String removeColon(String origPath)
{
    // String.replace performs a literal substitution; no need to compile
    // ":" as a regex the way replaceAll did.
    return origPath.replace(":", "");
}
/**
 * Helper to convert \r\n to \n for cross-platform string
 * matching with checked-in baselines.
 *
 * @param origPath original string
 * @return the string with every Windows line ending (\r\n) replaced by \n
 */
static public String standardizeNewline(String origPath)
{
    // Literal replacement; replaceAll would needlessly compile "\r\n" as a regex.
    return origPath.replace("\r\n", "\n");
}
/**
 * Helper to create a temporary file with given input data for use in test cases.
 *
 * @param tmpFilenamePrefix file-name prefix
 * @param tmpFilenameSuffix file-name suffix
 * @param inputData input for test cases, each string in inputData[] is written
 * on one line
 * @return {@link File} handle to the created temporary file, deleted on JVM exit
 * @throws IOException if the file cannot be created or written
 */
static public File createInputFile(String tmpFilenamePrefix,
                                   String tmpFilenameSuffix,
                                   String[] inputData)
        throws IOException {
    File tmpFile = File.createTempFile(tmpFilenamePrefix, tmpFilenameSuffix);
    tmpFile.deleteOnExit();
    writeToFile(tmpFile, inputData);
    return tmpFile;
}
/**
 * Creates (or overwrites) the named local file with one line per inputData
 * entry; the file is deleted on JVM exit.
 *
 * @throws IOException if the file cannot be written
 */
static public File createLocalInputFile(String filename, String[] inputData)
        throws IOException {
    File localFile = new File(filename);
    localFile.deleteOnExit();
    writeToFile(localFile, inputData);
    return localFile;
}
/**
 * Writes each entry of inputData to f as one '\n'-terminated line, UTF-8 encoded.
 * The file is overwritten if it already exists.
 *
 * @param f destination file
 * @param inputData lines to write, one per array element
 * @throws IOException if the file cannot be opened
 */
public static void writeToFile(File f, String[] inputData) throws
IOException {
    // try-with-resources guarantees the writer (and underlying stream) is
    // closed even on failure; the original leaked it if a write threw.
    try (PrintWriter pw = new PrintWriter(new OutputStreamWriter(
            new FileOutputStream(f), "UTF-8"))) {
        for (String line : inputData) {
            pw.print(line);
            pw.print("\n");
        }
    }
}
/**
 * Helper to create a dfs file on the Minicluster DFS with given
 * input data for use in test cases.
 *
 * @param miniCluster reference to the Minicluster where the file should be created
 * @param fileName pathname of the file to be created
 * @param inputData input for test cases, each string in inputData[] is written
 * on one line
 * @throws IOException
 */
static public void createInputFile(MiniGenericCluster miniCluster, String fileName,
                                   String[] inputData)
        throws IOException {
    FileSystem fs = miniCluster.getFileSystem();
    createInputFile(fs, fileName, inputData);
}

/**
 * Writes inputData to fileName on the given FileSystem, one '\n'-terminated
 * UTF-8 line per entry. Fails if the file already exists.
 *
 * @throws IOException if the file exists or cannot be written
 */
static public void createInputFile(FileSystem fs, String fileName,
                                   String[] inputData) throws IOException {
    if(Util.WINDOWS){
        // Normalize backslashes so the path is a valid HDFS path on Windows.
        fileName = fileName.replace('\\','/');
    }
    if(fs.exists(new Path(fileName))) {
        throw new IOException("File " + fileName + " already exists on the FileSystem");
    }
    FSDataOutputStream stream = fs.create(new Path(fileName));
    PrintWriter pw = new PrintWriter(new OutputStreamWriter(stream, "UTF-8"));
    for (int i=0; i<inputData.length; i++){
        pw.print(inputData[i]);
        pw.print("\n");
    }
    pw.close();
}
/**
 * Reads back the contents of a file -- or of every file in a directory whose
 * name does not start with "_" (e.g. _SUCCESS, _logs) -- on the given FileSystem.
 *
 * @param fs the FileSystem to read from
 * @param fileName path of a file or output directory
 * @return all lines read, in file-listing order
 * @throws IOException if the path does not exist or cannot be read
 */
static public String[] readOutput(FileSystem fs, String fileName) throws IOException {
    if(Util.WINDOWS){
        fileName = fileName.replace('\\','/');
    }
    Path path = new Path(fileName);
    if(!fs.exists(path)) {
        throw new IOException("Path " + fileName + " does not exist on the FileSystem");
    }
    FileStatus fileStatus = fs.getFileStatus(path);
    FileStatus[] files;
    if (fileStatus.isDirectory()) {
        // Skip bookkeeping files such as _SUCCESS and _logs.
        files = fs.listStatus(path, new PathFilter() {
            @Override
            public boolean accept(Path p) {
                return !p.getName().startsWith("_");
            }
        });
    } else {
        files = new FileStatus[] { fileStatus };
    }
    List<String> result = new ArrayList<String>();
    for (FileStatus f : files) {
        FSDataInputStream stream = fs.open(f.getPath());
        BufferedReader br = new BufferedReader(new InputStreamReader(stream, "UTF-8"));
        String line;
        while ((line = br.readLine()) != null) {
            result.add(line);
        }
        br.close();
    }
    return result.toArray(new String[result.size()]);
}
/**
 * Helper to create a dfs file on the MiniCluster dfs. This returns an
 * outputstream that can be used in test cases to write data.
 *
 * @param cluster
 *            reference to the MiniCluster where the file should be created
 * @param fileName
 *            pathname of the file to be created
 * @return OutputStream to write any data to the file created on the
 *         MiniCluster. The caller is responsible for closing it.
 * @throws IOException if the file already exists or cannot be created
 */
static public OutputStream createInputFile(MiniGenericCluster cluster,
                                           String fileName) throws IOException {
    FileSystem fs = cluster.getFileSystem();
    if(Util.WINDOWS){
        fileName = fileName.replace('\\','/');
    }
    if (fs.exists(new Path(fileName))) {
        throw new IOException("File " + fileName
                + " already exists on the minicluster");
    }
    return fs.create(new Path(fileName));
}
/**
 * Helper to create an empty temp file on local file system
 * which will be deleted on exit
 * @param prefix file-name prefix
 * @param suffix file-name suffix
 * @return File denoting a newly-created empty file
 * @throws IOException if the file cannot be created
 */
static public File createTempFileDelOnExit(String prefix, String suffix)
        throws IOException {
    File result = File.createTempFile(prefix, suffix);
    result.deleteOnExit();
    return result;
}
/**
 * Helper to remove a dfs file from the minicluster DFS
 *
 * @param miniCluster reference to the Minicluster where the file should be deleted
 * @param fileName pathname of the file to be deleted
 * @throws IOException
 */
static public void deleteFile(MiniGenericCluster miniCluster, String fileName)
        throws IOException {
    FileSystem fs = miniCluster.getFileSystem();
    if(Util.WINDOWS){
        fileName = fileName.replace('\\','/');
    }
    // recursive = true: directories are removed along with their contents
    fs.delete(new Path(fileName), true);
}

/**
 * Deletes a dfs file from the MiniCluster DFS quietly
 *
 * @param miniCluster the MiniCluster where the file should be deleted
 * @param fileName the path of the file to be deleted
 */
public static void deleteQuietly(MiniGenericCluster miniCluster, String fileName) {
    try {
        deleteFile(miniCluster, fileName);
    } catch (IOException ignored) {
        // best-effort cleanup: failures are intentionally swallowed
    }
}
/**
 * Recursively deletes fileName on the FileSystem configured in the
 * PigContext's properties.
 *
 * @throws IOException if the FileSystem cannot be obtained
 */
static public void deleteFile(PigContext pigContext, String fileName)
        throws IOException {
    Configuration conf = ConfigurationUtil.toConfiguration(
            pigContext.getProperties());
    FileSystem fs = FileSystem.get(conf);
    if(Util.WINDOWS){
        fileName = fileName.replace('\\','/');
    }
    fs.delete(new Path(fileName), true);
}

/**
 * Returns true if fileName exists on the FileSystem configured in the
 * PigContext's properties.
 *
 * @throws IOException if the FileSystem cannot be obtained
 */
static public boolean exists(PigContext pigContext, String fileName)
        throws IOException {
    Configuration conf = ConfigurationUtil.toConfiguration(
            pigContext.getProperties());
    FileSystem fs = FileSystem.get(conf);
    if(Util.WINDOWS){
        fileName = fileName.replace('\\','/');
    }
    return fs.exists(new Path(fileName));
}
/**
 * Helper function to check if the result of a Pig Query is in line with
 * expected results.
 *
 * @param actualResults Result of the executed Pig query
 * @param expectedResults Expected results Array to validate against
 */
static public void checkQueryOutputs(Iterator<Tuple> actualResults,
                                     Tuple[] expectedResults) {
    checkQueryOutputs(actualResults, Arrays.asList(expectedResults));
}

/**
 * Helper function to check if the result of a Pig Query is in line with
 * expected results.
 *
 * @param actualResults Result of the executed Pig query
 * @param expectedResults Expected results List to validate against
 */
static public void checkQueryOutputs(Iterator<Tuple> actualResults,
                                     List<Tuple> expectedResults) {
    checkQueryOutputs(actualResults, expectedResults.iterator(), null );
}

/**
 * Helper function to check if the result of a Pig Query is in line with
 * expected results.
 *
 * @param actualResults Result of the executed Pig query
 * @param expectedResults Expected results List to validate against
 * @param expectedRows exact number of expected rows, or null to skip the count check
 */
static public void checkQueryOutputs(Iterator<Tuple> actualResults,
                                     Iterator<Tuple> expectedResults, Integer expectedRows) {
    int count = 0;
    while (expectedResults.hasNext()) {
        Tuple expected = expectedResults.next();
        Assert.assertTrue("Actual result has less records than expected results", actualResults.hasNext());
        Tuple actual = actualResults.next();

        // If this tuple contains any bags, bags will be sorted before comparisons
        if( !expected.equals(actual) ) {
            // Using string comparisons since error message is more readable
            // (only showing the part which differs)
            Assert.assertEquals(expected.toString(), actual.toString());
            // if above goes through, simply failing with object comparisons
            Assert.assertEquals(expected, actual);
        }
        count++;
    }
    Assert.assertFalse("Actual result has more records than expected results", actualResults.hasNext());
    if (expectedRows != null) {
        Assert.assertEquals((int)expectedRows, count);
    }
}

/**
 * Helper function to check if the result of a Pig Query is in line with
 * expected results. It sorts actual and expected results before comparison
 *
 * @param actualResultsIt Result of the executed Pig query
 * @param expectedResList Expected results to validate against
 */
static public void checkQueryOutputsAfterSort(Iterator<Tuple> actualResultsIt,
                                              List<Tuple> expectedResList) {
    // Drain the iterator into a list, then delegate to the list-based overload.
    List<Tuple> actualResList = new ArrayList<Tuple>();
    while(actualResultsIt.hasNext()){
        actualResList.add(actualResultsIt.next());
    }
    checkQueryOutputsAfterSort(actualResList, expectedResList);
}
/**
* Helper function to check if the result of Pig Query is in line with expected results.
* It sorts actual and expected results before comparison.
* The tuple size in the tuple list can vary. Pass by a two-dimension array, it will be converted to be a tuple list.
* e.g. expectedTwoDimensionObjects is [{{10, "will_join", 10, "will_join"}, {11, "will_not_join", null}, {null, 12, "will_not_join"}}],
* the field size of these 3 tuples are [4,3,3]
*
* @param actualResultsIt
* @param expectedTwoDimensionObjects represents a tuple list, in which the tuple can have variable size.
*/
static public void checkQueryOutputsAfterSort(Iterator<Tuple> actualResultsIt,
Object[][] expectedTwoDimensionObjects) {
List<Tuple> expectedResTupleList = new ArrayList<Tuple>();
for (int i = 0; i < expectedTwoDimensionObjects.length; ++i) {
Tuple t = TupleFactory.getInstance().newTuple();
for (int j = 0; j < expectedTwoDimensionObjects[i].length; ++j) {
t.append(expectedTwoDimensionObjects[i][j]);
}
expectedResTupleList.add(t);
}
checkQueryOutputsAfterSort(actualResultsIt, expectedResTupleList);
}
static public void checkQueryOutputsAfterSort(
List<Tuple> actualResList, List<Tuple> expectedResList) {
Collections.sort(actualResList);
Collections.sort(expectedResList);
checkQueryOutputs(actualResList.iterator(), expectedResList);
}
/**
* Check if subStr is a subString of str . calls org.junit.Assert.fail if it is not
* @param str
* @param subStr
*/
static public void checkStrContainsSubStr(String str, String subStr){
if(!str.contains(subStr)){
fail("String '"+ subStr + "' is not a substring of '" + str + "'");
}
}
/**
* Check if query plan for alias argument produces exception with expected
* error message in expectedErr argument.
* @param query
* @param alias
* @param expectedErr
* @throws IOException
*/
static public void checkExceptionMessage(String query, String alias, String expectedErr)
throws IOException {
PigServer pig = new PigServer(ExecType.LOCAL);
boolean foundEx = false;
try{
Util.registerMultiLineQuery(pig, query);
pig.explain(alias, System.out);
}catch(FrontendException e){
foundEx = true;
checkMessageInException(e, expectedErr);
}
if(!foundEx)
fail("No exception thrown. Exception is expected.");
}
public static void checkMessageInException(FrontendException e,
String expectedErr) {
PigException pigEx = LogUtils.getPigException(e);
String message = pigEx.getMessage();
checkErrorMessageContainsExpected(message, expectedErr);
}
public static void checkErrorMessageContainsExpected(String message, String expectedMessage){
if(!message.contains(expectedMessage)){
String msg = "Expected error message containing '"
+ expectedMessage + "' but got '" + message + "'" ;
fail(msg);
}
}
static private String getFSMkDirCommand(String fileName) {
Path parentDir = new Path(fileName).getParent();
String mkdirCommand = parentDir.getName().isEmpty() ? "" : "fs -mkdir -p " + parentDir + "\n";
return mkdirCommand;
}
/**
* Utility method to copy a file form local filesystem to the dfs on
* the minicluster for testing in mapreduce mode
* @param cluster a reference to the minicluster
* @param localFileName the pathname of local file
* @param fileNameOnCluster the name with which the file should be created on the minicluster
* @throws IOException
*/
    static public void copyFromLocalToCluster(MiniGenericCluster cluster,
            String localFileName, String fileNameOnCluster) throws IOException {
        if(Util.WINDOWS){
            // NOTE(review): local paths with a drive letter (contain ':') are
            // normalized to backslashes, all others to forward slashes --
            // presumably to match what the grunt "fs -put" command expects;
            // confirm against GruntParser path handling.
            if (!localFileName.contains(":")) {
                localFileName = localFileName.replace('\\','/');
            } else {
                localFileName = localFileName.replace('/','\\');
            }
            // HDFS-side paths always use forward slashes.
            fileNameOnCluster = fileNameOnCluster.replace('\\','/');
        }
        PigServer ps = new PigServer(cluster.getExecType(), cluster.getProperties());
        // Build a grunt script: create the parent dir (if any), then upload.
        String script = getFSMkDirCommand(fileNameOnCluster) + "fs -put " + localFileName + " " + fileNameOnCluster;
        GruntParser parser = new GruntParser(new StringReader(script), ps);
        parser.setInteractive(false);
        try {
            parser.parseStopOnError();
        } catch (org.apache.pig.tools.pigscript.parser.ParseException e) {
            // surface script failures as IOException to callers
            throw new IOException(e);
        }
    }
static public void copyFromLocalToLocal(String fromLocalFileName,
String toLocalFileName) throws IOException {
FileUtils.copyFile(new File(fromLocalFileName), new File(toLocalFileName));
}
static public void copyFromClusterToLocal(MiniGenericCluster cluster,
String fileNameOnCluster, String localFileName) throws IOException {
if(Util.WINDOWS){
fileNameOnCluster = fileNameOnCluster.replace('\\','/');
localFileName = localFileName.replace('\\','/');
}
File parent = new File(localFileName).getParentFile();
if (!parent.exists()) {
parent.mkdirs();
}
PrintWriter writer = new PrintWriter(new FileWriter(localFileName));
FileSystem fs = FileSystem.get(ConfigurationUtil.toConfiguration(
cluster.getProperties()));
if(!fs.exists(new Path(fileNameOnCluster))) {
throw new IOException("File " + fileNameOnCluster + " does not exists on the minicluster");
}
String line = null;
FileStatus fst = fs.getFileStatus(new Path(fileNameOnCluster));
if(fst.isDirectory()) {
throw new IOException("Only files from cluster can be copied locally," +
" " + fileNameOnCluster + " is a directory");
}
FSDataInputStream stream = fs.open(new Path(fileNameOnCluster));
BufferedReader reader = new BufferedReader(new InputStreamReader(stream));
while( (line = reader.readLine()) != null) {
writer.println(line);
}
reader.close();
writer.close();
}
static public void printQueryOutput(Iterator<Tuple> actualResults,
Tuple[] expectedResults) {
System.out.println("Expected :") ;
for (Tuple expected : expectedResults) {
System.out.println(expected.toString()) ;
}
System.out.println("---End----") ;
System.out.println("Actual :") ;
while (actualResults.hasNext()) {
System.out.println(actualResults.next().toString()) ;
}
System.out.println("---End----") ;
}
/**
* Helper method to replace all occurrences of "\" with "\\" in a
* string. This is useful to fix the file path string on Windows
* where "\" is used as the path separator.
*
* @param str Any string
* @return The resulting string
*/
public static String encodeEscape(String str) {
String regex = "\\\\";
String replacement = quoteReplacement("\\\\");
return str.replaceAll(regex, replacement);
}
public static String generateURI(String filename, PigContext context)
throws IOException {
if(Util.WINDOWS){
filename = filename.replace('\\','/');
}
if (context.getExecType() == ExecType.MAPREDUCE || context.getExecType().name().equals("TEZ") ||
context.getExecType().name().equals("SPARK")) {
return FileLocalizer.hadoopify(filename, context);
} else if (context.getExecType().isLocal()) {
return filename;
} else {
throw new IllegalStateException("ExecType: " + context.getExecType());
}
}
public static Object getPigConstant(String pigConstantAsString) throws ParserException {
QueryParserDriver queryParser = new QueryParserDriver( new PigContext(),
"util", new HashMap<String, String>() ) ;
return queryParser.parseConstant(pigConstantAsString);
}
/**
* Parse list of strings in to list of tuples, convert quoted strings into
* @param tupleConstants
* @return
* @throws ParserException
*/
public static List<Tuple> getTuplesFromConstantTupleStrings(String[] tupleConstants) throws ParserException {
List<Tuple> result = new ArrayList<Tuple>(tupleConstants.length);
for(int i = 0; i < tupleConstants.length; i++) {
result.add((Tuple) getPigConstant(tupleConstants[i]));
}
return result;
}
/**
* Parse list of strings in to list of tuples, convert quoted strings into
* DataByteArray
* @param tupleConstants
* @return
* @throws ParserException
* @throws ExecException
*/
public static List<Tuple> getTuplesFromConstantTupleStringAsByteArray(String[] tupleConstants)
throws ParserException, ExecException {
List<Tuple> tuples = getTuplesFromConstantTupleStrings(tupleConstants);
for(Tuple t : tuples){
convertStringToDataByteArray(t);
}
return tuples;
}
/**
* Convert String objects in argument t to DataByteArray objects
* @param t
* @throws ExecException
*/
private static void convertStringToDataByteArray(Tuple t) throws ExecException {
if(t == null)
return;
for(int i=0; i<t.size(); i++){
Object col = t.get(i);
if(col == null)
continue;
if(col instanceof String){
DataByteArray dba = (col == null) ?
null : new DataByteArray((String)col);
t.set(i, dba);
}else if(col instanceof Tuple){
convertStringToDataByteArray((Tuple)col);
}else if(col instanceof DataBag){
Iterator<Tuple> it = ((DataBag)col).iterator();
while(it.hasNext()){
convertStringToDataByteArray(it.next());
}
}
}
}
public static File createFile(String[] data) throws Exception{
return createFile(null,data);
}
public static File createFile(String filePath, String[] data) throws Exception {
File f;
if( null == filePath || filePath.isEmpty() ) {
f = File.createTempFile("tmp", "");
} else {
f = new File(filePath);
}
if (f.getParent() != null && !(new File(f.getParent())).exists()) {
(new File(f.getParent())).mkdirs();
}
f.deleteOnExit();
PrintWriter pw = new PrintWriter(f);
for (int i=0; i<data.length; i++){
pw.println(data[i]);
}
pw.close();
return f;
}
/**
* Run default set of optimizer rules on new logical plan
* @param lp
* @return optimized logical plan
* @throws FrontendException
*/
public static LogicalPlan optimizeNewLP(
LogicalPlan lp)
throws FrontendException{
DanglingNestedNodeRemover DanglingNestedNodeRemover = new DanglingNestedNodeRemover( lp );
DanglingNestedNodeRemover.visit();
UidResetter uidResetter = new UidResetter( lp );
uidResetter.visit();
SchemaResetter schemaResetter =
new SchemaResetter( lp, true /*disable duplicate uid check*/ );
schemaResetter.visit();
StoreAliasSetter storeAliasSetter = new StoreAliasSetter( lp );
storeAliasSetter.visit();
// run optimizer
org.apache.pig.newplan.logical.optimizer.LogicalPlanOptimizer optimizer =
new org.apache.pig.newplan.logical.optimizer.LogicalPlanOptimizer(lp, 100, null);
optimizer.optimize();
SortInfoSetter sortInfoSetter = new SortInfoSetter( lp );
sortInfoSetter.visit();
return lp;
}
/**
* migrate old LP(logical plan) to new LP, optimize it, and build physical
* plan
* @param lp
* @param pc PigContext
* @return physical plan
* @throws Exception
*/
public static PhysicalPlan buildPhysicalPlanFromNewLP(
LogicalPlan lp, PigContext pc)
throws Exception {
LogToPhyTranslationVisitor visitor = new LogToPhyTranslationVisitor(lp);
visitor.setPigContext(pc);
visitor.visit();
return visitor.getPhysicalPlan();
}
public static MROperPlan buildMRPlan(PhysicalPlan pp, PigContext pc) throws Exception{
MRCompiler comp = new MRCompiler(pp, pc);
comp.compile();
comp.aggregateScalarsFiles();
comp.connectSoftLink();
return comp.getMRPlan();
}
public static MROperPlan buildMRPlanWithOptimizer(PhysicalPlan pp, PigContext pc) throws Exception {
MapRedUtil.checkLeafIsStore(pp, pc);
MapReduceLauncher launcher = new MapReduceLauncher();
return launcher.compile(pp,pc);
}
public static MROperPlan buildMRPlan(String query, PigContext pc) throws Exception {
LogicalPlan lp = Util.parse(query, pc);
Util.optimizeNewLP(lp);
PhysicalPlan pp = Util.buildPhysicalPlanFromNewLP(lp, pc);
MROperPlan mrp = Util.buildMRPlanWithOptimizer(pp, pc);
return mrp;
}
public static void registerMultiLineQuery(PigServer pigServer, String query) throws IOException {
File f = File.createTempFile("tmp", "");
PrintWriter pw = new PrintWriter(f);
pw.println(query);
pw.close();
pigServer.registerScript(f.getCanonicalPath());
}
public static int executeJavaCommand(String cmd) throws Exception {
return executeJavaCommandAndReturnInfo(cmd).exitCode;
}
    /**
     * Drains an InputStream on a background thread, accumulating the lines
     * read into {@code message}, joined with '\n'.
     */
    public static class ReadStream implements Runnable {
        InputStream is;
        Thread thread;
        // written by the reader thread; read via getMessage() once draining is done
        String message = "";
        public ReadStream(InputStream is) {
            this.is = is;
        }
        // Spawns the background reader thread.
        public void start () {
            thread = new Thread (this);
            thread.start ();
        }
        @Override
        public void run () {
            try {
                InputStreamReader isr = new InputStreamReader (is);
                BufferedReader br = new BufferedReader (isr);
                while (true) {
                    String s = br.readLine ();
                    if (s == null) break;
                    // join lines with '\n' but avoid a trailing/leading separator
                    if (!message.isEmpty()) {
                        message += "\n";
                    }
                    message += s;
                }
                is.close ();
            } catch (Exception ex) {
                ex.printStackTrace ();
            }
        }
        // NOTE(review): message is mutated by the reader thread without any
        // synchronization, so callers get no visibility guarantee; only read
        // this after the stream is fully drained -- confirm callers wait/join.
        public String getMessage() {
            return message;
        }
    }
    /**
     * Runs {@code cmd} (prefixed with $JAVA_HOME/bin/ when JAVA_HOME is set),
     * waits for it to finish, and returns its exit code plus captured
     * stdout/stderr.
     */
    public static ProcessReturnInfo executeJavaCommandAndReturnInfo(String cmd)
    throws Exception {
        String javaHome = System.getenv("JAVA_HOME");
        if(javaHome != null) {
            // run the binary from the configured JVM rather than relying on PATH
            String fileSeparator = System.getProperty("file.separator");
            cmd = javaHome + fileSeparator + "bin" + fileSeparator + cmd;
        }
        Process cmdProc = Runtime.getRuntime().exec(cmd);
        ProcessReturnInfo pri = new ProcessReturnInfo();
        // drain stdout/stderr concurrently so the child cannot block on a full pipe
        ReadStream stdoutStream = new ReadStream(cmdProc.getInputStream ());
        ReadStream stderrStream = new ReadStream(cmdProc.getErrorStream ());
        stdoutStream.start();
        stderrStream.start();
        cmdProc.waitFor();
        // NOTE(review): waitFor() does not guarantee the two reader threads have
        // finished draining the pipes; joining both threads before reading the
        // messages would make this race-free -- confirm whether output loss has
        // been observed.
        pri.exitCode = cmdProc.exitValue();
        pri.stdoutContents = stdoutStream.getMessage();
        pri.stderrContents = stderrStream.getMessage();
        return pri;
    }
public static class ProcessReturnInfo {
public int exitCode;
public String stderrContents;
public String stdoutContents;
@Override
public String toString() {
return "[Exit code: " + exitCode + ", stdout: <" + stdoutContents + ">, " +
"stderr: <" + stderrContents + ">";
}
}
static public boolean deleteDirectory(File path) {
if(path.exists()) {
File[] files = path.listFiles();
for(int i=0; i<files.length; i++) {
if(files[i].isDirectory()) {
deleteDirectory(files[i]);
}
else {
files[i].delete();
}
}
}
return(path.delete());
}
/**
* @param pigContext
* @param fileName
* @param input
* @throws IOException
*/
public static void createInputFile(PigContext pigContext,
String fileName, String[] input) throws IOException {
Configuration conf = ConfigurationUtil.toConfiguration(
pigContext.getProperties());
createInputFile(FileSystem.get(conf), fileName, input);
}
public static String[] readOutput(PigContext pigContext,
String fileName) throws IOException {
Configuration conf = ConfigurationUtil.toConfiguration(
pigContext.getProperties());
return readOutput(FileSystem.get(conf), fileName);
}
public static void printPlan(LogicalPlan logicalPlan ) throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(out);
LogicalPlanPrinter pp = new LogicalPlanPrinter(logicalPlan,ps);
pp.visit();
System.err.println(out.toString());
}
public static void printPlan(PhysicalPlan physicalPlan) throws Exception {
ByteArrayOutputStream out = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(out);
physicalPlan.explain(ps, "text", true);
System.err.println(out.toString());
}
public static List<Tuple> readFile2TupleList(String file, String delimiter) throws IOException{
List<Tuple> tuples=new ArrayList<Tuple>();
String line=null;
BufferedReader reader=new BufferedReader(new InputStreamReader(new FileInputStream(file)));
while((line=reader.readLine())!=null){
String[] tokens=line.split(delimiter);
Tuple tuple=TupleFactory.getInstance().newTuple(Arrays.asList(tokens));
tuples.add(tuple);
}
reader.close();
return tuples;
}
/**
* Delete the existing logFile for the class and set the logging to a
* use a new log file and set log level to DEBUG
* @param clazz class for which the log file is being set
* @param logFile current log file
* @return new log file
* @throws Exception
*/
    public static File resetLog(Class<?> clazz, File logFile) throws Exception {
        // drop the previous capture file, if any
        if (logFile != null)
            logFile.delete();
        Logger logger = Logger.getLogger(clazz);
        logger.removeAllAppenders();
        logger.setLevel(Level.DEBUG);
        SimpleLayout layout = new SimpleLayout();
        File newLogFile = File.createTempFile("log", "");
        // FileAppender args after the file name: append=false,
        // bufferedIO=false, bufferSize=0 -- unbuffered so tests can read
        // the log immediately after the statement that produced it
        FileAppender appender = new FileAppender(layout, newLogFile.toString(),
                        false, false, 0);
        logger.addAppender(appender);
        return newLogFile;
    }
/**
* Check if logFile (does not/)contains the given list of messages.
* @param logFile
* @param messages
* @param expected if true, the messages are expected in the logFile,
* otherwise messages should not be there in the log
*/
public static void checkLogFileMessage(File logFile, String[] messages, boolean expected) {
BufferedReader reader = null;
try {
reader = new BufferedReader(new FileReader(logFile));
String logMessage = "";
String line;
while ((line = reader.readLine()) != null) {
logMessage = logMessage + line + "\n";
}
reader.close();
for (int i = 0; i < messages.length; i++) {
boolean present = logMessage.contains(messages[i]);
if (expected) {
if(!present){
fail("The message " + messages[i] + " is not present in" +
"log file contents: " + logMessage);
}
}else{
if(present){
fail("The message " + messages[i] + " is present in" +
"log file contents: " + logMessage);
}
}
}
return ;
}
catch (IOException e) {
fail("caught exception while checking log message :" + e);
}
}
    /**
     * Builds (but does not execute) the logical plan for {@code query}.
     */
    public static LogicalPlan buildLp(PigServer pigServer, String query)
    throws Exception {
        // batch mode so registering the query only plans it
        pigServer.setBatchOn();
        pigServer.registerQuery( query );
        // PigServer.buildLp() is private; invoke it reflectively to get the plan
        java.lang.reflect.Method buildLp = pigServer.getClass().getDeclaredMethod("buildLp");
        buildLp.setAccessible(true);
        return (LogicalPlan ) buildLp.invoke( pigServer );
    }
public static PhysicalPlan buildPp(PigServer pigServer, String query)
throws Exception {
LogicalPlan lp = buildLp( pigServer, query );
lp.optimize(pigServer.getPigContext());
return ((HExecutionEngine)pigServer.getPigContext().getExecutionEngine()).compile(lp,
pigServer.getPigContext().getProperties());
}
public static LogicalPlan parse(String query, PigContext pc) throws FrontendException {
Map<String, String> fileNameMap = new HashMap<String, String>();
QueryParserDriver parserDriver = new QueryParserDriver( pc, "test", fileNameMap );
org.apache.pig.newplan.logical.relational.LogicalPlan lp = parserDriver.parse( query );
lp.validate(pc, "test", false);
return lp;
}
public static LogicalPlan parseAndPreprocess(String query, PigContext pc) throws FrontendException {
Map<String, String> fileNameMap = new HashMap<String, String>();
QueryParserDriver parserDriver = new QueryParserDriver( pc, "test", fileNameMap );
org.apache.pig.newplan.logical.relational.LogicalPlan lp = parserDriver.parse( query );
lp.validate(pc, "test", false);
return lp;
}
/**
* Replaces any alias in given schema that has name that starts with
* "NullAlias" with null . it does a case insensitive comparison of
* the alias name
* @param sch
*/
public static void schemaReplaceNullAlias(Schema sch){
if(sch == null)
return ;
for(FieldSchema fs : sch.getFields()){
if(fs.alias != null && fs.alias.toLowerCase().startsWith("nullalias")){
fs.alias = null;
}
schemaReplaceNullAlias(fs.schema);
}
}
static public void checkQueryOutputsAfterSort(Iterator<Tuple> actualResultsIt,
Tuple[] expectedResArray) {
List<Tuple> list = new ArrayList<Tuple>();
Collections.addAll(list, expectedResArray);
checkQueryOutputsAfterSort(actualResultsIt, list);
}
    /**
     * Recursively replaces every DataBag field of {@code t} with a sorted bag
     * containing the same tuples, so bag contents compare deterministically.
     */
    static public void convertBagToSortedBag(Tuple t) {
        for (int i=0;i<t.size();i++) {
            Object obj = null;
            try {
                obj = t.get(i);
            } catch (ExecException e) {
                // shall not happen: i is always within [0, t.size())
            }
            if (obj instanceof DataBag) {
                DataBag bag = (DataBag)obj;
                Iterator<Tuple> iter = bag.iterator();
                // null comparator -> natural tuple ordering
                DataBag sortedBag = DefaultBagFactory.getInstance().newSortedBag(null);
                while (iter.hasNext()) {
                    Tuple t2 = iter.next();
                    sortedBag.add(t2);
                    // sort bags nested inside this tuple as well
                    convertBagToSortedBag(t2);
                }
                try {
                    t.set(i, sortedBag);
                } catch (ExecException e) {
                    // shall not happen: index already validated by the get above
                }
            }
        }
    }
static public void checkQueryOutputsAfterSortRecursive(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, String schemaString) throws IOException {
LogicalSchema resultSchema = org.apache.pig.impl.util.Utils.parseSchema(schemaString);
checkQueryOutputsAfterSortRecursive(actualResultsIt, expectedResArray, resultSchema);
}
/**
* Helper function to check if the result of a Pig Query is in line with
* expected results. It sorts actual and expected string results before comparison
*
* @param actualResultsIt Result of the executed Pig query
* @param expectedResArray Expected string results to validate against
* @param schema fieldSchema of expecteResArray
* @throws IOException
*/
static public void checkQueryOutputsAfterSortRecursive(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, LogicalSchema schema) throws IOException {
LogicalFieldSchema fs = new LogicalFieldSchema("tuple", schema, DataType.TUPLE);
ResourceFieldSchema rfs = new ResourceFieldSchema(fs);
LoadCaster caster = new Utf8StorageConverter();
List<Tuple> actualResList = new ArrayList<Tuple>();
while(actualResultsIt.hasNext()){
actualResList.add(actualResultsIt.next());
}
List<Tuple> expectedResList = new ArrayList<Tuple>();
for (String str : expectedResArray) {
Tuple newTuple = caster.bytesToTuple(str.getBytes(), rfs);
expectedResList.add(newTuple);
}
for (Tuple t : actualResList) {
convertBagToSortedBag(t);
}
for (Tuple t : expectedResList) {
convertBagToSortedBag(t);
}
Collections.sort(actualResList);
Collections.sort(expectedResList);
Assert.assertEquals("Comparing actual and expected results. ",
expectedResList, actualResList);
}
public static String readFile(File file) throws IOException {
BufferedReader reader = new BufferedReader(new FileReader(file));
String result = "";
String line;
while ((line=reader.readLine())!=null) {
result += line;
result += "\n";
}
reader.close();
return result;
}
/**
* this removes the signature from the serialized plan changing the way the
* unique signature is generated should not break this test
* @param plan the plan to canonicalize
* @return the cleaned up plan
*/
public static String removeSignature(String plan) {
return plan.replaceAll("','','[^']*','scope','true'\\)\\)", "','','','scope','true'))");
}
public static boolean isHadoop203plus() {
String version = org.apache.hadoop.util.VersionInfo.getVersion();
if (version.matches("\\b0\\.20\\.2\\b"))
return false;
return true;
}
public static boolean isHadoop205() {
String version = org.apache.hadoop.util.VersionInfo.getVersion();
if (version.matches("\\b0\\.20\\.205\\..+"))
return true;
return false;
}
public static boolean isHadoop1_x() {
String version = org.apache.hadoop.util.VersionInfo.getVersion();
if (version.matches("\\b1\\.*\\..+"))
return true;
return false;
}
    /**
     * Returns true when the Spark on the classpath is 2.2 or newer: the regex
     * accepts "2.&lt;minor&gt;.*" where minor is either a single digit other than
     * 0/1 or has two or more digits.
     */
    public static boolean isSpark2_2_plus() throws IOException {
        // package$.MODULE$ is Scala object interop for org.apache.spark.package,
        // which exposes the SPARK_VERSION constant
        String sparkVersion = package$.MODULE$.SPARK_VERSION();
        return sparkVersion != null && sparkVersion.matches("2\\.([\\d&&[^01]]|[\\d]{2,})\\..*");
    }
public static void sortQueryOutputsIfNeed(List<Tuple> actualResList, boolean toSort){
if( toSort == true) {
for (Tuple t : actualResList) {
Util.convertBagToSortedBag(t);
}
Collections.sort(actualResList);
}
}
public static void checkQueryOutputs(Iterator<Tuple> actualResults, List<Tuple> expectedResults, boolean checkAfterSort) {
if (checkAfterSort) {
checkQueryOutputsAfterSort(actualResults, expectedResults);
} else {
checkQueryOutputs(actualResults, expectedResults);
}
}
static public void checkQueryOutputs(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, LogicalSchema schema, boolean
checkAfterSort) throws IOException {
if (checkAfterSort) {
checkQueryOutputsAfterSortRecursive(actualResultsIt,
expectedResArray, schema);
} else {
checkQueryOutputs(actualResultsIt,
expectedResArray, schema);
}
}
static void checkQueryOutputs(Iterator<Tuple> actualResultsIt,
String[] expectedResArray, LogicalSchema schema) throws IOException {
LogicalFieldSchema fs = new LogicalFieldSchema("tuple", schema, DataType.TUPLE);
ResourceFieldSchema rfs = new ResourceFieldSchema(fs);
LoadCaster caster = new Utf8StorageConverter();
List<Tuple> actualResList = new ArrayList<Tuple>();
while (actualResultsIt.hasNext()) {
actualResList.add(actualResultsIt.next());
}
List<Tuple> expectedResList = new ArrayList<Tuple>();
for (String str : expectedResArray) {
Tuple newTuple = caster.bytesToTuple(str.getBytes(), rfs);
expectedResList.add(newTuple);
}
for (Tuple t : actualResList) {
convertBagToSortedBag(t);
}
for (Tuple t : expectedResList) {
convertBagToSortedBag(t);
}
Assert.assertEquals("Comparing actual and expected results. ",
expectedResList, actualResList);
}
    /**
     * Asserts that the parallelism values recorded in the job configuration
     * match the expected default/requested/estimated reducer counts and the
     * actual number of reduce tasks.
     */
    public static void assertParallelValues(long defaultParallel,
                                            long requestedParallel,
                                            long estimatedParallel,
                                            long runtimeParallel,
                                            Configuration conf) {
        assertConfLong(conf, "pig.info.reducers.default.parallel", defaultParallel);
        assertConfLong(conf, "pig.info.reducers.requested.parallel", requestedParallel);
        assertConfLong(conf, "pig.info.reducers.estimated.parallel", estimatedParallel);
        assertConfLong(conf, MRConfiguration.REDUCE_TASKS, runtimeParallel);
    }
    /**
     * Asserts that {@code param} in {@code conf} holds {@code expected};
     * an unset key is read as -1.
     */
    public static void assertConfLong(Configuration conf, String param, long expected) {
        assertEquals("Unexpected value found in configs for " + param, expected, conf.getLong(param, -1));
    }
/**
* Returns a PathFilter that filters out filenames that start with _.
* @return PathFilter
*/
public static PathFilter getSuccessMarkerPathFilter() {
return new PathFilter() {
@Override
public boolean accept(Path p) {
return !p.getName().startsWith("_");
}
};
}
/**
*
* @param expected
* Exception class that is expected to be thrown
* @param found
* Exception that occurred in the test
* @param message
* expected String to verify against
*/
public static void assertExceptionAndMessage(Class<?> expected,
Exception found, String message) {
assertEquals(expected, found.getClass());
assertEquals(found.getMessage(), message);
}
/**
* Called to reset ThreadLocal or static states that PigServer depends on
* when a test suite has testcases switching between LOCAL and MAPREDUCE/TEZ
* execution modes
*/
    public static void resetStateForExecModeSwitch() {
        // FileLocalizer caches initialization for the previous exec mode;
        // force it to re-initialize under the new one
        FileLocalizer.setInitialized(false);
        // For tez testing, we want to avoid TezResourceManager/LocalResource reuse
        // (when switching between local and mapreduce/tez)
        TezResourceManager.dropInstance();
        // TODO: once we have Tez local mode, we can get rid of this. For now,
        // if we run this test suite in Tez mode and there are some tests
        // in LOCAL mode, we need to set ScriptState to
        // null to force ScriptState gets initialized every time.
        ScriptState.start(null);
    }
    /** True when {@code execType} is classic MapReduce. */
    public static boolean isMapredExecType(ExecType execType) {
        return execType == ExecType.MAPREDUCE;
    }
public static boolean isTezExecType(ExecType execType) {
if (execType.name().toLowerCase().startsWith("tez")) {
return true;
}
return false;
}
public static boolean isSparkExecType(ExecType execType) {
if (execType.name().toLowerCase().startsWith("spark")) {
return true;
}
return false;
}
    /**
     * Locates the single pig*h1.jar / pig*h2.jar in the current directory,
     * choosing the hadoop suffix from the "hadoopversion" system property.
     *
     * @throws RuntimeException when zero or more than one candidate is found
     */
    public static String findPigJarName() {
        // NOTE(review): this NPEs when -Dhadoopversion is not set; confirm all
        // build targets define it, or switch to
        // System.getProperty("hadoopversion", "23").
        final String suffix = System.getProperty("hadoopversion").equals("20") ? "1" : "2";
        File baseDir = new File(".");
        String[] jarNames = baseDir.list(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                // accept pig...h<suffix>.jar ...
                if (!name.matches("pig.*h" + suffix + "\\.jar")) {
                    return false;
                }
                // ... but skip the "-all" uber jar
                if (name.contains("all")) {
                    return false;
                }
                return true;
            }
        });
        if (jarNames==null || jarNames.length!=1) {
            throw new RuntimeException("Cannot find pig.jar");
        }
        return jarNames[0];
    }
public static ExecType getLocalTestMode() throws Exception {
String execType = System.getProperty("test.exec.type");
if (execType != null) {
if (execType.equals("tez")) {
return ExecTypeProvider.fromString("tez_local");
} else if (execType.equals("spark")) {
return ExecTypeProvider.fromString("spark_local");
}
}
return ExecTypeProvider.fromString("local");
}
public static void createLogAppender(String appenderName, Writer writer, Class...clazzes) {
WriterAppender writerAppender = new WriterAppender(new PatternLayout("%d [%t] %-5p %c %x - %m%n"), writer);
writerAppender.setName(appenderName);
for (Class clazz : clazzes) {
Logger logger = Logger.getLogger(clazz);
logger.addAppender(writerAppender);
}
}
public static void removeLogAppender(String appenderName, Class...clazzes) {
for (Class clazz : clazzes) {
Logger logger = Logger.getLogger(clazz);
Appender appender = logger.getAppender(appenderName);
appender.close();
logger.removeAppender(appenderName);
}
}
public static Path getFirstPartFile(Path path) throws Exception {
FileStatus[] parts = FileSystem.get(path.toUri(), new Configuration()).listStatus(path,
new PathFilter() {
@Override
public boolean accept(Path path) {
return path.getName().startsWith("part-");
}
});
return parts[0].getPath();
}
public static File getFirstPartFile(File dir) throws Exception {
File[] parts = dir.listFiles(new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.startsWith("part-");
};
});
return parts[0];
}
@SuppressWarnings("rawtypes")
public static String getTestDirectory(Class testClass) {
return TEST_DIR + Path.SEPARATOR + "testdata" + Path.SEPARATOR +testClass.getSimpleName();
}
}
| apache-2.0 |
devspark-com/aws-lambda-deploy | src/main/java/org/devspark/aws/tools/AWSAPIGatewayDeployer.java | 5913 | package org.devspark.aws.tools;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.devspark.aws.lambdasupport.endpoint.annotations.apigateway.ApiGateway;
import org.devspark.aws.lambdasupport.endpoint.annotations.apigateway.Resource;
import org.devspark.aws.lambdasupport.endpoint.annotations.apigateway.ResourceMethod;
import org.devspark.aws.tools.model.resources.EndpointResource;
import org.devspark.aws.tools.model.resources.EndpointResourceMethod;
import org.devspark.aws.tools.model.resources.EndpointResourceMethodParameter;
import org.devspark.aws.tools.model.resources.EndpointResponse;
import org.devspark.aws.tools.model.resources.EndpointResponseHeader;
import org.devspark.aws.tools.model.resources.EndpointResponseSchema;
import org.devspark.aws.tools.swagger.SwaggerFileWriter;
import org.devspark.aws.tools.swagger.VelocitySwaggerFileWriter;
import org.reflections.ReflectionUtils;
import org.reflections.Reflections;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.scanners.TypeAnnotationsScanner;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
@Mojo(name = "apigateway-deployer")
public class AWSAPIGatewayDeployer extends AbstractMojo {
@Parameter(property = "base-package")
private String basePackage;
private SwaggerFileWriter fileWriter = new VelocitySwaggerFileWriter();
    /**
     * Mojo entry point: scans the configured base package for API Gateway
     * resources and writes the corresponding Swagger definition file.
     */
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        // scan only the configured package, collecting type-level annotations
        Reflections reflections = new Reflections(new ConfigurationBuilder()
                .setUrls(ClasspathHelper.forPackage(basePackage)).setScanners(
                        new SubTypesScanner(), new TypeAnnotationsScanner()));
        Set<Class<?>> resources = reflections
                .getTypesAnnotatedWith(Resource.class);
        Set<Class<?>> apis = reflections
                .getTypesAnnotatedWith(ApiGateway.class);
        Map<String, EndpointResource> endpointResources = getEndpointResources(resources);
        String apiName = getApiName(apis);
        fileWriter.createSwaggerFile(new ArrayList<EndpointResource>(endpointResources.values()), apiName);
    }
private String getApiName(Set<Class<?>> apis) {
if (apis.size() != 1) {
getLog().warn("Invalid number of @ApiGateway found.");
}
return apis.iterator().next().getAnnotationsByType(ApiGateway.class)[0].name();
}
@SuppressWarnings("unchecked")
private Map<String, EndpointResource> getEndpointResources(Set<Class<?>> resources) {
Map<String, EndpointResource> endpointResources = new HashMap<String, EndpointResource>();
for (Class<?> type : resources) {
Set<Method> resourceMethods = ReflectionUtils.getAllMethods(type,
ReflectionUtils.withAnnotation(ResourceMethod.class));
if (resourceMethods.isEmpty()) {
getLog().warn(
"No methods annotated with @Resource found in type: "
+ type.getName());
continue;
}
for (Method method : resourceMethods) {
Resource methodResource = method.getAnnotation(Resource.class);
String resourceName = type.getAnnotationsByType(Resource.class)[0].name();
if(methodResource != null) {
resourceName = resourceName + "/" + methodResource.name();
}
EndpointResourceMethod endpointMethod = createMethodResource(method, resourceName);
EndpointResource endpointResource = endpointResources.get(resourceName);
if (endpointResource == null) {
endpointResource = new EndpointResource();
endpointResource.setName(resourceName);
endpointResource.setMethods(new ArrayList<EndpointResourceMethod>());
endpointResources.put(resourceName, endpointResource);
}
endpointResource.getMethods().add(endpointMethod);
}
}
return endpointResources;
}
private EndpointResourceMethod createMethodResource(Method method, String resourceName) {
EndpointResourceMethod endpointMethod = new EndpointResourceMethod();
ResourceMethod resourceMethod = method.getAnnotation(ResourceMethod.class);
endpointMethod.setVerb(resourceMethod.httpMethod().name());
endpointMethod.setParameters(getParameters(resourceName));
endpointMethod.setProduces(Arrays.asList("application/json"));
endpointMethod.setResponses(getMethodResponses());
return endpointMethod;
}
//TODO: Replace mocked list with the generation of the responses of the method.
private List<EndpointResponse> getMethodResponses() {
List<EndpointResponse> responses = new ArrayList<EndpointResponse>();
EndpointResponse sucessfulResponse = new EndpointResponse();
sucessfulResponse.setHttpStatus("200");
sucessfulResponse.setDescription("200 response");
sucessfulResponse.setHeaders(new EndpointResponseHeader());
EndpointResponseSchema schema = new EndpointResponseSchema();
schema.setRef("#/definitions/Empty");
sucessfulResponse.setSchema(schema);
return responses;
}
private List<EndpointResourceMethodParameter> getParameters(String resourceName) {
String pattern = "\\{[a-zA-A]*\\}";
Pattern r = Pattern.compile(pattern);
List<EndpointResourceMethodParameter> parameters = new ArrayList<EndpointResourceMethodParameter>();
Matcher m = r.matcher(resourceName);
while(m.find()){
EndpointResourceMethodParameter parameter = new EndpointResourceMethodParameter();
parameter.setName(m.group(0).replaceAll("\\{*\\}*", ""));
//TODO: Review how to populate the parameter metadata.
parameter.setRequired(true);
parameter.setType("string");
parameter.setIn("path");
parameters.add(parameter);
}
return parameters;
}
}
| apache-2.0 |
jwren/intellij-community | platform/lang-api/src/com/intellij/ide/actions/ElementCreator.java | 4462 | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.actions;
import com.intellij.CommonBundle;
import com.intellij.history.LocalHistory;
import com.intellij.history.LocalHistoryAction;
import com.intellij.ide.IdeBundle;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.application.WriteActionAware;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.SmartPointerManager;
import com.intellij.psi.SmartPsiElementPointer;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* @author peter
*/
public abstract class ElementCreator implements WriteActionAware {
  private static final Logger LOG = Logger.getInstance(ElementCreator.class);
  // Project in which elements are created; used for dialogs, commands and smart pointers.
  private final Project myProject;
  // Title of the error dialog shown when creation fails.
  private final @NlsContexts.DialogTitle String myErrorTitle;
  protected ElementCreator(Project project, @NotNull @NlsContexts.DialogTitle String errorTitle) {
    myProject = project;
    myErrorTitle = errorTitle;
  }
  /** Performs the actual element creation; runs inside a command (and a write action if {@link #startInWriteAction()}). */
  protected abstract PsiElement @NotNull [] create(@NotNull String newName) throws Exception;
  /** Returns the user-visible command name used for undo/local history for the given element name. */
  @NlsContexts.Command
  @NotNull
  protected abstract String getActionName(@NotNull String newName);
  /**
   * Validates the input name, runs {@link #create} inside an undoable command and returns the created elements.
   * Shows an error dialog and returns an empty array when the name is empty or creation throws.
   */
  public @NotNull PsiElement @NotNull [] tryCreate(@NotNull final String inputString) {
    if (inputString.isEmpty()) {
      Messages.showMessageDialog(myProject, IdeBundle.message("error.name.should.be.specified"), CommonBundle.getErrorTitle(),
                                 Messages.getErrorIcon());
      return PsiElement.EMPTY_ARRAY;
    }
    // Smart pointers survive PSI changes that may happen between creation and the mapping below.
    Ref<List<SmartPsiElementPointer<?>>> createdElements = Ref.create();
    Exception exception = executeCommand(getActionName(inputString), () -> {
      PsiElement[] psiElements = create(inputString);
      SmartPointerManager manager = SmartPointerManager.getInstance(myProject);
      createdElements.set(ContainerUtil.map(psiElements, manager::createSmartPsiElementPointer));
    });
    if (exception != null) {
      handleException(exception);
      return PsiElement.EMPTY_ARRAY;
    }
    // Pointers whose elements became invalid in the meantime are silently dropped.
    return ContainerUtil.mapNotNull(createdElements.get(), SmartPsiElementPointer::getElement).toArray(PsiElement.EMPTY_ARRAY);
  }
  /**
   * Runs the creation callback as an undoable command, optionally inside a write action.
   *
   * @return the exception thrown by the callback, or null on success
   */
  @Nullable
  private Exception executeCommand(@NotNull @NlsContexts.Command String commandName, @NotNull ThrowableRunnable<? extends Exception> invokeCreate) {
    final Exception[] exception = new Exception[1];
    CommandProcessor.getInstance().executeCommand(myProject, () -> {
      // Local history groups everything done by this command into one revertible action.
      LocalHistoryAction action = LocalHistory.getInstance().startAction(commandName);
      try {
        if (startInWriteAction()) {
          WriteAction.run(invokeCreate);
        }
        else {
          invokeCreate.run();
        }
      }
      catch (Exception ex) {
        // Captured rather than rethrown so the command/local-history bookkeeping completes.
        exception[0] = ex;
      }
      finally {
        action.finish();
      }
    }, commandName, null, UndoConfirmationPolicy.REQUEST_CONFIRMATION);
    return exception[0];
  }
  // Logs the failure and surfaces a filtered, presentable message to the user.
  private void handleException(Exception t) {
    LOG.info(t);
    String errorMessage = getErrorMessage(t);
    Messages.showMessageDialog(myProject, errorMessage, myErrorTitle, Messages.getErrorIcon());
  }
  /** Extracts a presentable message from the throwable, falling back to its toString(). */
  public static @NlsContexts.DialogMessage String getErrorMessage(Throwable t) {
    String errorMessage = CreateElementActionBase.filterMessage(t.getMessage());
    if (StringUtil.isEmpty(errorMessage)) {
      errorMessage = t.toString();
    }
    return errorMessage;
  }
}
| apache-2.0 |
pmoerenhout/camel | components/camel-huaweicloud-smn/src/main/java/org/apache/camel/component/huaweicloud/smn/SimpleNotificationProducer.java | 16562 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.huaweicloud.smn;
import java.util.HashMap;
import com.huaweicloud.sdk.core.auth.BasicCredentials;
import com.huaweicloud.sdk.core.http.HttpConfig;
import com.huaweicloud.sdk.smn.v2.SmnClient;
import com.huaweicloud.sdk.smn.v2.model.PublishMessageRequest;
import com.huaweicloud.sdk.smn.v2.model.PublishMessageRequestBody;
import com.huaweicloud.sdk.smn.v2.model.PublishMessageResponse;
import org.apache.camel.Exchange;
import org.apache.camel.component.huaweicloud.smn.constants.SmnConstants;
import org.apache.camel.component.huaweicloud.smn.constants.SmnOperations;
import org.apache.camel.component.huaweicloud.smn.constants.SmnProperties;
import org.apache.camel.component.huaweicloud.smn.constants.SmnServices;
import org.apache.camel.component.huaweicloud.smn.models.ClientConfigurations;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class SimpleNotificationProducer extends DefaultProducer {
    private static final Logger LOG = LoggerFactory.getLogger(SimpleNotificationProducer.class);
    // SMN API client; built once in doStart() unless one was supplied on the endpoint.
    private SmnClient smnClient;
    // Resolved settings; endpoint-level values at start, then re-built per exchange.
    private ClientConfigurations clientConfigurations;
    public SimpleNotificationProducer(SimpleNotificationEndpoint endpoint) {
        super(endpoint);
    }
    @Override
    protected void doStart() throws Exception {
        super.doStart();
        // Fail fast: validate credentials/region and build the client on producer start.
        validateAndInitializeSmnClient((SimpleNotificationEndpoint) super.getEndpoint());
    }
    /**
     * Dispatches the exchange to the SMN service configured on the endpoint.
     * Only the message publishing service is currently supported.
     */
    public void process(Exchange exchange) throws Exception {
        String service = ((SimpleNotificationEndpoint) super.getEndpoint()).getSmnService();
        if (!ObjectHelper.isEmpty(service)) {
            switch (service) {
                case SmnServices.PUBLISH_MESSAGE:
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Using message publishing service");
                    }
                    performPublishMessageServiceOperations((SimpleNotificationEndpoint) super.getEndpoint(), exchange);
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Completed publishing message");
                    }
                    break;
                default:
                    if (LOG.isErrorEnabled()) {
                        LOG.error("Unsupported service name {}", service);
                    }
                    throw new UnsupportedOperationException(String.format("service %s is not a supported service", service));
            }
        } else {
            if (LOG.isErrorEnabled()) {
                LOG.error("Service name is null/empty");
            }
            throw new IllegalStateException("service name cannot be null/empty");
        }
    }
    /**
     * Publish message service operations
     *
     * @param endpoint the SMN endpoint carrying static configuration
     * @param exchange the exchange whose body/properties drive the publish call
     */
    private void performPublishMessageServiceOperations(SimpleNotificationEndpoint endpoint, Exchange exchange) {
        PublishMessageResponse response;
        PublishMessageRequestBody apiBody;
        // Re-validated per exchange: operation, topic, subject and TTL may come from exchange properties.
        this.clientConfigurations = validateServiceConfigurations(endpoint, exchange);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Checking operation name");
        }
        switch (clientConfigurations.getOperation()) {
            case SmnOperations.PUBLISH_AS_TEXT_MESSAGE:
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Publishing as text message");
                }
                // The exchange body becomes the raw notification text.
                apiBody = new PublishMessageRequestBody()
                        .withMessage(exchange.getMessage().getBody(String.class))
                        .withSubject(clientConfigurations.getSubject())
                        .withTimeToLive(String.valueOf(clientConfigurations.getMessageTtl()));
                response = smnClient.publishMessage(new PublishMessageRequest()
                        .withBody(apiBody)
                        .withTopicUrn(clientConfigurations.getTopicUrn()));
                break;
            case SmnOperations.PUBLISH_AS_TEMPLATED_MESSAGE:
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Publishing as templated message");
                }
                // NOTE(review): TEMPLATE_TAGS is read with an unchecked cast to
                // HashMap<String, String>; a property set as any other Map type fails
                // here with a ClassCastException — confirm what callers set.
                // NOTE(review): withTimeToLive appears twice in this chain; the second
                // call is redundant (same value) but harmless.
                apiBody = new PublishMessageRequestBody()
                        .withMessage(exchange.getMessage().getBody(String.class))
                        .withSubject(clientConfigurations.getSubject())
                        .withTimeToLive(String.valueOf(clientConfigurations.getMessageTtl()))
                        .withMessageTemplateName((String) exchange.getProperty(SmnProperties.TEMPLATE_NAME))
                        .withTags((HashMap<String, String>) exchange.getProperty(SmnProperties.TEMPLATE_TAGS))
                        .withTimeToLive(String.valueOf(clientConfigurations.getMessageTtl()));
                response = smnClient.publishMessage(new PublishMessageRequest()
                        .withBody(apiBody)
                        .withTopicUrn(clientConfigurations.getTopicUrn()));
                break;
            default:
                throw new UnsupportedOperationException(
                        String.format("operation %s not supported in publishMessage service",
                                clientConfigurations.getOperation()));
        }
        setResponseParameters(exchange, response);
    }
    /**
     * maps api response parameters as exchange property
     *
     * @param exchange the exchange to enrich with response metadata
     * @param response the SMN publish response; may be null
     */
    private void setResponseParameters(Exchange exchange, PublishMessageResponse response) {
        if (response == null) {
            return; // mapping is not required if response object is null
        }
        if (!ObjectHelper.isEmpty(response.getMessageId())) {
            exchange.setProperty(SmnProperties.SERVICE_MESSAGE_ID, response.getMessageId());
        }
        if (!ObjectHelper.isEmpty(response.getRequestId())) {
            exchange.setProperty(SmnProperties.SERVICE_REQUEST_ID, response.getRequestId());
        }
    }
    /**
     * validation and initialization of SmnClient object
     *
     * @param simpleNotificationEndpoint the endpoint whose credentials, project id,
     *                                   region and proxy settings are validated
     */
    private void validateAndInitializeSmnClient(SimpleNotificationEndpoint simpleNotificationEndpoint) {
        if (simpleNotificationEndpoint.getSmnClient() != null) {
            if (LOG.isWarnEnabled()) {
                LOG.warn(
                        "Instance of SmnClient was set on the endpoint. Skipping creation of SmnClient from endpoint parameters");
            }
            this.smnClient = simpleNotificationEndpoint.getSmnClient();
            return;
        }
        this.clientConfigurations = new ClientConfigurations();
        //checking for cloud SK (secret key)
        // Accepts the key either directly on the endpoint or via the serviceKeys holder.
        if (ObjectHelper.isEmpty(simpleNotificationEndpoint.getSecretKey()) &&
                ObjectHelper.isEmpty(simpleNotificationEndpoint.getServiceKeys())) {
            if (LOG.isErrorEnabled()) {
                LOG.error("secret key (SK) not found");
            }
            throw new IllegalArgumentException("authentication parameter 'secret key (SK)' not found");
        } else {
            clientConfigurations.setSecretKey(simpleNotificationEndpoint.getSecretKey() != null
                    ? simpleNotificationEndpoint.getSecretKey() : simpleNotificationEndpoint.getServiceKeys().getSecretKey());
        }
        //checking for cloud AK (auth key)
        if (ObjectHelper.isEmpty(simpleNotificationEndpoint.getAuthKey()) &&
                ObjectHelper.isEmpty(simpleNotificationEndpoint.getServiceKeys())) {
            if (LOG.isErrorEnabled()) {
                LOG.error("authentication key (AK) not found");
            }
            throw new IllegalArgumentException("authentication parameter 'authentication key (AK)' not found");
        } else {
            clientConfigurations.setAuthenticationkey(simpleNotificationEndpoint.getAuthKey() != null
                    ? simpleNotificationEndpoint.getAuthKey()
                    : simpleNotificationEndpoint.getServiceKeys().getAuthenticationKey());
        }
        //checking for project ID
        if (ObjectHelper.isEmpty(simpleNotificationEndpoint.getProjectId())) {
            if (LOG.isErrorEnabled()) {
                LOG.error("Project ID not found");
            }
            throw new IllegalArgumentException("project ID not found");
        } else {
            clientConfigurations.setProjectId(simpleNotificationEndpoint.getProjectId());
        }
        //checking for region
        // Region is mapped to a concrete service URL; null means the region is unknown.
        String endpointUrl = SimpleNotificationUtils.resolveSmnServiceEndpoint(simpleNotificationEndpoint.getRegion());
        if (endpointUrl == null) {
            if (LOG.isErrorEnabled()) {
                LOG.error("Valid region not found");
            }
            throw new IllegalArgumentException("enter a valid region");
        } else {
            clientConfigurations.setServiceEndpoint(endpointUrl);
        }
        //checking for ignore ssl verification
        boolean ignoreSslVerification = simpleNotificationEndpoint.isIgnoreSslVerification();
        if (ignoreSslVerification) {
            if (LOG.isWarnEnabled()) {
                LOG.warn("SSL verification is ignored. This is unsafe in production environment");
            }
            clientConfigurations.setIgnoreSslVerification(ignoreSslVerification);
        }
        //checking if http proxy authentication is used
        if (simpleNotificationEndpoint.getProxyHost() != null) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Reading http proxy configurations");
            }
            clientConfigurations.setProxyHost(simpleNotificationEndpoint.getProxyHost());
            clientConfigurations.setProxyPort(simpleNotificationEndpoint.getProxyPort());
            clientConfigurations.setProxyUser(simpleNotificationEndpoint.getProxyUser());
            clientConfigurations.setProxyPassword(simpleNotificationEndpoint.getProxyPassword());
        }
        this.smnClient = initializeClient(clientConfigurations);
    }
    /**
     * initialization of smn client. this is lazily initialized on the first message
     *
     * @param clientConfigurations validated credentials, endpoint URL and proxy settings
     * @return a ready-to-use SmnClient
     */
    private SmnClient initializeClient(ClientConfigurations clientConfigurations) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Initializing Smn client");
        }
        // Proxy configuration is optional; a null HttpConfig means SDK defaults are used.
        HttpConfig httpConfig = null;
        if (clientConfigurations.getProxyHost() != null) {
            httpConfig = HttpConfig.getDefaultHttpConfig();
            httpConfig.withProxyHost(clientConfigurations.getProxyHost())
                    .withProxyPort(clientConfigurations.getProxyPort())
                    .setIgnoreSSLVerification(clientConfigurations.isIgnoreSslVerification());
            if (clientConfigurations.getProxyUser() != null) {
                httpConfig.withProxyUsername(clientConfigurations.getProxyUser());
                httpConfig.withProxyPassword(clientConfigurations.getProxyPassword());
            }
        }
        BasicCredentials credentials = new BasicCredentials()
                .withAk(clientConfigurations.getAuthenticationkey())
                .withSk(clientConfigurations.getSecretKey())
                .withProjectId(clientConfigurations.getProjectId());
        if (LOG.isDebugEnabled()) {
            LOG.debug("Building Smn client");
        }
        // building smn client object
        SmnClient smnClient = SmnClient.newBuilder()
                .withCredential(credentials)
                .withHttpConfig(httpConfig)
                .withEndpoint(clientConfigurations.getServiceEndpoint())
                .build();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Successfully initialized Smn client");
        }
        return smnClient;
    }
    /**
     * validation of all user inputs before attempting to invoke a service operation
     *
     * @param simpleNotificationEndpoint endpoint supplying defaults (operation, TTL, region)
     * @param exchange                   exchange whose properties may override those defaults
     * @return per-exchange configurations for the publish call
     */
    private ClientConfigurations validateServiceConfigurations(
            SimpleNotificationEndpoint simpleNotificationEndpoint, Exchange exchange) {
        ClientConfigurations clientConfigurations = new ClientConfigurations();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Inspecting exchange body");
        }
        // verifying if exchange has valid body content. this is mandatory for 'publish as text' operation
        if (ObjectHelper.isEmpty(exchange.getMessage().getBody())) {
            if (simpleNotificationEndpoint.getOperation().equals("publishAsTextMessage")) {
                if (LOG.isErrorEnabled()) {
                    LOG.error("Found null/empty body. Cannot perform publish as text operation");
                }
                throw new IllegalArgumentException("exchange body cannot be null / empty");
            }
        }
        // checking for mandatory field 'operation name'
        // Exchange property takes precedence over the endpoint-configured operation.
        if (LOG.isDebugEnabled()) {
            LOG.debug("Inspecting operation name");
        }
        if (ObjectHelper.isEmpty(exchange.getProperty(SmnProperties.SMN_OPERATION))
                && ObjectHelper.isEmpty(simpleNotificationEndpoint.getOperation())) {
            if (LOG.isErrorEnabled()) {
                LOG.error("Found null/empty operation name. Cannot proceed with Smn operations");
            }
            throw new IllegalArgumentException("operation name not found");
        } else {
            clientConfigurations.setOperation(exchange.getProperty(SmnProperties.SMN_OPERATION) != null
                    ? (String) exchange.getProperty(SmnProperties.SMN_OPERATION) : simpleNotificationEndpoint.getOperation());
        }
        // checking for mandatory field 'topic name'
        if (LOG.isDebugEnabled()) {
            LOG.debug("Inspecting topic name");
        }
        if (ObjectHelper.isEmpty(exchange.getProperty(SmnProperties.NOTIFICATION_TOPIC_NAME))) {
            if (LOG.isErrorEnabled()) {
                LOG.error("Found null/empty topic name");
            }
            throw new IllegalArgumentException("topic name not found");
        } else {
            // Topic URN is assembled from region, project id and topic name.
            clientConfigurations.setTopicUrn(String.format(SmnConstants.TOPIC_URN_FORMAT,
                    simpleNotificationEndpoint.getRegion(), simpleNotificationEndpoint.getProjectId(),
                    exchange.getProperty(SmnProperties.NOTIFICATION_TOPIC_NAME)));
        }
        // checking for optional field 'message subject'
        if (LOG.isDebugEnabled()) {
            LOG.debug("Inspecting notification subject value");
        }
        if (ObjectHelper.isEmpty(exchange.getProperty(SmnProperties.NOTIFICATION_SUBJECT))) {
            if (LOG.isWarnEnabled()) {
                LOG.warn("notification subject not found. defaulting to 'DEFAULT_SUBJECT'");
            }
            clientConfigurations.setSubject("DEFAULT_SUBJECT");
        } else {
            clientConfigurations.setSubject((String) exchange.getProperty(SmnProperties.NOTIFICATION_SUBJECT));
        }
        // checking for optional field 'message ttl'
        if (LOG.isDebugEnabled()) {
            LOG.debug("Inspecting TTL");
        }
        if (ObjectHelper.isEmpty(exchange.getProperty(SmnProperties.NOTIFICATION_TTL))) {
            if (LOG.isWarnEnabled()) {
                LOG.warn("TTL not found. defaulting to default value {}", simpleNotificationEndpoint.getMessageTtl());
            }
            clientConfigurations.setMessageTtl(simpleNotificationEndpoint.getMessageTtl());
        } else {
            // NOTE(review): assumes the TTL property was set as an Integer; any other
            // type (e.g. String) causes a ClassCastException here — confirm callers.
            clientConfigurations.setMessageTtl((int) exchange.getProperty(SmnProperties.NOTIFICATION_TTL));
        }
        return clientConfigurations;
    }
}
| apache-2.0 |
TuWei1992/zswxsqxt | src/main/zswxsqxt/com/zswxsqxt/wf/dao/WfActivityDao.java | 3115 | package com.zswxsqxt.wf.dao;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Repository;
import cn.org.rapid_framework.page.Page;
import com.opendata.common.base.BaseHibernateDao;
import com.zswxsqxt.wf.model.WfActivity;
import com.zswxsqxt.wf.model.WfProject;
import com.zswxsqxt.wf.query.WfActivityQuery;
/**
describe:流程节点表Dao
*/
/**
 * DAO for the workflow activity (process node) table.
 * (Documentation translated to English from the original Chinese comments.)
 */
@Repository
public class WfActivityDao extends BaseHibernateDao<WfActivity,String>
{
    // Raw return type preserved for signature compatibility with BaseHibernateDao.
    public Class getEntityClass()
    {
        return WfActivity.class;
    }
    /**
     * Pages through workflow activities matching the given WfActivityQuery.
     * Every non-empty query field contributes an AND condition with a positional parameter.
     *
     * @param query    the filter; may be null, in which case all rows are returned
     * @param pageSize number of rows per page
     * @param pageNum  page index
     * @return a page of matching rows, ordered by the requested sort column or node order
     */
    public Page findPage(WfActivityQuery query,int pageSize,int pageNum)
    {
        StringBuilder hql=new StringBuilder();
        hql.append(" from WfActivity ett where 1=1");
        // Positional parameter values matching the '?' placeholders appended below.
        List<Object> param=new ArrayList<Object>();
        if(query!=null)
        {
            if(!StringUtils.isEmpty(query.getId()))
            {
                hql.append(" and ett.id=?");
                param.add(query.getId());
            }
            if(!StringUtils.isEmpty(query.getName()))
            {
                hql.append(" and ett.name like ?");
                param.add("%"+query.getName()+"%");
            }
            if(query.getOrderNum()!=null)
            {
                hql.append(" and ett.orderNum=?");
                param.add(query.getOrderNum());
            }
            if(query.getActType()!=null)
            {
                hql.append(" and ett.actType=?");
                param.add(query.getActType());
            }
            if(query.getActFlag()!=null)
            {
                hql.append(" and ett.actFlag=?");
                param.add(query.getActFlag());
            }
            if(!StringUtils.isEmpty(query.getDescription()))
            {
                hql.append(" and ett.description=?");
                param.add(query.getDescription());
            }
            if(!StringUtils.isEmpty(query.getUrl()))
            {
                hql.append(" and ett.url=?");
                param.add(query.getUrl());
            }
            if(!StringUtils.isEmpty(query.getGroupFlag()))
            {
                hql.append(" and ett.groupFlag=?");
                param.add(query.getGroupFlag());
            }
            if(!StringUtils.isEmpty(query.getExtFiled3()))
            {
                hql.append(" and ett.extFiled3=?");
                param.add(query.getExtFiled3());
            }
            if(query.getTs()!=null)
            {
                hql.append(" and ett.ts=?");
                param.add(query.getTs());
            }
            if(query.getWfProject()!=null)
            {
                hql.append(" and ett.wfProject.id=?");
                param.add(query.getWfProject().getId());
            }
            if(query.getWfInstance()!=null)
            {
                hql.append(" and ett.wfInstance=?");
                param.add(query.getWfInstance());
            }
        }
        // BUG FIX: the original dereferenced query.getSortColumns() outside the
        // null check above, throwing a NullPointerException whenever query was null.
        // NOTE(review): sortColumns is concatenated directly into the HQL string; if
        // it can be user-supplied it must be validated against a whitelist of columns.
        if(query!=null && !StringUtils.isEmpty(query.getSortColumns())){
            if(!query.getSortColumns().equals("ts")){
                hql.append(" order by ett."+query.getSortColumns()+" , ett.ts desc ");
            }else{
                hql.append(" order by ett.orderNum asc ");
            }
        }else{
            hql.append(" order by ett.orderNum asc ");
        }
        return super.findByHql(hql.toString(), pageSize, pageNum, param.toArray());
    }
    /**
     * Returns all activity nodes of the given workflow, ordered ascending by node order.
     *
     * @param proId id of the workflow (WfProject) whose nodes are wanted
     * @return the ordered node list, or null when the workflow has no nodes
     *         (null kept instead of an empty list for backward compatibility)
     */
    public List<WfActivity> getWfActivity(String proId){
        String hql = "from WfActivity where wfProject.id = ? order by orderNum asc";
        List<WfActivity> list = super.findFastByHql(hql, proId);
        if(list.size()>0){
            return list;
        }else{
            return null;
        }
    }
}
| apache-2.0 |
bobmcwhirter/wildfly-swarm | plugin/src/main/java/org/wildfly/swarm/plugin/maven/StartMojo.java | 13234 | /**
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.plugin.maven;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.eclipse.aether.repository.RemoteRepository;
import org.wildfly.swarm.bootstrap.util.BootstrapProperties;
import org.wildfly.swarm.fractionlist.FractionList;
import org.wildfly.swarm.spi.api.SwarmProperties;
import org.wildfly.swarm.tools.ArtifactSpec;
import org.wildfly.swarm.tools.BuildTool;
import org.wildfly.swarm.tools.DependencyManager;
import org.wildfly.swarm.tools.FractionDescriptor;
import org.wildfly.swarm.tools.FractionUsageAnalyzer;
import org.wildfly.swarm.tools.exec.SwarmExecutor;
import org.wildfly.swarm.tools.exec.SwarmProcess;
/**
* @author Bob McWhirter
* @author Ken Finnigan
*/
@Mojo(name = "start",
requiresDependencyResolution = ResolutionScope.COMPILE_PLUS_RUNTIME,
requiresDependencyCollection = ResolutionScope.COMPILE_PLUS_RUNTIME)
public class StartMojo extends AbstractSwarmMojo {
@Parameter(alias = "stdoutFile", property = "swarm.stdout")
public File stdoutFile;
@Parameter(alias = "stderrFile", property = "swarm.stderr" )
public File stderrFile;
@Parameter(alias = "useUberJar", defaultValue = "${wildfly-swarm.useUberJar}")
public boolean useUberJar;
@Parameter(alias = "debug", property = SwarmProperties.DEBUG_PORT)
public Integer debugPort;
@Parameter(alias = "jvmArguments", property = "swarm.jvmArguments")
public List<String> jvmArguments = new ArrayList<>();
@Parameter(alias = "arguments" )
public List<String> arguments = new ArrayList<>();
@Parameter(property = "swarm.arguments", defaultValue = "")
public String argumentsProp;
boolean waitForProcess;
    @SuppressWarnings({"unchecked", "ThrowableResultOfMethodCallIgnored"})
    @Override
    public void execute() throws MojoExecutionException, MojoFailureException {
        initProperties(true);
        initEnvironment();
        // Choose how to launch based on packaging / uber-jar flag.
        final SwarmExecutor executor;
        if (this.useUberJar) {
            executor = uberJarExecutor();
        } else if (this.project.getPackaging().equals("war")) {
            executor = warExecutor();
        } else if (this.project.getPackaging().equals("jar")) {
            executor = jarExecutor();
        } else {
            throw new MojoExecutionException("Unsupported packaging: " + this.project.getPackaging());
        }
        executor.withJVMArguments( this.jvmArguments );
        // Whitespace-separated arguments from the property are appended to the configured list.
        if ( this.argumentsProp != null ) {
            StringTokenizer args = new StringTokenizer(this.argumentsProp);
            while ( args.hasMoreTokens() ) {
                this.arguments.add( args.nextToken() );
            }
        }
        executor.withArguments( this.arguments );
        final SwarmProcess process;
        try {
            process = executor.withDebug(debugPort)
                    .withProperties(this.properties)
                    .withStdoutFile(this.stdoutFile != null ? this.stdoutFile.toPath() : null)
                    .withStderrFile(this.stderrFile != null ? this.stderrFile.toPath() : null)
                    .withEnvironment(this.environment)
                    .withWorkingDirectory(this.project.getBasedir().toPath())
                    .withProperty("remote.maven.repo",
                            String.join(",",
                                    this.project.getRemoteProjectRepositories().stream()
                                            .map(RemoteRepository::getUrl)
                                            .collect(Collectors.toList())))
                    .execute();
            // Stop the spawned process when the Maven JVM exits.
            Runtime.getRuntime().addShutdownHook( new Thread(()->{
                try {
                    // Sleeping for a few millis will give time to shutdown gracefully
                    Thread.sleep(100L);
                    process.stop( 10, TimeUnit.SECONDS );
                } catch (InterruptedException e) {
                    // NOTE(review): interruption during JVM shutdown is deliberately ignored.
                }
            }));
            process.awaitReadiness(2, TimeUnit.MINUTES);
            if (!process.isAlive()) {
                throw new MojoFailureException("Process failed to start");
            }
            if (process.getError() != null) {
                throw new MojoFailureException("Error starting process", process.getError());
            }
        } catch (IOException e) {
            throw new MojoFailureException("unable to execute", e);
        } catch (InterruptedException e) {
            throw new MojoFailureException("Error waiting for deployment", e);
        }
        // Register the process in the plugin context so the matching "stop" mojo can find it.
        List<SwarmProcess> procs = (List<SwarmProcess>) getPluginContext().get("swarm-process");
        if (procs == null) {
            procs = new ArrayList<>();
            getPluginContext().put("swarm-process", procs);
        }
        procs.add(process);
        // Only the run-style invocation blocks until the process terminates.
        if (waitForProcess) {
            try {
                process.waitFor();
            } catch (InterruptedException e) {
                try {
                    process.stop( 10, TimeUnit.SECONDS );
                } catch (InterruptedException ie) {
                    // Do nothing
                }
            } finally {
                process.destroyForcibly();
            }
        }
    }
protected SwarmExecutor uberJarExecutor() throws MojoFailureException {
getLog().info("Starting -swarm.jar");
String finalName = this.project.getBuild().getFinalName();
if (finalName.endsWith(".war") || finalName.endsWith(".jar")) {
finalName = finalName.substring(0, finalName.length() - 4);
}
return new SwarmExecutor()
.withExecutableJar(Paths.get(this.projectBuildDir, finalName + "-swarm.jar"));
}
protected SwarmExecutor warExecutor() throws MojoFailureException {
getLog().info("Starting .war");
String finalName = this.project.getBuild().getFinalName();
if (!finalName.endsWith(".war")) {
finalName = finalName + ".war";
}
return executor(Paths.get(this.projectBuildDir, finalName), finalName, false);
}
protected SwarmExecutor jarExecutor() throws MojoFailureException {
getLog().info("Starting .jar");
final String finalName = this.project.getBuild().getFinalName();
return executor(Paths.get(this.project.getBuild().getOutputDirectory()),
finalName.endsWith(".jar") ? finalName : finalName + ".jar",
true);
}
protected SwarmExecutor executor(final Path appPath, final String name,
final boolean scanDependencies) throws MojoFailureException {
final SwarmExecutor executor = new SwarmExecutor()
.withModules(expandModules())
.withProperty(BootstrapProperties.APP_NAME, name)
.withClassPathEntries(dependencies(appPath, scanDependencies));
if (this.mainClass != null) {
executor.withMainClass(this.mainClass);
} else {
executor.withDefaultMainClass();
}
return executor;
}
List<Path> findNeededFractions(final Set<Artifact> existingDeps,
final Path source,
final boolean scanDeps) throws MojoFailureException {
getLog().info("Scanning for needed WildFly Swarm fractions with mode: " + fractionDetectMode);
final Set<String> existingDepGASet = existingDeps.stream()
.map(d -> String.format("%s:%s", d.getGroupId(), d.getArtifactId()))
.collect(Collectors.toSet());
final Set<FractionDescriptor> fractions;
final FractionUsageAnalyzer analyzer = new FractionUsageAnalyzer(FractionList.get()).source(source);
if (scanDeps) {
existingDeps.forEach(d -> analyzer.source(d.getFile()));
}
final Predicate<FractionDescriptor> notExistingDep =
d -> !existingDepGASet.contains(String.format("%s:%s", d.getGroupId(), d.getArtifactId()));
try {
fractions = analyzer.detectNeededFractions().stream()
.filter(notExistingDep)
.collect(Collectors.toSet());
} catch (IOException e) {
throw new MojoFailureException("failed to scan for fractions", e);
}
getLog().info("Detected fractions: " + String.join(", ", fractions.stream()
.map(FractionDescriptor::av)
.sorted()
.collect(Collectors.toList())));
fractions.addAll(this.additionalFractions.stream()
.map(f -> FractionDescriptor.fromGav(FractionList.get(), f))
.collect(Collectors.toSet()));
final Set<FractionDescriptor> allFractions = new HashSet<>(fractions);
allFractions.addAll(fractions.stream()
.flatMap(f -> f.getDependencies().stream())
.filter(notExistingDep)
.collect(Collectors.toSet()));
getLog().info("Using fractions: " +
String.join(", ", allFractions.stream()
.map(FractionDescriptor::gavOrAv)
.sorted()
.collect(Collectors.toList())));
final Set<ArtifactSpec> specs = new HashSet<>();
specs.addAll(existingDeps.stream()
.map(this::artifactToArtifactSpec)
.collect(Collectors.toList()));
specs.addAll(allFractions.stream()
.map(FractionDescriptor::toArtifactSpec)
.collect(Collectors.toList()));
try {
return mavenArtifactResolvingHelper().resolveAll(specs).stream()
.map(s -> s.file.toPath())
.collect(Collectors.toList());
} catch (Exception e) {
throw new MojoFailureException("failed to resolve fraction dependencies", e);
}
}
List<Path> dependencies(final Path archiveContent,
final boolean scanDependencies) throws MojoFailureException {
final List<Path> elements = new ArrayList<>();
final Set<Artifact> artifacts = this.project.getArtifacts();
boolean hasSwarmDeps = false;
for (Artifact each : artifacts) {
if (each.getGroupId().equals(DependencyManager.WILDFLY_SWARM_GROUP_ID)
&& each.getArtifactId().equals(DependencyManager.WILDFLY_SWARM_BOOTSTRAP_ARTIFACT_ID)) {
hasSwarmDeps = true;
}
if (each.getGroupId().equals("org.jboss.logmanager")
&& each.getArtifactId().equals("jboss-logmanager")) {
continue;
}
if (each.getScope().equals("provided")) {
continue;
}
elements.add(each.getFile().toPath());
}
elements.add(Paths.get(this.project.getBuild().getOutputDirectory()));
if (fractionDetectMode != BuildTool.FractionDetectionMode.never) {
if (fractionDetectMode == BuildTool.FractionDetectionMode.force ||
!hasSwarmDeps) {
List<Path> fractionDeps = findNeededFractions(artifacts, archiveContent, scanDependencies);
for(Path p : fractionDeps) {
if(!elements.contains(p))
elements.add(p);
}
}
} else if (!hasSwarmDeps) {
getLog().warn("No WildFly Swarm dependencies found and fraction detection disabled");
}
return elements;
}
List<Path> expandModules() {
return this.additionalModules.stream()
.map(m -> Paths.get(this.project.getBuild().getOutputDirectory(), m))
.collect(Collectors.toList());
}
} | apache-2.0 |
aureg/alfresco-bulk-import | amp/src/main/java/org/alfresco/extension/bulkimport/source/fs/DirectoryAnalyser.java | 13939 | /*
* Copyright (C) 2007-2015 Peter Monks.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file is part of an unsupported extension to Alfresco.
*
*/
package org.alfresco.extension.bulkimport.source.fs;
import java.io.File;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.alfresco.repo.content.ContentStore;
import org.alfresco.service.ServiceRegistry;
import org.alfresco.util.Pair;
import org.alfresco.extension.bulkimport.source.BulkImportSourceStatus;
import static org.alfresco.extension.bulkimport.util.LogUtils.*;
import static org.alfresco.extension.bulkimport.source.fs.FilesystemSourceUtils.*;
/**
 * This class defines a directory analyser. This is the process by which
* the contents of a source directory are grouped together into a list of
* <code>FilesystemBulkImportItem</code>s.
*
* @author Peter Monks (pmonks@gmail.com)
*/
public final class DirectoryAnalyser
{
    private final static Log log = LogFactory.getLog(DirectoryAnalyser.class);

    // Status counters
    private final static String COUNTER_NAME_FILES_SCANNED       = "Files scanned";
    private final static String COUNTER_NAME_DIRECTORIES_SCANNED = "Directories scanned";
    private final static String COUNTER_NAME_UNREADABLE_ENTRIES  = "Unreadable entries";
    private final static String[] COUNTER_NAMES = { COUNTER_NAME_FILES_SCANNED,
                                                    COUNTER_NAME_DIRECTORIES_SCANNED,
                                                    COUNTER_NAME_UNREADABLE_ENTRIES };

    private final ServiceRegistry serviceRegistry;
    private final ContentStore    configuredContentStore;
    private final MetadataLoader  metadataLoader;

    // Injected later, via init(); deliberately not final and not checked in the constructor.
    private BulkImportSourceStatus importStatus;

    public DirectoryAnalyser(final ServiceRegistry serviceRegistry,
                             final ContentStore    configuredContentStore,
                             final MetadataLoader  metadataLoader)
    {
        // PRECONDITIONS
        assert serviceRegistry        != null : "serviceRegistry must not be null.";
        assert configuredContentStore != null : "configuredContentStore must not be null.";
        assert metadataLoader         != null : "metadataLoader must not be null.";
        // Note: the original code also asserted "importStatus != null" here, but that
        // field is only provided later via init() and is ALWAYS null at construction
        // time - with assertions enabled every construction would have failed.
        // The check has been moved to init(), where the value actually arrives.

        // Body
        this.serviceRegistry        = serviceRegistry;
        this.configuredContentStore = configuredContentStore;
        this.metadataLoader         = metadataLoader;
    }

    /**
     * Provides the status object used for source counters. Must be called before
     * {@link #analyseDirectory(File, File)}.
     *
     * @param importStatus the import status to record counters against <i>(must not be null)</i>.
     */
    public void init(final BulkImportSourceStatus importStatus)
    {
        // PRECONDITIONS (moved here from the constructor - see note there)
        assert importStatus != null : "importStatus must not be null.";

        this.importStatus = importStatus;
        importStatus.preregisterSourceCounters(COUNTER_NAMES);
    }

    /**
     * Analyses the given directory.
     *
     * @param sourceDirectory The source directory for the entire import (note: <u>must</u> be a directory) <i>(must not be null)</i>.
     * @param directory The directory to analyse (note: <u>must</u> be a directory) <i>(must not be null)</i>.
     * @return An <code>AnalysedDirectory</code> object <i>(will not be null)</i>.
     * @throws InterruptedException If the thread executing the method is interrupted.
     */
    public Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> analyseDirectory(final File sourceDirectory, final File directory)
        throws InterruptedException
    {
        // PRECONDITIONS
        if (sourceDirectory == null) throw new IllegalArgumentException("sourceDirectory cannot be null.");
        if (directory       == null) throw new IllegalArgumentException("directory cannot be null.");

        // Body
        if (debug(log)) debug(log, "Analysing directory " + getFileName(directory) + "...");

        Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> result = null;
        File[] directoryListing = null;
        long analysisStart = 0L;
        long analysisEnd = 0L;
        long start = 0L;
        long end = 0L;
        String sourceRelativeParentDirectory = sourceDirectory.toPath().relativize(directory.toPath()).toString();  // Note: JDK 1.7 specific

        // List the directory
        start = System.nanoTime();
        analysisStart = start;
        directoryListing = directory.listFiles();
        end = System.nanoTime();
        // Bug fix: listFiles() returns null for unreadable/invalid directories; the
        // trace message previously dereferenced directoryListing.length unconditionally.
        if (trace(log)) trace(log, "List directory (" + (directoryListing == null ? 0 : directoryListing.length) + " entries) took: " + (float)(end - start) / (1000 * 1000 * 1000) + "s.");

        // Build up the list of items from the directory listing
        start = System.nanoTime();
        result = analyseDirectory(sourceRelativeParentDirectory, directoryListing);
        end = System.nanoTime();
        if (trace(log)) trace(log, "Convert directory listing to set of filesystem import items took: " + (float)(end - start) / (1000 * 1000 * 1000) + "s.");

        analysisEnd = end;
        if (debug(log)) debug(log, "Finished analysing directory " + getFileName(directory) + ", in " + (float)(analysisEnd - analysisStart) / (1000 * 1000 * 1000) + "s.");

        return(result);
    }

    /**
     * Categorises the raw listing and converts it into directory/file import items.
     * Returns null when the listing itself was null (unreadable directory).
     */
    private Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> analyseDirectory(final String sourceRelativeParentDirectory, final File[] directoryListing)
    {
        Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> result = null;

        if (directoryListing != null)
        {
            // This needs some Clojure, desperately...
            Map<String, SortedMap<BigDecimal, Pair<File, File>>> categorisedFiles = categoriseFiles(directoryListing);

            if (debug(log)) debug(log, "Categorised files: " + String.valueOf(categorisedFiles));

            result = constructImportItems(sourceRelativeParentDirectory, categorisedFiles);
        }

        return(result);
    }

    /**
     * Groups the listing by parent (item) name, then by version number, pairing up
     * each version's content file with its metadata file.
     */
    private Map<String, SortedMap<BigDecimal, Pair<File, File>>> categoriseFiles(final File[] directoryListing)
    {
        Map<String, SortedMap<BigDecimal, Pair<File, File>>> result = null;

        if (directoryListing != null)
        {
            result = new HashMap<String, SortedMap<BigDecimal, Pair<File, File>>>();

            for (final File file : directoryListing)
            {
                categoriseFile(result, file);
            }
        }

        return(result);
    }

    /*
     * This method does the hard work of figuring out where the file belongs (which parent item, and where in that item's
     * version history).
     */
    private void categoriseFile(final Map<String, SortedMap<BigDecimal, Pair<File, File>>> categorisedFiles, final File file)
    {
        if (file != null)
        {
            if (file.canRead())
            {
                final String     fileName      = file.getName();
                final String     parentName    = getParentName(metadataLoader, fileName);
                final boolean    isMetadata    = isMetadataFile(metadataLoader, fileName);
                final BigDecimal versionNumber = getVersionNumber(fileName);

                SortedMap<BigDecimal, Pair<File, File>> versions = categorisedFiles.get(parentName);

                // Find the item
                if (versions == null)
                {
                    versions = new TreeMap<BigDecimal, Pair<File, File>>();
                    categorisedFiles.put(parentName, versions);
                }

                // Find the version within the item
                Pair<File, File> version = versions.get(versionNumber);

                if (version == null)
                {
                    version = new Pair<File, File>(null, null);
                }

                // Categorise the incoming file in that version of the item
                // (first = content file, second = metadata file)
                if (isMetadata)
                {
                    version = new Pair<File, File>(version.getFirst(), file);
                }
                else
                {
                    version = new Pair<File, File>(file, version.getSecond());
                }

                versions.put(versionNumber, version);

                if (file.isDirectory())
                {
                    importStatus.incrementSourceCounter(COUNTER_NAME_DIRECTORIES_SCANNED);
                }
                else
                {
                    importStatus.incrementSourceCounter(COUNTER_NAME_FILES_SCANNED);
                }
            }
            else
            {
                if (warn(log)) warn(log, "Skipping '" + getFileName(file) + "' as Alfresco does not have permission to read it.");
                importStatus.incrementSourceCounter(COUNTER_NAME_UNREADABLE_ENTRIES);
            }
        }
    }

    /**
     * Turns the categorised map into two lists of import items: directories and files.
     * An item is a directory iff its latest version is a directory.
     */
    private Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> constructImportItems(final String sourceRelativeParentDirectory,
                                                                                                      final Map<String, SortedMap<BigDecimal,Pair<File,File>>> categorisedFiles)
    {
        Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>> result = null;

        if (categorisedFiles != null)
        {
            final List<FilesystemBulkImportItem> directoryItems = new ArrayList<FilesystemBulkImportItem>();
            final List<FilesystemBulkImportItem> fileItems      = new ArrayList<FilesystemBulkImportItem>();

            result = new Pair<List<FilesystemBulkImportItem>, List<FilesystemBulkImportItem>>(directoryItems, fileItems);

            for (final String parentName : categorisedFiles.keySet())
            {
                final SortedMap<BigDecimal,Pair<File,File>>      itemVersions = categorisedFiles.get(parentName);
                final NavigableSet<FilesystemBulkImportItemVersion> versions  = constructImportItemVersions(itemVersions);
                final boolean                                    isDirectory  = versions.last().isDirectory();
                final FilesystemBulkImportItem item = new FilesystemBulkImportItem(parentName,
                                                                                   isDirectory,
                                                                                   sourceRelativeParentDirectory,
                                                                                   versions);
                if (isDirectory)
                {
                    directoryItems.add(item);
                }
                else
                {
                    fileItems.add(item);
                }
            }
        }

        return(result);
    }

    /**
     * Builds the sorted set of versions for one item from its version-number map.
     *
     * @throws IllegalArgumentException if itemVersions is null or empty.
     */
    private final NavigableSet<FilesystemBulkImportItemVersion> constructImportItemVersions(final SortedMap<BigDecimal,Pair<File,File>> itemVersions)
    {
        // PRECONDITIONS
        if (itemVersions        == null) throw new IllegalArgumentException("itemVersions cannot be null.");
        if (itemVersions.size() <= 0)    throw new IllegalArgumentException("itemVersions cannot be empty.");

        // Body
        final NavigableSet<FilesystemBulkImportItemVersion> result = new TreeSet<FilesystemBulkImportItemVersion>();

        for (final BigDecimal versionNumber : itemVersions.keySet())
        {
            final Pair<File,File> contentAndMetadataFiles = itemVersions.get(versionNumber);
            final FilesystemBulkImportItemVersion version = new FilesystemBulkImportItemVersion(serviceRegistry,
                                                                                               configuredContentStore,
                                                                                               metadataLoader,
                                                                                               versionNumber,
                                                                                               contentAndMetadataFiles.getFirst(),
                                                                                               contentAndMetadataFiles.getSecond());
            result.add(version);
        }

        return(result);
    }
}
| apache-2.0 |
pgh70/mycontroller | modules/core/src/main/java/org/mycontroller/standalone/api/jaxrs/mixins/deserializers/FrequencyTypeDeserializer.java | 1580 | /*
* Copyright 2015-2018 Jeeva Kandasamy (jkandasa@gmail.com)
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mycontroller.standalone.api.jaxrs.mixins.deserializers;
import java.io.IOException;
import org.mycontroller.standalone.timer.TimerUtils.FREQUENCY_TYPE;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
/**
* @author Jeeva Kandasamy (jkandasa)
* @since 0.0.2
*/
public class FrequencyTypeDeserializer extends JsonDeserializer<FREQUENCY_TYPE> {

    /**
     * Converts the current JSON token's text into a {@link FREQUENCY_TYPE},
     * returning {@code null} when the parser yields no text.
     */
    @Override
    public FREQUENCY_TYPE deserialize(JsonParser parser, DeserializationContext context)
            throws IOException, JsonProcessingException {
        final String frequencyType = parser.getText();
        return frequencyType == null ? null : FREQUENCY_TYPE.fromString(frequencyType);
    }
}
| apache-2.0 |
shenkers/CrossBrowse | src/main/java/org/mskcc/shenkers/data/interval/IntervalFeature.java | 585 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.mskcc.shenkers.data.interval;
import htsjdk.tribble.Feature;
import htsjdk.tribble.annotation.Strand;
import htsjdk.tribble.bed.FullBEDFeature;
import java.awt.Color;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
*
* @author sol
*/
public interface IntervalFeature<T> extends Feature {
    // Strand (orientation) of this interval feature.
    // NOTE(review): value for unstranded features is not specified here - presumably
    // Strand.NONE; confirm against implementations.
    Strand getStrand();
    // Payload value carried by this interval (type chosen by the implementation).
    T getValue();
}
| apache-2.0 |
apache/geronimo | plugins/plancreator/plancreator-portlets/src/main/java/org/apache/geronimo/console/configcreator/JSR88_Util.java | 6904 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.console.configcreator;
import java.io.File;
import javax.enterprise.deploy.shared.factories.DeploymentFactoryManager;
import javax.enterprise.deploy.spi.DeploymentManager;
import javax.enterprise.deploy.spi.Target;
import javax.enterprise.deploy.spi.status.ProgressObject;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.portlet.PortletException;
import javax.portlet.PortletRequest;
import org.apache.geronimo.deployment.plugin.jmx.CommandContext;
import org.apache.geronimo.deployment.plugin.jmx.JMXDeploymentManager;
import org.apache.geronimo.deployment.plugin.local.DistributeCommand;
import org.apache.geronimo.j2ee.deployment.ApplicationInfo;
import org.apache.geronimo.j2ee.deployment.EARConfigBuilder;
import org.apache.geronimo.kernel.Kernel;
import org.apache.geronimo.kernel.KernelRegistry;
/**
* Util class for JSR-88 related functions
*
* @version $Rev$ $Date$
*/
public class JSR88_Util {
    /*private static List getEjbClassLoaders(PortletRequest request) {
        List deployedEjbs = JSR77_Util.getDeployedEJBs(request);
        List configurations = new ArrayList();
        for (int i = 0; i < deployedEjbs.size(); i++) {
            String ejbPatternName = ((ReferredData) deployedEjbs.get(i)).getPatternName();
            configurations.add(getDependencyString(ejbPatternName));
        }
        return getConfigClassLoaders(configurations);
    }
    private static List getConfigClassLoaders(List configurationNames) {
        List classLoaders = new ArrayList();
        ConfigurationManager configurationManager = PortletManager.getConfigurationManager();
        for (int i = 0; i < configurationNames.size(); i++) {
            Artifact configurationId = Artifact.create((String) configurationNames.get(i));
            classLoaders.add(configurationManager.getConfiguration(configurationId).getConfigurationClassLoader());
        }
        return classLoaders;
    }*/
    /**
     * Runs an in-VM "distribute" of the module purely so that
     * {@code EARConfigBuilder}, switched into create-plan mode via its
     * thread-locals, captures the application's metadata. The deployment
     * outcome itself is discarded.
     *
     * @param actionRequest the portlet request (currently unused by this method)
     * @param moduleFile the module archive to analyse
     * @return the captured ApplicationInfo, or null if none was produced
     */
    public static ApplicationInfo createApplicationInfo(PortletRequest actionRequest, File moduleFile) {
        ApplicationInfo applicationInfo = null;
        // Enable create-plan mode for this thread before triggering the deploy.
        EARConfigBuilder.createPlanMode.set(Boolean.TRUE);
        try {
            DeploymentFactoryManager dfm = DeploymentFactoryManager.getInstance();
            DeploymentManager mgr = dfm.getDeploymentManager("deployer:geronimo:inVM", null, null);
            if (mgr instanceof JMXDeploymentManager) {
                ((JMXDeploymentManager) mgr).setLogConfiguration(false, true);
            }
            Target[] targets = mgr.getTargets();
            if (null == targets) {
                throw new IllegalStateException("No target to distribute to");
            }
            // Only the first target is used.
            targets = new Target[] { targets[0] };
            DistributeCommand command = new DistributeCommand(getKernel(), targets, moduleFile, null);
            CommandContext commandContext = new CommandContext(true, true, null, null, false);
            commandContext.setUsername("system");
            commandContext.setPassword("manager");
            command.setCommandContext(commandContext);
            command.doDeploy(targets[0], true);
        } catch (Exception e) {
            // Any better ideas?
            // The deploy is expected to "fail" in create-plan mode; only propagate
            // when no ApplicationInfo was captured at all.
            if(EARConfigBuilder.appInfo.get() == null) throw new RuntimeException(e);
        } finally {
            // Always reset the builder's thread-locals, harvesting the result first.
            EARConfigBuilder.createPlanMode.set(Boolean.FALSE);
            applicationInfo = EARConfigBuilder.appInfo.get();
            EARConfigBuilder.appInfo.set(null);
        }
        return applicationInfo;
    }
    /**
     * Looks up the Geronimo kernel: first via JNDI, falling back to the
     * in-process KernelRegistry singleton.
     */
    private static Kernel getKernel() {
        // todo: consider making this configurable; we could easily connect to a remote kernel if we wanted to
        Kernel kernel = null;
        try {
            kernel = (Kernel) new InitialContext().lookup("java:comp/GeronimoKernel");
        } catch (NamingException e) {
            // log.error("Unable to look up kernel in JNDI", e);
        }
        if (kernel == null) {
            // log.debug("Unable to find kernel in JNDI; using KernelRegistry instead");
            kernel = KernelRegistry.getSingleKernel();
        }
        return kernel;
    }
    /**
     * Distributes and starts the given module (with its deployment plan) on the
     * first available target, polling each operation's progress until done.
     *
     * @return a two-element array: [0] a message key ("infoMsg01" on success,
     *         "errorMsg02" on failure), [1] the failure detail (failure only)
     * @throws PortletException if the deployment machinery throws
     */
    public static String[] deploy(PortletRequest actionRequest, File moduleFile, File planFile)
            throws PortletException {
        // TODO this is a duplicate of the code from
        // org.apache.geronimo.console.configmanager.DeploymentPortlet.processAction()
        // TODO need to eliminate this duplicate code
        DeploymentFactoryManager dfm = DeploymentFactoryManager.getInstance();
        String[] statusMsgs = new String[2];
        try {
            DeploymentManager mgr = dfm.getDeploymentManager("deployer:geronimo:inVM", null, null);
            try {
                if (mgr instanceof JMXDeploymentManager) {
                    ((JMXDeploymentManager) mgr).setLogConfiguration(false, true);
                }
                Target[] targets = mgr.getTargets();
                if (null == targets) {
                    throw new IllegalStateException("No target to distribute to");
                }
                targets = new Target[] { targets[0] };
                // Distribute, then poll until the async operation settles.
                ProgressObject progress = mgr.distribute(targets, moduleFile, planFile);
                while (progress.getDeploymentStatus().isRunning()) {
                    Thread.sleep(100);
                }
                if (progress.getDeploymentStatus().isCompleted()) {
                    // Distribution succeeded; now start the deployed modules.
                    progress = mgr.start(progress.getResultTargetModuleIDs());
                    while (progress.getDeploymentStatus().isRunning()) {
                        Thread.sleep(100);
                    }
                    statusMsgs[0] = "infoMsg01";
                } else {
                    statusMsgs[0] = "errorMsg02";
                    statusMsgs[1] = progress.getDeploymentStatus().getMessage();
                }
            } finally {
                mgr.release();
            }
        } catch (Exception e) {
            throw new PortletException(e);
        }
        return statusMsgs;
    }
}
| apache-2.0 |
jskonst/PlatypusJS | web-client/src/platypus/src/com/eas/widgets/containers/FlowGapPanel.java | 1696 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.widgets.containers;
import com.eas.core.XElement;
import com.google.gwt.dom.client.Style;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.RequiresResize;
import com.google.gwt.user.client.ui.Widget;
/**
*
* @author mg
*/
public class FlowGapPanel extends FlowPanel implements RequiresResize {

    protected int hgap;
    protected int vgap;

    public FlowGapPanel() {
        super();
        getElement().<XElement>cast().addResizingTransitionEnd(this);
        // Children are inline-blocks; a zero line-height avoids stray vertical gaps.
        getElement().getStyle().setLineHeight(0, Style.Unit.PX);
    }

    public int getHgap() {
        return hgap;
    }

    /** Sets the horizontal gap and re-applies it as a left margin on every child. */
    public void setHgap(int aValue) {
        hgap = aValue;
        for (int i = getWidgetCount() - 1; i >= 0; i--) {
            getWidget(i).getElement().getStyle().setMarginLeft(hgap, Style.Unit.PX);
        }
    }

    public int getVgap() {
        return vgap;
    }

    /** Sets the vertical gap and re-applies it as a top margin on every child. */
    public void setVgap(int aValue) {
        vgap = aValue;
        for (int i = getWidgetCount() - 1; i >= 0; i--) {
            getWidget(i).getElement().getStyle().setMarginTop(vgap, Style.Unit.PX);
        }
    }

    /** Adds a child, first styling it with the current gaps and inline-block layout. */
    @Override
    public void add(Widget w) {
        final Style childStyle = w.getElement().getStyle();
        childStyle.setMarginLeft(hgap, Style.Unit.PX);
        childStyle.setMarginTop(vgap, Style.Unit.PX);
        childStyle.setDisplay(Style.Display.INLINE_BLOCK);
        childStyle.setVerticalAlign(Style.VerticalAlign.BOTTOM);
        super.add(w);
    }

    @Override
    public void onResize() {
        // Intentionally empty - reserved for future use.
    }
}
| apache-2.0 |
isandlaTech/cohorte-herald | java/org.cohorte.herald.core/src/org/cohorte/herald/core/utils/MessageUtils.java | 4164 | package org.cohorte.herald.core.utils;
import java.util.Iterator;
import org.cohorte.herald.Message;
import org.cohorte.herald.MessageReceived;
import org.jabsorb.ng.JSONSerializer;
import org.jabsorb.ng.serializer.MarshallException;
import org.jabsorb.ng.serializer.UnmarshallException;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
public class MessageUtils {
    /** The Jabsorb serializer */
    private static JSONSerializer pSerializer = new JSONSerializer();
    static {
        try {
            pSerializer.registerDefaultSerializers();
        } catch (Exception e) {
            // NOTE(review): a failure here leaves pSerializer without its default
            // serializers, so later toJSON()/fromJSON() calls will misbehave;
            // consider failing fast rather than only printing the stack trace.
            e.printStackTrace();
        }
    }
    /**
     * Serializes a Herald Message to its JSON wire form: an object with
     * "headers", "subject", optional "content", and "metadata" entries.
     *
     * @param aMsg the message to serialize
     * @return the JSON string, or {@code null} if building the JSON failed
     * @throws MarshallException if Jabsorb cannot marshal non-string content
     */
    public static String toJSON(Message aMsg) throws MarshallException {
        JSONObject json = new JSONObject();
        try {
            // headers
            JSONObject headers = new JSONObject();
            for (String key : aMsg.getHeaders().keySet()) {
                headers.put(key, aMsg.getHeaders().get(key));
            }
            json.put(Message.MESSAGE_HEADERS, headers);
            // subject
            json.put(Message.MESSAGE_SUBJECT, aMsg.getSubject());
            // content: plain strings go in as-is; anything else is marshalled
            // through Jabsorb first.
            if (aMsg.getContent() != null) {
                if (aMsg.getContent() instanceof String) {
                    json.put(Message.MESSAGE_CONTENT, aMsg.getContent());
                } else {
                    JSONObject content = new JSONObject(pSerializer.toJSON(aMsg.getContent()));
                    json.put(Message.MESSAGE_CONTENT, content);
                }
            }
            // metadata
            JSONObject metadata = new JSONObject();
            for (String key : aMsg.getMetadata().keySet()) {
                metadata.put(key, aMsg.getMetadata().get(key));
            }
            json.put(Message.MESSAGE_METADATA, metadata);
        } catch (JSONException e) {
            // NOTE(review): errors are signalled by a null return, not an exception;
            // callers must null-check.
            e.printStackTrace();
            return null;
        }
        return json.toString();
    }
    /**
     * Parses a Herald JSON message into a MessageReceived.
     * Rejects messages whose header's Herald specification version does not
     * match HERALD_SPECIFICATION_VERSION.
     *
     * @param json the raw JSON text
     * @return the parsed message, or {@code null} on any parse/validation error
     * @throws UnmarshallException declared for Jabsorb unmarshalling
     *         (NOTE(review): in practice errors are swallowed and {@code null}
     *         is returned instead of this being thrown)
     */
    @SuppressWarnings("unchecked")
    public static MessageReceived fromJSON(String json) throws UnmarshallException {
        try {
            JSONObject wParsedMsg = new JSONObject(json);
            {
                try {
                    // check if valid herald message (respects herald specification version)
                    int heraldVersion = -1;
                    JSONObject jHeader = wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS);
                    if (jHeader != null) {
                        if (jHeader.has(Message.MESSAGE_HERALD_VERSION)) {
                            heraldVersion = jHeader.getInt(Message.MESSAGE_HERALD_VERSION);
                        }
                    }
                    if (heraldVersion != Message.HERALD_SPECIFICATION_VERSION) {
                        throw new JSONException("Herald specification of the received message is not supported!");
                    }
                    // Build the message skeleton from uid + subject; the remaining
                    // constructor slots are filled in below (or left null).
                    MessageReceived wMsg = new MessageReceived(
                            wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS).getString(Message.MESSAGE_HEADER_UID),
                            wParsedMsg.getString(Message.MESSAGE_SUBJECT),
                            null,
                            null,
                            null,
                            null,
                            null,
                            null);
                    // content: JSON structures are unmarshalled via Jabsorb,
                    // scalars are passed through untouched.
                    Object cont = wParsedMsg.opt(Message.MESSAGE_CONTENT);
                    if (cont != null) {
                        if (cont instanceof JSONObject || cont instanceof JSONArray) {
                            wMsg.setContent(pSerializer.fromJSON(cont.toString()));
                        } else
                            wMsg.setContent(cont);
                    } else {
                        wMsg.setContent(null);
                    }
                    // headers
                    Iterator<String> wKeys;
                    if (wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS) != null) {
                        wKeys = wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS).keys();
                        while(wKeys.hasNext()) {
                            String key = wKeys.next();
                            wMsg.addHeader(key, wParsedMsg.getJSONObject(Message.MESSAGE_HEADERS).get(key));
                        }
                    }
                    // metadata
                    Iterator<String> wKeys2;
                    if (wParsedMsg.getJSONObject(Message.MESSAGE_METADATA) != null) {
                        wKeys2 = wParsedMsg.getJSONObject(Message.MESSAGE_METADATA).keys();
                        while(wKeys2.hasNext()) {
                            String key = wKeys2.next();
                            wMsg.addMetadata(key, wParsedMsg.getJSONObject(Message.MESSAGE_METADATA).get(key));
                        }
                    }
                    return wMsg;
                } catch (JSONException e) {
                    // NOTE(review): parse/validation failures are swallowed; null is returned.
                    e.printStackTrace();
                    return null;
                }
            }
        } catch (Exception e) {
            // NOTE(review): malformed JSON text also yields null rather than an exception.
            e.printStackTrace();
            return null;
        }
    }
}
| apache-2.0 |
sunpy1106/SpringBeanLifeCycle | src/main/java/org/springframework/cache/annotation/AbstractCachingConfiguration.java | 4164 | /*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cache.annotation;
import java.util.Collection;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.CacheManager;
import org.springframework.cache.interceptor.KeyGenerator;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.ImportAware;
import org.springframework.core.annotation.AnnotationAttributes;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
/**
* Abstract base {@code @Configuration} class providing common structure for enabling
* Spring's annotation-driven cache management capability.
*
* @author Chris Beams
* @since 3.1
* @see EnableCaching
*/
@Configuration
public abstract class AbstractCachingConfiguration implements ImportAware {

	protected AnnotationAttributes enableCaching;

	protected CacheManager cacheManager;

	protected KeyGenerator keyGenerator;

	@Autowired(required=false)
	private Collection<CacheManager> cacheManagerBeans;

	@Autowired(required=false)
	private Collection<CachingConfigurer> cachingConfigurers;


	/**
	 * Captures the attributes of the {@code @EnableCaching} annotation on the
	 * importing {@code @Configuration} class.
	 */
	@Override
	public void setImportMetadata(AnnotationMetadata importMetadata) {
		Map<String, Object> attributes =
				importMetadata.getAnnotationAttributes(EnableCaching.class.getName(), false);
		this.enableCaching = AnnotationAttributes.fromMap(attributes);
		Assert.notNull(this.enableCaching,
				"@EnableCaching is not present on importing class " +
				importMetadata.getClassName());
	}

	/**
	 * Determine which {@code CacheManager} bean to use. Prefer the result of
	 * {@link CachingConfigurer#cacheManager()} over any by-type matching. If none, fall
	 * back to by-type matching on {@code CacheManager}.
	 * @throws IllegalArgumentException if no CacheManager can be found; if more than one
	 * CachingConfigurer implementation exists; if multiple CacheManager beans and no
	 * CachingConfigurer exists to disambiguate.
	 */
	@PostConstruct
	protected void reconcileCacheManager() {
		// A CachingConfigurer, when present, wins over by-type matching.
		if (!CollectionUtils.isEmpty(this.cachingConfigurers)) {
			int nConfigurers = this.cachingConfigurers.size();
			if (nConfigurers > 1) {
				throw new IllegalStateException(nConfigurers + " implementations of " +
						"CachingConfigurer were found when only 1 was expected. " +
						"Refactor the configuration such that CachingConfigurer is " +
						"implemented only once or not at all.");
			}
			CachingConfigurer configurer = this.cachingConfigurers.iterator().next();
			this.cacheManager = configurer.cacheManager();
			this.keyGenerator = configurer.keyGenerator();
			return;
		}
		// Otherwise fall back to by-type matching, which must be unambiguous.
		if (!CollectionUtils.isEmpty(this.cacheManagerBeans)) {
			int nManagers = this.cacheManagerBeans.size();
			if (nManagers > 1) {
				throw new IllegalStateException(nManagers + " beans of type CacheManager " +
						"were found when only 1 was expected. Remove all but one of the " +
						"CacheManager bean definitions, or implement CachingConfigurer " +
						"to make explicit which CacheManager should be used for " +
						"annotation-driven cache management.");
			}
			this.cacheManager = this.cacheManagerBeans.iterator().next();
			// keyGenerator remains null; will fall back to default within CacheInterceptor
			return;
		}
		throw new IllegalStateException("No bean of type CacheManager could be found. " +
				"Register a CacheManager bean or remove the @EnableCaching annotation " +
				"from your configuration.");
	}

}
| apache-2.0 |
nverwer/cocooncomponents | src/org/apache/cocoon/transformation/DownloadTransformer.java | 11508 | package org.apache.cocoon.transformation;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Map;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipEntry;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.cocoon.ProcessingException;
import org.apache.cocoon.environment.SourceResolver;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
/**
 * This transformer downloads a new file to disk.
 * <p>
 * It triggers for elements in the namespace "http://apache.org/cocoon/download/1.0".
 * Attributes:
 * @src : the file that should be downloaded
 * @target (optional): path where the file should be stored (includes filename)
 * @target-dir (optional): directory where the file should be stored
 * @unzip (optional): if "true" then unzip file after downloading.
 * If there is no @target or @target-dir attribute a temporary file is created.
 * <p>
 * Example XML input:
 * <pre>
 * {@code
 * <download:download src="http://some.server.com/zipfile.zip"
 * target="/tmp/zipfile.zip" unzip="true"/>
 * }
 * </pre>
 * The @src specifies the file that should be downloaded. The
 * @target specifies where the file should be stored. @unzip is true, so the
 * file will be unzipped immediately.
 * <p>
 * The result is
 * <pre>
 * {@code
 * <download:result unzipped="/path/to/unzipped/file/on/disk">/path/to/file/on/disk</download:result>
 * }
 * </pre>
 * (@unzipped is only present when @unzip="true") or
 * <pre>
 * {@code
 * <download:error>The error message</download:error>
 * }
 * </pre>
 * if an error (other than a HTTP error) occurs.
 * HTTP errors are thrown.
 * Define this transformer in the sitemap:
 * <pre>
 * {@code
 * <map:components>
 * <map:transformers>
 * <map:transformer name="download" logger="sitemap.transformer.download"
 * src="org.apache.cocoon.transformation.DownloadTransformer"/>
 * ...
 * }
 * </pre>
 * Use this transformer:
 * <pre>
 * {@code
 * <map:transform type="download"/>
 * }
 * </pre>
 *
 *
 * @author <a href="mailto:maarten.kroon@koop.overheid.nl">Maarten Kroon</a>
 * @author <a href="mailto:hhv@x-scale.nl">Huib Verweij</a>
 */
public class DownloadTransformer extends AbstractSAXTransformer {
    /** Namespace whose elements trigger this transformer. */
    public static final String DOWNLOAD_NS = "http://apache.org/cocoon/download/1.0";
    public static final String DOWNLOAD_ELEMENT = "download";
    private static final String DOWNLOAD_PREFIX = "download";
    public static final String RESULT_ELEMENT = "result";
    public static final String ERROR_ELEMENT = "error";
    public static final String SRC_ATTRIBUTE = "src";
    public static final String TARGET_ATTRIBUTE = "target";
    public static final String TARGETDIR_ATTRIBUTE = "target-dir";
    public static final String UNZIP_ATTRIBUTE = "unzip";
    public static final String RECURSIVE_UNZIP_ATTRIBUTE = "recursive-unzip";
    public static final String UNZIPPED_ATTRIBUTE = "unzipped";

    public DownloadTransformer() {
        this.defaultNamespaceURI = DOWNLOAD_NS;
    }

    @Override
    public void setup(SourceResolver resolver, Map objectModel, String src,
            Parameters params) throws ProcessingException, SAXException, IOException {
        super.setup(resolver, objectModel, src, params);
    }

    /**
     * Handles {@code <download:download>} elements: downloads @src, optionally
     * unzips it, and emits either a {@code <download:result>} element with the
     * local path, or a {@code <download:error>} element with the failure message.
     */
    @Override
    public void startTransformingElement(String uri, String localName,
            String qName, Attributes attributes) throws SAXException, ProcessingException, IOException {
        if (DOWNLOAD_NS.equals(uri) && DOWNLOAD_ELEMENT.equals(localName)) {
            try {
                File[] downloadResult = download(
                        attributes.getValue(SRC_ATTRIBUTE),
                        attributes.getValue(TARGETDIR_ATTRIBUTE),
                        attributes.getValue(TARGET_ATTRIBUTE),
                        attributes.getValue(UNZIP_ATTRIBUTE),
                        attributes.getValue(RECURSIVE_UNZIP_ATTRIBUTE)
                );
                File downloadedFile = downloadResult[0];
                File unzipDir = downloadResult[1];
                String absPath = downloadedFile.getCanonicalPath();
                AttributesImpl attrsImpl = new AttributesImpl();
                if (unzipDir != null) {
                    // Only advertise @unzipped when an unzip was actually performed.
                    attrsImpl.addAttribute("", UNZIPPED_ATTRIBUTE, UNZIPPED_ATTRIBUTE, "CDATA", unzipDir.getAbsolutePath());
                }
                xmlConsumer.startElement(uri, RESULT_ELEMENT, String.format("%s:%s", DOWNLOAD_PREFIX, RESULT_ELEMENT), attrsImpl);
                xmlConsumer.characters(absPath.toCharArray(), 0, absPath.length());
                xmlConsumer.endElement(uri, RESULT_ELEMENT, String.format("%s:%s", DOWNLOAD_PREFIX, RESULT_ELEMENT));
            } catch (Exception e) {
                // Report failures inline as <download:error> instead of aborting the pipeline.
                xmlConsumer.startElement(uri, ERROR_ELEMENT, qName, attributes);
                // BUGFIX: getMessage() may be null (e.g. NullPointerException), which
                // previously caused a secondary NPE while reporting the error.
                String message = e.getMessage();
                if (message == null) {
                    message = e.toString();
                }
                xmlConsumer.characters(message.toCharArray(), 0, message.length());
                xmlConsumer.endElement(uri, ERROR_ELEMENT, qName);
            }
        } else {
            super.startTransformingElement(uri, localName, qName, attributes);
        }
    }

    @Override
    public void endTransformingElement(String uri, String localName, String qName)
            throws SAXException, ProcessingException, IOException {
        // NOTE(review): this checks the inherited 'namespaceURI' field rather than the
        // 'uri' parameter; presumably AbstractSAXTransformer guarantees they agree for
        // elements routed here - confirm before changing.
        if (DOWNLOAD_NS.equals(namespaceURI) && DOWNLOAD_ELEMENT.equals(localName)) {
            // The <download:download> element itself is consumed; emit nothing.
            return;
        }
        super.endTransformingElement(uri, localName, qName);
    }

    /**
     * Downloads {@code sourceUri} to a local file and optionally unzips it.
     *
     * @param sourceUri the URL to download
     * @param targetDir directory to store the file in (may be null or empty)
     * @param target explicit target path including the filename; takes precedence
     *        over {@code targetDir} (may be null or empty)
     * @param unzip "true" to unzip the downloaded file
     * @param recursiveUnzip "true" to also unzip zip files contained in the archive
     * @return a two-element array: [0] the downloaded file, [1] the unzip
     *         directory, or {@code null} when no unzipping was requested
     * @throws ProcessingException on a non-2xx HTTP status or proxy misconfiguration
     * @throws IOException on I/O failures while writing or unzipping
     */
    private File[] download(String sourceUri, String targetDir, String target, String unzip, String recursiveUnzip)
            throws ProcessingException, IOException, SAXException {
        File targetFile;
        File unZipped = null;
        if (null != target && !target.equals("")) {
            targetFile = new File(target);
        } else if (null != targetDir && !targetDir.equals("")) {
            // BUGFIX: @target-dir is documented as a *directory*; store the file inside
            // it under the name taken from the source URI. Previously the directory
            // path itself was used as the target file, so the download could not be
            // written. NOTE(review): assumes the URI path ends in a usable filename -
            // confirm behavior for URLs carrying query strings.
            targetFile = new File(targetDir, FilenameUtils.getName(sourceUri));
        } else {
            // No target given: download into a uniquely-named temporary file.
            String baseName = FilenameUtils.getBaseName(sourceUri);
            String extension = FilenameUtils.getExtension(sourceUri);
            targetFile = File.createTempFile(baseName, "." + extension);
        }
        // BUGFIX: getParentFile() is null for single-segment relative paths; guard NPE.
        File parentDir = targetFile.getParentFile();
        if (parentDir != null && !parentDir.exists()) {
            parentDir.mkdirs();
        }
        boolean unzipFile = (null != unzip && unzip.equals("true")) ||
                (null != recursiveUnzip && recursiveUnzip.equals("true"));
        String absPath = targetFile.getAbsolutePath();
        // The unzip directory is the target path minus its extension; "" means "no unzip".
        String unzipDir = unzipFile ? FilenameUtils.removeExtension(absPath) : "";
        HttpClient httpClient = new HttpClient();
        httpClient.setConnectionTimeout(60000);
        httpClient.setTimeout(60000);
        if (System.getProperty("http.proxyHost") != null) {
            // Honour the standard JVM proxy properties. http.nonProxyHosts contains
            // '|'-separated glob patterns; translate them into one regex and skip the
            // proxy when the source URI matches it.
            String nonProxyHostsRE = System.getProperty("http.nonProxyHosts", "");
            if (nonProxyHostsRE.length() > 0) {
                String[] pHosts = nonProxyHostsRE.replaceAll("\\.", "\\\\.").replaceAll("\\*", ".*").split("\\|");
                nonProxyHostsRE = "";
                for (String pHost : pHosts) {
                    nonProxyHostsRE += "|(^https?://" + pHost + ".*$)";
                }
                nonProxyHostsRE = nonProxyHostsRE.substring(1);
            }
            if (nonProxyHostsRE.length() == 0 || !sourceUri.matches(nonProxyHostsRE)) {
                try {
                    HostConfiguration hostConfiguration = httpClient.getHostConfiguration();
                    hostConfiguration.setProxy(System.getProperty("http.proxyHost"), Integer.parseInt(System.getProperty("http.proxyPort", "80")));
                    httpClient.setHostConfiguration(hostConfiguration);
                } catch (Exception e) {
                    throw new ProcessingException("Cannot set proxy!", e);
                }
            }
        }
        HttpMethod httpMethod = new GetMethod(sourceUri);
        try {
            int responseCode = httpClient.executeMethod(httpMethod);
            if (responseCode < 200 || responseCode >= 300) {
                // Non-2xx statuses are thrown (see class javadoc), not reported inline.
                throw new ProcessingException(String.format("Received HTTP status code %d (%s)", responseCode, httpMethod.getStatusText()));
            }
            OutputStream os = new BufferedOutputStream(new FileOutputStream(targetFile));
            try {
                IOUtils.copyLarge(httpMethod.getResponseBodyAsStream(), os);
            } finally {
                os.close();
            }
        } finally {
            // Always return the connection to the pool, even on failure.
            httpMethod.releaseConnection();
        }
        if (!"".equals(unzipDir)) {
            unZipped = unZipIt(targetFile, unzipDir, recursiveUnzip);
        }
        return new File[] {targetFile, unZipped};
    }

    /**
     * Unzips {@code zipFile} into {@code outputFolder}, creating the folder if
     * necessary and guarding against entries that would escape the target
     * directory ("zip slip").
     *
     * @param zipFile input zip file
     * @param outputFolder zip file output folder
     * @param recursiveUnzip "true" to also unzip nested .zip entries
     * @return the output folder
     * @throws IOException if extraction fails or an entry escapes the output folder.
     *         (Previously such errors were swallowed with printStackTrace and a
     *         partial result was returned; propagating lets the caller emit the
     *         regular {@code <download:error>} element.)
     */
    private File unZipIt(File zipFile, String outputFolder, String recursiveUnzip) throws IOException {
        // Create the output directory if it does not exist yet.
        File folder = new File(outputFolder);
        if (!folder.exists()) {
            folder.mkdirs();
        }
        String canonicalFolder = folder.getCanonicalPath();
        try (ZipInputStream zis = new ZipInputStream(new FileInputStream(zipFile))) {
            ZipEntry ze = zis.getNextEntry();
            while (ze != null) {
                String fileName = ze.getName();
                File newFile = new File(folder, fileName);
                // SECURITY (zip slip): refuse entries whose canonical path resolves
                // outside the output folder (e.g. names containing "../").
                String canonicalDest = newFile.getCanonicalPath();
                if (!canonicalDest.equals(canonicalFolder)
                        && !canonicalDest.startsWith(canonicalFolder + File.separator)) {
                    throw new IOException("Zip entry is outside of the target directory: " + fileName);
                }
                if (ze.isDirectory()) {
                    // BUGFIX: explicit directory entries previously hit a
                    // FileNotFoundException when opened as a FileOutputStream.
                    newFile.mkdirs();
                } else {
                    // Create intermediate folders for entries in nested directories.
                    File entryParent = newFile.getParentFile();
                    if (entryParent != null) {
                        entryParent.mkdirs();
                    }
                    try (FileOutputStream fos = new FileOutputStream(newFile)) {
                        IOUtils.copy(zis, fos);
                    }
                    if ("true".equals(recursiveUnzip) && "zip".equals(FilenameUtils.getExtension(fileName))) {
                        // Recursively unzip nested archives into a sibling folder.
                        unZipIt(newFile, FilenameUtils.concat(outputFolder, FilenameUtils.getBaseName(fileName)), recursiveUnzip);
                    }
                }
                ze = zis.getNextEntry();
            }
            zis.closeEntry();
        }
        return folder;
    }
}
| apache-2.0 |
UlrichColby/httpcomponents-client | httpclient5-testing/src/test/java/org/apache/hc/client5/testing/sync/TestStatefulConnManagement.java | 9740 | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.testing.sync;
import java.io.IOException;
import org.apache.hc.client5.http.HttpRoute;
import org.apache.hc.client5.http.UserTokenHandler;
import org.apache.hc.client5.http.classic.methods.HttpGet;
import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
import org.apache.hc.client5.http.protocol.HttpClientContext;
import org.apache.hc.core5.http.ClassicHttpRequest;
import org.apache.hc.core5.http.ClassicHttpResponse;
import org.apache.hc.core5.http.EndpointDetails;
import org.apache.hc.core5.http.HttpException;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.HttpStatus;
import org.apache.hc.core5.http.io.HttpRequestHandler;
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.apache.hc.core5.http.io.entity.StringEntity;
import org.apache.hc.core5.http.protocol.BasicHttpContext;
import org.apache.hc.core5.http.protocol.HttpContext;
import org.junit.Assert;
import org.junit.Test;
/**
 * Test cases for state-ful connections.
 * <p>
 * Both tests install a {@code UserTokenHandler} that derives connection
 * "state" from the {@code "user"} attribute of the execution context, so the
 * connection manager may only reuse a pooled connection for requests carrying
 * the same user token.
 */
public class TestStatefulConnManagement extends LocalServerTestBase {
    // Minimal request handler: unconditionally answers 200 OK with a small text entity.
    private static class SimpleService implements HttpRequestHandler {
        public SimpleService() {
            super();
        }
        @Override
        public void handle(
                final ClassicHttpRequest request,
                final ClassicHttpResponse response,
                final HttpContext context) throws HttpException, IOException {
            response.setCode(HttpStatus.SC_OK);
            final StringEntity entity = new StringEntity("Whatever");
            response.setEntity(entity);
        }
    }
    /**
     * Runs several workers concurrently, each tagged with its own user token,
     * and verifies that every request of a given worker was served by the same
     * underlying endpoint (state-ful connection reuse).
     */
    @Test
    public void testStatefulConnections() throws Exception {
        final int workerCount = 5;
        final int requestCount = 5;
        this.server.registerHandler("*", new SimpleService());
        // One pooled connection per worker, so each worker can keep "its" connection.
        this.connManager.setMaxTotal(workerCount);
        this.connManager.setDefaultMaxPerRoute(workerCount);
        // State of a connection = the "user" attribute of the context it served.
        final UserTokenHandler userTokenHandler = new UserTokenHandler() {
            @Override
            public Object getUserToken(final HttpRoute route, final HttpContext context) {
                final String id = (String) context.getAttribute("user");
                return id;
            }
        };
        this.clientBuilder.setUserTokenHandler(userTokenHandler);
        final HttpHost target = start();
        final HttpClientContext[] contexts = new HttpClientContext[workerCount];
        final HttpWorker[] workers = new HttpWorker[workerCount];
        for (int i = 0; i < contexts.length; i++) {
            final HttpClientContext context = HttpClientContext.create();
            contexts[i] = context;
            workers[i] = new HttpWorker(
                    "user" + i,
                    context, requestCount, target, this.httpclient);
        }
        for (final HttpWorker worker : workers) {
            worker.start();
        }
        for (final HttpWorker worker : workers) {
            worker.join(LONG_TIMEOUT.toMillis());
        }
        // Fail fast on any worker error; otherwise each worker must have
        // completed all of its requests.
        for (final HttpWorker worker : workers) {
            final Exception ex = worker.getException();
            if (ex != null) {
                throw ex;
            }
            Assert.assertEquals(requestCount, worker.getCount());
        }
        // Every request of a worker must have been served by the same endpoint
        // instance as its first request ("r0").
        for (final HttpContext context : contexts) {
            final String state0 = (String) context.getAttribute("r0");
            Assert.assertNotNull(state0);
            for (int r = 1; r < requestCount; r++) {
                Assert.assertEquals(state0, context.getAttribute("r" + r));
            }
        }
    }
    /**
     * Worker thread that issues a fixed number of GET requests within a single
     * context and records, per request, an identity hash of the endpoint that
     * served it (attributes "r0".."rN-1"). Any exception is captured rather
     * than thrown, so the main test thread can inspect it after join().
     */
    static class HttpWorker extends Thread {
        private final String uid;
        private final HttpClientContext context;
        private final int requestCount;
        private final HttpHost target;
        private final CloseableHttpClient httpclient;
        // Written by the worker thread, read by the test thread after join():
        // volatile gives the necessary visibility.
        private volatile Exception exception;
        private volatile int count;
        public HttpWorker(
                final String uid,
                final HttpClientContext context,
                final int requestCount,
                final HttpHost target,
                final CloseableHttpClient httpclient) {
            super();
            this.uid = uid;
            this.context = context;
            this.requestCount = requestCount;
            this.target = target;
            this.httpclient = httpclient;
            this.count = 0;
        }
        public int getCount() {
            return this.count;
        }
        public Exception getException() {
            return this.exception;
        }
        @Override
        public void run() {
            try {
                // Tag the context so the UserTokenHandler associates connections
                // with this worker's identity.
                this.context.setAttribute("user", this.uid);
                for (int r = 0; r < this.requestCount; r++) {
                    final HttpGet httpget = new HttpGet("/");
                    final ClassicHttpResponse response = this.httpclient.execute(
                            this.target,
                            httpget,
                            this.context);
                    this.count++;
                    // Identity hash of the endpoint acts as a fingerprint of the
                    // physical connection that served this request.
                    final EndpointDetails endpointDetails = this.context.getEndpointDetails();
                    final String connuid = Integer.toHexString(System.identityHashCode(endpointDetails));
                    this.context.setAttribute("r" + r, connuid);
                    EntityUtils.consume(response.getEntity());
                }
            } catch (final Exception ex) {
                this.exception = ex;
            }
        }
    }
    @Test
    public void testRouteSpecificPoolRecylcing() throws Exception {
        // This tests what happens when a maxed connection pool needs
        // to kill the last idle connection to a route to build a new
        // one to the same route.
        final int maxConn = 2;
        this.server.registerHandler("*", new SimpleService());
        this.connManager.setMaxTotal(maxConn);
        this.connManager.setDefaultMaxPerRoute(maxConn);
        // State of a connection = the "user" attribute (may be null).
        final UserTokenHandler userTokenHandler = new UserTokenHandler() {
            @Override
            public Object getUserToken(final HttpRoute route, final HttpContext context) {
                return context.getAttribute("user");
            }
        };
        this.clientBuilder.setUserTokenHandler(userTokenHandler);
        final HttpHost target = start();
        // Bottom of the pool : a *keep alive* connection to Route 1.
        final HttpContext context1 = new BasicHttpContext();
        context1.setAttribute("user", "stuff");
        final ClassicHttpResponse response1 = this.httpclient.execute(
                target, new HttpGet("/"), context1);
        EntityUtils.consume(response1.getEntity());
        // The ConnPoolByRoute now has 1 free connection, out of 2 max
        // The ConnPoolByRoute has one RouteSpcfcPool, that has one free connection
        // for [localhost][stuff]
        Thread.sleep(100);
        // Send a very simple HTTP get (it MUST be simple, no auth, no proxy, no 302, no 401, ...)
        // Send it to another route. Must be a keepalive.
        final HttpContext context2 = new BasicHttpContext();
        final ClassicHttpResponse response2 = this.httpclient.execute(
                new HttpHost("127.0.0.1", this.server.getPort()), new HttpGet("/"), context2);
        EntityUtils.consume(response2.getEntity());
        // ConnPoolByRoute now has 2 free connexions, out of its 2 max.
        // The [localhost][stuff] RouteSpcfcPool is the same as earlier
        // And there is a [127.0.0.1][null] pool with 1 free connection
        Thread.sleep(100);
        // This will put the ConnPoolByRoute to the targeted state :
        // [localhost][stuff] will not get reused because this call is [localhost][null]
        // So the ConnPoolByRoute will need to kill one connection (it is maxed out globally).
        // The killed conn is the oldest, which means the first HTTPGet ([localhost][stuff]).
        // When this happens, the RouteSpecificPool becomes empty.
        final HttpContext context3 = new BasicHttpContext();
        final ClassicHttpResponse response3 = this.httpclient.execute(
                target, new HttpGet("/"), context3);
        // If the ConnPoolByRoute did not behave coherently with the RouteSpecificPool
        // this may fail. Ex : if the ConnPool discared the route pool because it was empty,
        // but still used it to build the request3 connection.
        EntityUtils.consume(response3.getEntity());
    }
}
| apache-2.0 |
DanielSerdyukov/droidkit-4.x | library/src/main/java/droidkit/app/MapsIntent.java | 1295 | package droidkit.app;
import android.content.Intent;
import android.net.Uri;
import android.support.annotation.NonNull;
import java.util.Locale;
/**
 * Factory of {@link Intent}s that open, search, or navigate within the
 * Google Maps web application.
 *
 * @author Daniel Serdyukov
 */
public final class MapsIntent {

    private static final String MAPS_URL = "https://maps.google.com/maps";

    private MapsIntent() {
        // Utility class; not instantiable.
    }

    /** @return an intent that opens Google Maps without any query */
    @NonNull
    public static Intent openMaps() {
        return new Intent(Intent.ACTION_VIEW, Uri.parse(MAPS_URL));
    }

    /**
     * @param lat latitude of the point to show
     * @param lng longitude of the point to show
     * @return an intent that opens Google Maps centered on the given coordinates
     */
    @NonNull
    public static Intent openMaps(double lat, double lng) {
        // Locale.US guarantees '.' as decimal separator regardless of device locale.
        return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL + "?q=%f,%f", lat, lng)));
    }

    /**
     * @param lat destination latitude
     * @param lng destination longitude
     * @return an intent that routes from the current location to the destination
     */
    @NonNull
    public static Intent route(double lat, double lng) {
        return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL + "?daddr=%f,%f", lat, lng)));
    }

    /**
     * @param fromLat start latitude
     * @param fromLng start longitude
     * @param toLat destination latitude
     * @param toLng destination longitude
     * @return an intent that shows a route between the two given coordinates
     */
    @NonNull
    public static Intent route(double fromLat, double fromLng, double toLat, double toLng) {
        return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL +
                "?saddr=%f,%f&daddr=%f,%f", fromLat, fromLng, toLat, toLng)));
    }

    /**
     * @param query free-text search term; it is URL-encoded before being appended
     * @return an intent that searches Google Maps for {@code query}
     */
    @NonNull
    public static Intent search(@NonNull String query) {
        // BUGFIX: URL-encode the query. A raw query containing spaces, '&' or '#'
        // previously produced a malformed or truncated maps URL.
        return new Intent(Intent.ACTION_VIEW, Uri.parse(MAPS_URL + "?q=" + Uri.encode(query)));
    }
}
| apache-2.0 |
juwi/hbase | hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestReplication.java | 17081 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.test;
import com.google.common.base.Joiner;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.IntegrationTestingUtility;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.replication.ReplicationPeerConfig;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
/**
* This is an integration test for replication. It is derived off
* {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList} that creates a large circular
* linked list in one cluster and verifies that the data is correct in a sink cluster. The test
* handles creating the tables and schema and setting up the replication.
*/
public class IntegrationTestReplication extends IntegrationTestBigLinkedList {
protected String sourceClusterIdString;
protected String sinkClusterIdString;
protected int numIterations;
protected int numMappers;
protected long numNodes;
protected String outputDir;
protected int numReducers;
protected int generateVerifyGap;
protected Integer width;
protected Integer wrapMultiplier;
protected boolean noReplicationSetup = false;
private final String SOURCE_CLUSTER_OPT = "sourceCluster";
private final String DEST_CLUSTER_OPT = "destCluster";
private final String ITERATIONS_OPT = "iterations";
private final String NUM_MAPPERS_OPT = "numMappers";
private final String OUTPUT_DIR_OPT = "outputDir";
private final String NUM_REDUCERS_OPT = "numReducers";
private final String NO_REPLICATION_SETUP_OPT = "noReplicationSetup";
/**
* The gap (in seconds) from when data is finished being generated at the source
* to when it can be verified. This is the replication lag we are willing to tolerate
*/
private final String GENERATE_VERIFY_GAP_OPT = "generateVerifyGap";
/**
* The width of the linked list.
* See {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList} for more details
*/
private final String WIDTH_OPT = "width";
/**
* The number of rows after which the linked list points to the first row.
* See {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList} for more details
*/
private final String WRAP_MULTIPLIER_OPT = "wrapMultiplier";
/**
* The number of nodes in the test setup. This has to be a multiple of WRAP_MULTIPLIER * WIDTH
* in order to ensure that the linked list can is complete.
* See {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList} for more details
*/
private final String NUM_NODES_OPT = "numNodes";
private final int DEFAULT_NUM_MAPPERS = 1;
private final int DEFAULT_NUM_REDUCERS = 1;
private final int DEFAULT_NUM_ITERATIONS = 1;
private final int DEFAULT_GENERATE_VERIFY_GAP = 60;
private final int DEFAULT_WIDTH = 1000000;
private final int DEFAULT_WRAP_MULTIPLIER = 25;
private final int DEFAULT_NUM_NODES = DEFAULT_WIDTH * DEFAULT_WRAP_MULTIPLIER;
/**
* Wrapper around an HBase ClusterID allowing us
* to get admin connections and configurations for it
*/
protected class ClusterID {
private final Configuration configuration;
private Connection connection = null;
/**
* This creates a new ClusterID wrapper that will automatically build connections and
* configurations to be able to talk to the specified cluster
*
* @param base the base configuration that this class will add to
* @param key the cluster key in the form of zk_quorum:zk_port:zk_parent_node
*/
public ClusterID(Configuration base,
String key) {
configuration = new Configuration(base);
String[] parts = key.split(":");
configuration.set(HConstants.ZOOKEEPER_QUORUM, parts[0]);
configuration.set(HConstants.ZOOKEEPER_CLIENT_PORT, parts[1]);
configuration.set(HConstants.ZOOKEEPER_ZNODE_PARENT, parts[2]);
}
@Override
public String toString() {
return Joiner.on(":").join(configuration.get(HConstants.ZOOKEEPER_QUORUM),
configuration.get(HConstants.ZOOKEEPER_CLIENT_PORT),
configuration.get(HConstants.ZOOKEEPER_ZNODE_PARENT));
}
public Configuration getConfiguration() {
return this.configuration;
}
public Connection getConnection() throws Exception {
if (this.connection == null) {
this.connection = ConnectionFactory.createConnection(this.configuration);
}
return this.connection;
}
public void closeConnection() throws Exception {
this.connection.close();
this.connection = null;
}
public boolean equals(ClusterID other) {
return this.toString().equalsIgnoreCase(other.toString());
}
}
/**
* The main runner loop for the test. It uses
* {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList}
* for the generation and verification of the linked list. It is heavily based on
* {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList.Loop}
*/
protected class VerifyReplicationLoop extends Configured implements Tool {
private final Log LOG = LogFactory.getLog(VerifyReplicationLoop.class);
protected ClusterID source;
protected ClusterID sink;
IntegrationTestBigLinkedList integrationTestBigLinkedList;
/**
* This tears down any tables that existed from before and rebuilds the tables and schemas on
* the source cluster. It then sets up replication from the source to the sink cluster by using
* the {@link org.apache.hadoop.hbase.client.replication.ReplicationAdmin}
* connection.
*
* @throws Exception
*/
protected void setupTablesAndReplication() throws Exception {
TableName tableName = getTableName(source.getConfiguration());
ClusterID[] clusters = {source, sink};
// delete any old tables in the source and sink
for (ClusterID cluster : clusters) {
Admin admin = cluster.getConnection().getAdmin();
if (admin.tableExists(tableName)) {
if (admin.isTableEnabled(tableName)) {
admin.disableTable(tableName);
}
/**
* TODO: This is a work around on a replication bug (HBASE-13416)
* When we recreate a table against that has recently been
* deleted, the contents of the logs are replayed even though
* they should not. This ensures that we flush the logs
* before the table gets deleted. Eventually the bug should be
* fixed and this should be removed.
*/
Set<ServerName> regionServers = new TreeSet<>();
for (HRegionLocation rl :
cluster.getConnection().getRegionLocator(tableName).getAllRegionLocations()) {
regionServers.add(rl.getServerName());
}
for (ServerName server : regionServers) {
source.getConnection().getAdmin().rollWALWriter(server);
}
admin.deleteTable(tableName);
}
}
// create the schema
Generator generator = new Generator();
generator.setConf(source.getConfiguration());
generator.createSchema();
// setup the replication on the source
if (!source.equals(sink)) {
ReplicationAdmin replicationAdmin = new ReplicationAdmin(source.getConfiguration());
// remove any old replication peers
for (String oldPeer : replicationAdmin.listPeerConfigs().keySet()) {
replicationAdmin.removePeer(oldPeer);
}
// set the sink to be the target
ReplicationPeerConfig peerConfig = new ReplicationPeerConfig();
peerConfig.setClusterKey(sink.toString());
// set the test table to be the table to replicate
HashMap<TableName, ArrayList<String>> toReplicate = new HashMap<>();
toReplicate.put(tableName, new ArrayList<String>(0));
replicationAdmin.addPeer("TestPeer", peerConfig, toReplicate);
replicationAdmin.enableTableRep(tableName);
replicationAdmin.close();
}
for (ClusterID cluster : clusters) {
cluster.closeConnection();
}
}
protected void waitForReplication() throws Exception {
// TODO: we shouldn't be sleeping here. It would be better to query the region servers
// and wait for them to report 0 replication lag.
Thread.sleep(generateVerifyGap * 1000);
}
/**
* Run the {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList.Generator} in the
* source cluster. This assumes that the tables have been setup via setupTablesAndReplication.
*
* @throws Exception
*/
protected void runGenerator() throws Exception {
Path outputPath = new Path(outputDir);
UUID uuid = UUID.randomUUID(); //create a random UUID.
Path generatorOutput = new Path(outputPath, uuid.toString());
Generator generator = new Generator();
generator.setConf(source.getConfiguration());
int retCode = generator.run(numMappers, numNodes, generatorOutput, width, wrapMultiplier);
if (retCode > 0) {
throw new RuntimeException("Generator failed with return code: " + retCode);
}
}
/**
* Run the {@link org.apache.hadoop.hbase.test.IntegrationTestBigLinkedList.Verify}
* in the sink cluster. If replication is working properly the data written at the source
* cluster should be available in the sink cluster after a reasonable gap
*
* @param expectedNumNodes the number of nodes we are expecting to see in the sink cluster
* @throws Exception
*/
protected void runVerify(long expectedNumNodes) throws Exception {
Path outputPath = new Path(outputDir);
UUID uuid = UUID.randomUUID(); //create a random UUID.
Path iterationOutput = new Path(outputPath, uuid.toString());
Verify verify = new Verify();
verify.setConf(sink.getConfiguration());
int retCode = verify.run(iterationOutput, numReducers);
if (retCode > 0) {
throw new RuntimeException("Verify.run failed with return code: " + retCode);
}
if (!verify.verify(expectedNumNodes)) {
throw new RuntimeException("Verify.verify failed");
}
LOG.info("Verify finished with success. Total nodes=" + expectedNumNodes);
}
/**
* The main test runner
*
* This test has 4 steps:
* 1: setupTablesAndReplication
* 2: generate the data into the source cluster
* 3: wait for replication to propagate
* 4: verify that the data is available in the sink cluster
*
* @param args should be empty
* @return 0 on success
* @throws Exception on an error
*/
@Override
public int run(String[] args) throws Exception {
source = new ClusterID(getConf(), sourceClusterIdString);
sink = new ClusterID(getConf(), sinkClusterIdString);
if (!noReplicationSetup) {
setupTablesAndReplication();
}
int expectedNumNodes = 0;
for (int i = 0; i < numIterations; i++) {
LOG.info("Starting iteration = " + i);
expectedNumNodes += numMappers * numNodes;
runGenerator();
waitForReplication();
runVerify(expectedNumNodes);
}
/**
* we are always returning 0 because exceptions are thrown when there is an error
* in the verification step.
*/
return 0;
}
}
@Override
protected void addOptions() {
super.addOptions();
addRequiredOptWithArg("s", SOURCE_CLUSTER_OPT,
"Cluster ID of the source cluster (e.g. localhost:2181:/hbase)");
addRequiredOptWithArg("r", DEST_CLUSTER_OPT,
"Cluster ID of the sink cluster (e.g. localhost:2182:/hbase)");
addRequiredOptWithArg("d", OUTPUT_DIR_OPT,
"Temporary directory where to write keys for the test");
addOptWithArg("nm", NUM_MAPPERS_OPT,
"Number of mappers (default: " + DEFAULT_NUM_MAPPERS + ")");
addOptWithArg("nr", NUM_REDUCERS_OPT,
"Number of reducers (default: " + DEFAULT_NUM_MAPPERS + ")");
addOptNoArg("nrs", NO_REPLICATION_SETUP_OPT,
"Don't setup tables or configure replication before starting test");
addOptWithArg("n", NUM_NODES_OPT,
"Number of nodes. This should be a multiple of width * wrapMultiplier." +
" (default: " + DEFAULT_NUM_NODES + ")");
addOptWithArg("i", ITERATIONS_OPT, "Number of iterations to run (default: " +
DEFAULT_NUM_ITERATIONS + ")");
addOptWithArg("t", GENERATE_VERIFY_GAP_OPT,
"Gap between generate and verify steps in seconds (default: " +
DEFAULT_GENERATE_VERIFY_GAP + ")");
addOptWithArg("w", WIDTH_OPT,
"Width of the linked list chain (default: " + DEFAULT_WIDTH + ")");
addOptWithArg("wm", WRAP_MULTIPLIER_OPT, "How many times to wrap around (default: " +
DEFAULT_WRAP_MULTIPLIER + ")");
}
@Override
protected void processOptions(CommandLine cmd) {
processBaseOptions(cmd);
sourceClusterIdString = cmd.getOptionValue(SOURCE_CLUSTER_OPT);
sinkClusterIdString = cmd.getOptionValue(DEST_CLUSTER_OPT);
outputDir = cmd.getOptionValue(OUTPUT_DIR_OPT);
/** This uses parseInt from {@link org.apache.hadoop.hbase.util.AbstractHBaseTool} */
numMappers = parseInt(cmd.getOptionValue(NUM_MAPPERS_OPT,
Integer.toString(DEFAULT_NUM_MAPPERS)),
1, Integer.MAX_VALUE);
numReducers = parseInt(cmd.getOptionValue(NUM_REDUCERS_OPT,
Integer.toString(DEFAULT_NUM_REDUCERS)),
1, Integer.MAX_VALUE);
numNodes = parseInt(cmd.getOptionValue(NUM_NODES_OPT, Integer.toString(DEFAULT_NUM_NODES)),
1, Integer.MAX_VALUE);
generateVerifyGap = parseInt(cmd.getOptionValue(GENERATE_VERIFY_GAP_OPT,
Integer.toString(DEFAULT_GENERATE_VERIFY_GAP)),
1, Integer.MAX_VALUE);
numIterations = parseInt(cmd.getOptionValue(ITERATIONS_OPT,
Integer.toString(DEFAULT_NUM_ITERATIONS)),
1, Integer.MAX_VALUE);
width = parseInt(cmd.getOptionValue(WIDTH_OPT, Integer.toString(DEFAULT_WIDTH)),
1, Integer.MAX_VALUE);
wrapMultiplier = parseInt(cmd.getOptionValue(WRAP_MULTIPLIER_OPT,
Integer.toString(DEFAULT_WRAP_MULTIPLIER)),
1, Integer.MAX_VALUE);
if (cmd.hasOption(NO_REPLICATION_SETUP_OPT)) {
noReplicationSetup = true;
}
if (numNodes % (width * wrapMultiplier) != 0) {
throw new RuntimeException("numNodes must be a multiple of width and wrap multiplier");
}
}
  @Override
  public int runTestFromCommandLine() throws Exception {
    // Wire this test instance into the verification tool so the tool can reach
    // the options parsed above, then run it as a Hadoop Tool with the current
    // configuration. The returned value is the tool's exit status.
    VerifyReplicationLoop tool = new VerifyReplicationLoop();
    tool.integrationTestBigLinkedList = this;
    return ToolRunner.run(getConf(), tool, null);
  }
public static void main(String[] args) throws Exception {
Configuration conf = HBaseConfiguration.create();
IntegrationTestingUtility.setUseDistributedCluster(conf);
int ret = ToolRunner.run(conf, new IntegrationTestReplication(), args);
System.exit(ret);
}
}
| apache-2.0 |
apache/calcite | testkit/src/main/java/org/apache/calcite/test/SqlRuntimeTester.java | 3405 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.test;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.StringAndPos;
import org.apache.calcite.sql.test.AbstractSqlTester;
import org.apache.calcite.sql.test.SqlTestFactory;
import org.apache.calcite.sql.test.SqlTests;
import org.apache.calcite.sql.validate.SqlValidator;
import org.checkerframework.checker.nullness.qual.Nullable;
import static org.junit.jupiter.api.Assertions.assertNotNull;
/**
 * Tester of {@link SqlValidator} and runtime execution of the input SQL.
 *
 * <p>Expressions are either validated only, or additionally executed when the
 * {@code runtime} flag is set, and the resulting exception (if any) is matched
 * against the expected error pattern.
 */
class SqlRuntimeTester extends AbstractSqlTester {
  SqlRuntimeTester() {
  }

  @Override public void checkFails(SqlTestFactory factory, StringAndPos sap,
      String expectedError, boolean runtime) {
    // At runtime the expression is wrapped so that it is actually evaluated,
    // not merely validated.
    final String query;
    if (runtime) {
      query = buildQuery2(factory, sap.addCarets());
    } else {
      query = buildQuery(sap.addCarets());
    }
    assertExceptionIsThrown(factory, StringAndPos.of(query), expectedError, runtime);
  }

  @Override public void checkAggFails(SqlTestFactory factory,
      String expr,
      String[] inputValues,
      String expectedError,
      boolean runtime) {
    // Wrap the aggregate expression in a query over the supplied input rows.
    final StringAndPos sap =
        StringAndPos.of(SqlTests.generateAggQuery(expr, inputValues));
    assertExceptionIsThrown(factory, sap, expectedError, runtime);
  }

  @Override public void assertExceptionIsThrown(SqlTestFactory factory,
      StringAndPos sap, @Nullable String expectedMsgPattern) {
    // Validation-only variant.
    assertExceptionIsThrown(factory, sap, expectedMsgPattern, false);
  }

  /**
   * Parses, validates, and (optionally) executes the query, then checks that
   * the exception raised at the relevant stage matches the expected pattern.
   */
  public void assertExceptionIsThrown(SqlTestFactory factory,
      StringAndPos sap, @Nullable String expectedMsgPattern, boolean runtime) {
    final SqlNode sqlNode;
    try {
      sqlNode = parseQuery(factory, sap.sql);
    } catch (Throwable e) {
      // Parsing itself failed; verify the parse error matches and stop here.
      checkParseEx(e, expectedMsgPattern, sap);
      return;
    }

    final SqlValidator validator = factory.createValidator();
    Throwable thrown = null;
    final SqlTests.Stage stage;
    if (runtime) {
      stage = SqlTests.Stage.RUNTIME;
      // Validation must succeed before the runtime check is attempted.
      SqlNode validated = validator.validate(sqlNode);
      assertNotNull(validated);
      try {
        check(factory, sap.sql, SqlTests.ANY_TYPE_CHECKER,
            SqlTests.ANY_PARAMETER_CHECKER, SqlTests.ANY_RESULT_CHECKER);
      } catch (Throwable ex) {
        // Capture the real exception raised during execution.
        thrown = ex;
      }
    } else {
      stage = SqlTests.Stage.VALIDATE;
      try {
        validator.validate(sqlNode);
      } catch (Throwable ex) {
        thrown = ex;
      }
    }
    SqlTests.checkEx(thrown, expectedMsgPattern, sap, stage);
  }
}
| apache-2.0 |
Teradata/presto | presto-main/src/test/java/com/facebook/presto/operator/scalar/TestZipWithFunction.java | 5166 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.operator.scalar;
import com.facebook.presto.spi.type.ArrayType;
import com.facebook.presto.spi.type.RowType;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.testng.annotations.Test;
import java.util.Optional;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.IntegerType.INTEGER;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.spi.type.VarcharType.createVarcharType;
import static com.facebook.presto.type.UnknownType.UNKNOWN;
import static com.facebook.presto.util.StructuralTestUtil.mapType;
import static java.util.Arrays.asList;
/**
 * Tests for the {@code zip_with} scalar function, which combines two arrays
 * element-wise by applying a binary lambda to corresponding elements.
 */
public class TestZipWithFunction
        extends AbstractTestFunctions
{
    // Guards against unbounded growth of cached instances for the lambda.
    @Test
    public void testRetainedSizeBounded()
    {
        assertCachedInstanceHasBoundedRetainedSize("zip_with(ARRAY [25, 26, 27], ARRAY [1, 2, 3], (x, y) -> x + y)");
    }

    // Happy path: both input arrays have the same length, across a range of
    // element types (rows, integers, bigints, booleans, varchars, maps).
    @Test
    public void testSameLength()
    {
        assertFunction("zip_with(ARRAY[], ARRAY[], (x, y) -> (y, x))",
                new ArrayType(new RowType(ImmutableList.of(UNKNOWN, UNKNOWN), Optional.empty())),
                ImmutableList.of());
        assertFunction("zip_with(ARRAY[1, 2], ARRAY['a', 'b'], (x, y) -> (y, x))",
                new ArrayType(new RowType(ImmutableList.of(createVarcharType(1), INTEGER), Optional.empty())),
                ImmutableList.of(ImmutableList.of("a", 1), ImmutableList.of("b", 2)));
        assertFunction("zip_with(ARRAY[1, 2], ARRAY[CAST('a' AS VARCHAR), CAST('b' AS VARCHAR)], (x, y) -> (y, x))",
                new ArrayType(new RowType(ImmutableList.of(VARCHAR, INTEGER), Optional.empty())),
                ImmutableList.of(ImmutableList.of("a", 1), ImmutableList.of("b", 2)));
        assertFunction("zip_with(ARRAY[1, 1], ARRAY[1, 2], (x, y) -> x + y)",
                new ArrayType(INTEGER),
                ImmutableList.of(2, 3));
        assertFunction("zip_with(CAST(ARRAY[3, 5] AS ARRAY(BIGINT)), CAST(ARRAY[1, 2] AS ARRAY(BIGINT)), (x, y) -> x * y)",
                new ArrayType(BIGINT),
                ImmutableList.of(3L, 10L));
        assertFunction("zip_with(ARRAY[true, false], ARRAY[false, true], (x, y) -> x OR y)",
                new ArrayType(BOOLEAN),
                ImmutableList.of(true, true));
        assertFunction("zip_with(ARRAY['a', 'b'], ARRAY['c', 'd'], (x, y) -> concat(x, y))",
                new ArrayType(VARCHAR),
                ImmutableList.of("ac", "bd"));
        assertFunction("zip_with(ARRAY[MAP(ARRAY[CAST ('a' AS VARCHAR)], ARRAY[1]), MAP(ARRAY[CAST('b' AS VARCHAR)], ARRAY[2])], ARRAY[MAP(ARRAY['c'], ARRAY[3]), MAP()], (x, y) -> map_concat(x, y))",
                new ArrayType(mapType(VARCHAR, INTEGER)),
                ImmutableList.of(ImmutableMap.of("a", 1, "c", 3), ImmutableMap.of("b", 2)));
    }

    // Mismatched array lengths must fail regardless of null elements.
    @Test
    public void testDifferentLength()
    {
        assertInvalidFunction("zip_with(ARRAY[1], ARRAY['a', 'b'], (x, y) -> (y, x))", "Arrays must have the same length");
        assertInvalidFunction("zip_with(ARRAY[NULL, 2], ARRAY['a'], (x, y) -> (y, x))", "Arrays must have the same length");
        assertInvalidFunction("zip_with(ARRAY[1, NULL], ARRAY[NULL, 2, 1], (x, y) -> x + y)", "Arrays must have the same length");
    }

    // NULL handling: a NULL array yields NULL, NULL elements flow through the
    // lambda, and a lambda may itself return NULL.
    @Test
    public void testWithNull()
    {
        assertFunction("zip_with(CAST(NULL AS ARRAY(UNKNOWN)), ARRAY[], (x, y) -> (y, x))",
                new ArrayType(new RowType(ImmutableList.of(UNKNOWN, UNKNOWN), Optional.empty())),
                null);
        assertFunction("zip_with(ARRAY[NULL], ARRAY[NULL], (x, y) -> (y, x))",
                new ArrayType(new RowType(ImmutableList.of(UNKNOWN, UNKNOWN), Optional.empty())),
                ImmutableList.of(asList(null, null)));
        assertFunction("zip_with(ARRAY[NULL], ARRAY[NULL], (x, y) -> x IS NULL AND y IS NULL)",
                new ArrayType(BOOLEAN),
                ImmutableList.of(true));
        assertFunction("zip_with(ARRAY['a', NULL], ARRAY[NULL, 1], (x, y) -> x IS NULL OR y IS NULL)",
                new ArrayType(BOOLEAN),
                ImmutableList.of(true, true));
        assertFunction("zip_with(ARRAY[1, NULL], ARRAY[3, 4], (x, y) -> x + y)",
                new ArrayType(INTEGER),
                asList(4, null));
        assertFunction("zip_with(ARRAY['a', 'b'], ARRAY[1, 3], (x, y) -> NULL)",
                new ArrayType(UNKNOWN),
                asList(null, null));
    }
}
| apache-2.0 |
jdeppe-pivotal/geode | geode-core/src/main/java/org/apache/geode/admin/jmx/internal/SystemMemberRegionJmxImpl.java | 4385 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.admin.jmx.internal;
import javax.management.ObjectName;
import javax.management.modelmbean.ModelMBean;
import org.apache.geode.admin.internal.SystemMemberCacheImpl;
import org.apache.geode.cache.Region;
import org.apache.geode.internal.admin.GemFireVM;
/**
 * MBean representation of {@link org.apache.geode.admin.SystemMemberRegion}.
 *
 * @since GemFire 3.5
 */
public class SystemMemberRegionJmxImpl
    extends org.apache.geode.admin.internal.SystemMemberRegionImpl
    implements org.apache.geode.admin.jmx.internal.ManagedResource {

  /** The object name of this managed resource */
  private ObjectName objectName;

  /** The name of the MBean that will manage this resource */
  private String mbeanName;

  /** The ModelMBean that is configured to manage this resource */
  private ModelMBean modelMBean;

  // -------------------------------------------------------------------------
  // Constructor(s)
  // -------------------------------------------------------------------------

  /**
   * Constructs an instance of SystemMemberRegionJmxImpl and registers the
   * MBean that manages it.
   *
   * @param cache the cache this region belongs to
   * @param region internal region to delegate real work to
   */
  public SystemMemberRegionJmxImpl(SystemMemberCacheImpl cache, Region region)
      throws org.apache.geode.admin.AdminException {
    super(cache, region);
    initializeMBean(cache);
  }

  /** Builds the compliant MBean name, then creates and registers the MBean. */
  private void initializeMBean(SystemMemberCacheImpl cache)
      throws org.apache.geode.admin.AdminException {
    GemFireVM vm = cache.getVM();
    StringBuilder name = new StringBuilder("GemFire.Cache:");
    name.append("path=")
        .append(MBeanUtils.makeCompliantMBeanNameProperty(getFullPath()))
        .append(",name=")
        .append(MBeanUtils.makeCompliantMBeanNameProperty(cache.getName()))
        .append(",id=").append(cache.getId())
        .append(",owner=")
        .append(MBeanUtils.makeCompliantMBeanNameProperty(vm.getId().toString()))
        .append(",type=Region");
    mbeanName = name.toString();
    objectName = MBeanUtils.createMBean(this);
  }

  // -------------------------------------------------------------------------
  // ManagedResource implementation
  // -------------------------------------------------------------------------

  @Override
  public String getMBeanName() {
    return mbeanName;
  }

  @Override
  public ModelMBean getModelMBean() {
    return modelMBean;
  }

  @Override
  public void setModelMBean(ModelMBean modelMBean) {
    this.modelMBean = modelMBean;
  }

  @Override
  public ObjectName getObjectName() {
    return objectName;
  }

  @Override
  public ManagedResourceType getManagedResourceType() {
    return ManagedResourceType.SYSTEM_MEMBER_REGION;
  }

  /** Nothing to release for this managed resource. */
  @Override
  public void cleanupResource() {}

  /**
   * Checks equality of the given object with <code>this</code> based on the
   * exact wrapper type and the MBean Name returned by
   * <code>getMBeanName()</code>.
   *
   * @param obj object to check equality with
   * @return true if the given object is of the same type and its MBean Name is
   *         the same as <code>this</code> object's MBean Name, false otherwise
   */
  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof SystemMemberRegionJmxImpl)) {
      return false;
    }
    return getMBeanName().equals(((SystemMemberRegionJmxImpl) obj).getMBeanName());
  }

  /**
   * Hash code derived from the generated MBean Name, keeping it consistent
   * with {@link #equals(Object)}.
   *
   * @return hash code for <code>this</code> object
   */
  @Override
  public int hashCode() {
    return getMBeanName().hashCode();
  }
}
| apache-2.0 |
houbie/lesscss | src/main/java/com/github/houbie/lesscss/engine/ScriptEngineLessCompilationEngine.java | 4246 | /*
* Copyright (c) 2013 Houbrechts IT
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.houbie.lesscss.engine;
import com.github.houbie.lesscss.LessParseException;
import com.github.houbie.lesscss.resourcereader.ResourceReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.SequenceInputStream;
import java.util.Map;
import static com.github.houbie.lesscss.LessCompiler.CompilationDetails;
/**
 * LessCompilationEngine implementation that uses a standard {@link javax.script.ScriptEngine} implementation.
 */
public class ScriptEngineLessCompilationEngine implements LessCompilationEngine {
    private static final Logger logger = LoggerFactory.getLogger(ScriptEngineLessCompilationEngine.class);

    // Bundled JavaScript resources: either one pre-minified bundle, or the three
    // individual scripts concatenated at load time.
    private static final String JS_ALL_MIN_JS = "js/all-min.js";
    private static final String LESS_SCRIPT = "js/less-rhino-1.7.0-mod.js";
    private static final String MINIFY_SCRIPT = "js/cssmin.js";
    private static final String COMPILE_SCRIPT = "js/compile.js";
    private static final boolean MINIFIED = true;

    private ScriptEngine scriptEngine;

    /**
     * @param scriptEngineName the name of the underlying ScriptEngine (e.g. "nashorn", "rhino", ...)
     */
    public ScriptEngineLessCompilationEngine(String scriptEngineName) {
        // Log the engine actually requested; the previous message always said
        // "NashornEngine" even when another engine name was passed.
        logger.info("creating new engine with name {}", scriptEngineName);
        ScriptEngineManager factory = new ScriptEngineManager();
        scriptEngine = factory.getEngineByName(scriptEngineName);
        if (scriptEngine == null) {
            throw new RuntimeException("The ScriptEngine " + scriptEngineName + " could not be loaded");
        }
    }

    /**
     * @param scriptEngine the underlying ScriptEngine
     */
    public ScriptEngineLessCompilationEngine(ScriptEngine scriptEngine) {
        logger.info("creating new engine with {}", scriptEngine.getClass());
        this.scriptEngine = scriptEngine;
    }

    /**
     * Loads the bundled LESS compiler scripts (plus any custom JavaScript)
     * into the engine. Must be called before {@link #compile}.
     *
     * @param customJavaScriptReader optional extra JavaScript evaluated first; may be null
     */
    @Override
    public void initialize(Reader customJavaScriptReader) {
        try {
            if (customJavaScriptReader != null) {
                scriptEngine.eval(customJavaScriptReader);
            }
            scriptEngine.eval(getLessScriptReader());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Returns a reader over the bundled compiler scripts from the classpath. */
    private Reader getLessScriptReader() {
        ClassLoader cl = getClass().getClassLoader();
        InputStream concatenatedScripts;
        if (MINIFIED) {
            concatenatedScripts = cl.getResourceAsStream(JS_ALL_MIN_JS);
        } else {
            concatenatedScripts = new SequenceInputStream(cl.getResourceAsStream(LESS_SCRIPT), new SequenceInputStream(cl.getResourceAsStream(MINIFY_SCRIPT), cl.getResourceAsStream(COMPILE_SCRIPT)));
        }
        // NOTE(review): uses the platform default charset; the bundled scripts are
        // presumably ASCII/UTF-8 — confirm before switching to an explicit charset.
        return new InputStreamReader(concatenatedScripts);
    }

    /**
     * Compiles LESS source by invoking the JavaScript "compile" function.
     *
     * @param less               the LESS source text
     * @param compilationOptions compiler options passed through to JavaScript
     * @param resourceReader     resolves {@code @import}ed resources
     * @return the resulting CSS and optional source map
     * @throws LessParseException if the LESS source fails to parse
     * @throws RuntimeException   if script invocation fails for any other reason
     */
    @Override
    public CompilationDetails compile(String less, CompilationOptions compilationOptions, ResourceReader resourceReader) {
        Map result;
        try {
            result = (Map) ((Invocable) scriptEngine).invokeFunction("compile", less, compilationOptions, resourceReader);
        } catch (Exception e) {
            throw new RuntimeException("Exception while compiling less", e);
        }
        if (result.get("parseException") != null) {
            throw new LessParseException((String) result.get("parseException"));
        }
        return new CompilationDetails((String) result.get("css"), (String) result.get("sourceMapContent"));
    }

    public ScriptEngine getScriptEngine() {
        return scriptEngine;
    }
}
| apache-2.0 |
intrigus/jtransc | jtransc-main/test/issues/issue130/Impl_0.java | 134 | package issues.issue130;
public class Impl_0 {
  /** Publicly mutable counter; starts at zero. */
  public int a = 0;

  /**
   * Writes the given text to standard output.
   *
   * @param message the text to print
   */
  protected void printMe(String message) {
    System.out.println(message);
  }
}
| apache-2.0 |
apache/solr | solr/core/src/java/org/apache/solr/handler/export/StringValue.java | 4760 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.export;
import java.io.IOException;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues;
import org.apache.lucene.index.OrdinalMap;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues;
/**
 * Sort value over a string (SortedDocValues) field for the export writer.
 * Tracks the current document's ordinal per leaf segment and can translate it
 * to a global ordinal via an {@link OrdinalMap} when one exists.
 */
class StringValue implements SortValue {

  private final SortedDocValues globalDocValues;

  // Maps per-segment ordinals to global ordinals; null when the index has a
  // single segment view (no mapping needed).
  private final OrdinalMap ordinalMap;
  private final String field;
  private final IntComp comp;

  protected LongValues toGlobal = LongValues.IDENTITY; // this segment to global ordinal. NN;
  protected SortedDocValues docValues;

  public int currentOrd;
  protected int lastDocID;
  // True when the current doc actually has a value for this field.
  private boolean present;

  // Cache of the last looked-up ordinal/bytes/string to avoid repeated lookups.
  private BytesRef lastBytes;
  private String lastString;
  private int lastOrd = -1;
  private int leafOrd = -1;

  public StringValue(SortedDocValues globalDocValues, String field, IntComp comp) {
    this.globalDocValues = globalDocValues;
    this.docValues = globalDocValues;
    if (globalDocValues instanceof MultiDocValues.MultiSortedDocValues) {
      this.ordinalMap = ((MultiDocValues.MultiSortedDocValues) globalDocValues).mapping;
    } else {
      this.ordinalMap = null;
    }
    this.field = field;
    this.comp = comp;
    // resetValue() supplies the sentinel "no value yet" ordinal for this sort direction.
    this.currentOrd = comp.resetValue();
    this.present = false;
  }

  public String getLastString() {
    return this.lastString;
  }

  public void setLastString(String lastString) {
    this.lastString = lastString;
  }

  /** Returns a fresh StringValue over the same field/comparator with reset state. */
  public StringValue copy() {
    StringValue copy = new StringValue(globalDocValues, field, comp);
    return copy;
  }

  /**
   * Positions the doc values on docId and records whether a value is present,
   * caching the (leaf-level) ordinal in currentOrd.
   */
  public void setCurrentValue(int docId) throws IOException {
    // System.out.println(docId +":"+lastDocID);
    /*
    if (docId < lastDocID) {
      throw new AssertionError("docs were sent out-of-order: lastDocID=" + lastDocID + " vs doc=" + docId);
    }
    lastDocID = docId;
    */
    if (docId > docValues.docID()) {
      docValues.advance(docId);
    }
    if (docId == docValues.docID()) {
      present = true;
      currentOrd = docValues.ordValue();
    } else {
      present = false;
      currentOrd = -1;
    }
  }

  @Override
  public boolean isPresent() {
    return present;
  }

  /** Copies the mutable comparison state from another StringValue. */
  public void setCurrentValue(SortValue sv) {
    StringValue v = (StringValue) sv;
    this.currentOrd = v.currentOrd;
    this.present = v.present;
    this.leafOrd = v.leafOrd;
    this.lastOrd = v.lastOrd;
    this.toGlobal = v.toGlobal;
  }

  /**
   * Returns the BytesRef for the current ordinal, re-using the cached lookup
   * when the ordinal has not changed since the previous call.
   */
  public Object getCurrentValue() throws IOException {
    assert present == true;
    if (currentOrd != lastOrd) {
      lastBytes = docValues.lookupOrd(currentOrd);
      lastOrd = currentOrd;
      lastString = null;
    }
    return lastBytes;
  }

  /**
   * Converts currentOrd from a leaf ordinal to a global ordinal, re-using the
   * previous value's mapping when both refer to the same leaf ordinal.
   * NOTE(review): -1 ("missing") is the same in leaf and global space, hence
   * the special-casing below — confirm against the OrdinalMap contract.
   */
  public void toGlobalValue(SortValue previousValue) {
    lastOrd = currentOrd;
    StringValue sv = (StringValue) previousValue;
    if (sv.lastOrd == currentOrd) {
      // Take the global ord from the previousValue unless we are a -1 which is the same in both
      // global and leaf ordinal
      if (this.currentOrd != -1) {
        this.currentOrd = sv.currentOrd;
      }
    } else {
      if (this.currentOrd > -1) {
        this.currentOrd = (int) toGlobal.get(this.currentOrd);
      }
    }
  }

  public String getField() {
    return field;
  }

  /** Switches to a new leaf segment, refreshing doc values and the ord mapping. */
  public void setNextReader(LeafReaderContext context) throws IOException {
    leafOrd = context.ord;
    if (ordinalMap != null) {
      toGlobal = ordinalMap.getGlobalOrds(context.ord);
    }
    docValues = DocValues.getSorted(context.reader(), field);
    lastDocID = 0;
  }

  /** Resets comparison state for reuse (e.g. between export batches). */
  public void reset() {
    this.currentOrd = comp.resetValue();
    this.present = false;
    lastDocID = 0;
  }

  /** Compares by ordinal only; both values are assumed to be in the same ordinal space. */
  public int compareTo(SortValue o) {
    StringValue sv = (StringValue) o;
    return comp.compare(currentOrd, sv.currentOrd);
  }

  public String toString() {
    return Integer.toString(this.currentOrd);
  }
}
| apache-2.0 |
OnurKirkizoglu/master_thesis | at.jku.sea.cloud.rest/src/main/java/at/jku/sea/cloud/rest/pojo/stream/provider/PojoCollectionArtifactProvider.java | 932 | package at.jku.sea.cloud.rest.pojo.stream.provider;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonTypeInfo.Id;
import com.fasterxml.jackson.annotation.JsonTypeName;
import at.jku.sea.cloud.rest.pojo.PojoCollectionArtifact;
/**
 * REST transfer object wrapping a {@link PojoCollectionArtifact} as a stream
 * provider. The {@code __type} property carries the concrete subtype name
 * ("CollectionArtifactProvider") for polymorphic JSON (de)serialization.
 */
@JsonTypeInfo(use = Id.NAME, property = "__type")
@JsonTypeName(value = "CollectionArtifactProvider")
public class PojoCollectionArtifactProvider extends PojoProvider {

  // The wrapped collection artifact; may be null until set by the deserializer.
  private PojoCollectionArtifact collectionArtifact;

  /** No-arg constructor required by Jackson. */
  public PojoCollectionArtifactProvider() {
  }

  public PojoCollectionArtifactProvider(PojoCollectionArtifact collectionArtifact) {
    this.collectionArtifact = collectionArtifact;
  }

  public PojoCollectionArtifact getCollectionArtifact() {
    return collectionArtifact;
  }

  public void setCollectionArtifact(PojoCollectionArtifact collectionArtifact) {
    this.collectionArtifact = collectionArtifact;
  }
}
| apache-2.0 |
sdaschner/scalable-coffee-shop | beans/src/main/java/com/sebastian_daschner/scalable_coffee_shop/beans/boundary/BeansResource.java | 1063 | package com.sebastian_daschner.scalable_coffee_shop.beans.boundary;
import javax.inject.Inject;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
/**
 * JAX-RS resource exposing the coffee-bean inventory under {@code /beans}.
 */
@Path("beans")
public class BeansResource {

    @Inject
    BeanCommandService commandService;

    @Inject
    BeanQueryService queryService;

    /**
     * Returns the stored beans as a JSON object mapping each bean origin to
     * its stored amount.
     */
    @GET
    public JsonObject getBeans() {
        final JsonObjectBuilder builder = Json.createObjectBuilder();
        queryService.getStoredBeans()
                .entrySet().forEach(entry -> builder.add(entry.getKey(), entry.getValue()));
        return builder.build();
    }

    /**
     * Stores beans described by {@code {"beanOrigin": ..., "amount": ...}}.
     * Rejects requests with a missing origin or a missing/zero amount.
     */
    @POST
    public void storeBeans(JsonObject object) {
        final String beanOrigin = object.getString("beanOrigin", null);
        final int amount = object.getInt("amount", 0);

        if (beanOrigin != null && amount != 0) {
            commandService.storeBeans(beanOrigin, amount);
            return;
        }
        throw new BadRequestException();
    }
}
| apache-2.0 |
jwren/intellij-community | platform/execution/src/com/intellij/execution/configurations/RuntimeConfigurationException.java | 1497 | // Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.execution.configurations;
import com.intellij.execution.ExecutionBundle;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.openapi.util.NlsContexts.DialogMessage;
import com.intellij.util.ThrowableRunnable;
import javax.swing.*;
import static com.intellij.openapi.util.NlsContexts.DialogTitle;
/**
 * Thrown when a run configuration fails validation; carries the dialog
 * message/title shown to the user.
 */
public class RuntimeConfigurationException extends ConfigurationException {
  /**
   * @param message the validation error shown to the user
   * @param title   the dialog title
   */
  public RuntimeConfigurationException(@DialogMessage String message, @DialogTitle String title) {
    super(message, title);
  }

  /** Uses the standard "Run Configuration Error" dialog title. */
  public RuntimeConfigurationException(@DialogMessage String message) {
    super(message, ExecutionBundle.message("run.configuration.error.dialog.title"));
  }

  /** Same as above, preserving the underlying cause. */
  public RuntimeConfigurationException(@DialogMessage String message, Throwable cause) {
    super(message, cause, ExecutionBundle.message("run.configuration.error.dialog.title"));
  }

  /**
   * Runs the given validation action and converts its outcome into a
   * {@link ValidationInfo} attached to the component: an empty message on
   * success, the throwable's message on failure.
   * {@link ProcessCanceledException} is rethrown, never swallowed.
   * NOTE(review): t.getMessage() may be null for some throwables — confirm
   * ValidationInfo tolerates a null message.
   */
  public static <T extends Throwable> ValidationInfo validate(JComponent component, ThrowableRunnable<T> runnable) {
    try {
      runnable.run();
      return new ValidationInfo("", component);
    }
    catch (ProcessCanceledException e) {
      throw e;
    }
    catch (Throwable t) {
      return new ValidationInfo(t.getMessage(), component);
    }
  }
} | apache-2.0 |
sreedishps/pintail | messaging-client-core/src/main/java/com/inmobi/messaging/Message.java | 2267 | package com.inmobi.messaging;
/*
* #%L
* messaging-client-core
* %%
* Copyright (C) 2012 - 2014 InMobi
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.nio.ByteBuffer;
/**
 * Message class holding the payload data as a {@link ByteBuffer}.
 */
public final class Message implements MessageBase {

  private ByteBuffer data;

  public Message() {
  }

  /**
   * Create new message with {@link ByteBuffer}
   *
   * @param data The {@link ByteBuffer}
   */
  public Message(ByteBuffer data) {
    this.data = data;
  }

  /**
   * Create new message wrapping a byte array.
   *
   * @param data The byte array.
   */
  public Message(byte[] data) {
    this.data = ByteBuffer.wrap(data);
  }

  /**
   * Get the data associated with message.
   *
   * @return {@link ByteBuffer} holding the data.
   */
  public ByteBuffer getData() {
    return data;
  }

  public synchronized void set(ByteBuffer data) {
    this.data = data;
  }

  public synchronized void clear() {
    data.clear();
  }

  /** Size in bytes, taken from the buffer's limit. */
  public long getSize() {
    return data.limit();
  }

  @Override
  public int hashCode() {
    // Equivalent to the classic 31 * 1 + (data == null ? 0 : data.hashCode()).
    return 31 + ((data == null) ? 0 : data.hashCode());
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    Message other = (Message) obj;
    return (data == null) ? (other.data == null) : data.equals(other.data);
  }

  /** Shallow copy sharing the underlying bytes via {@link ByteBuffer#duplicate()}. */
  @Override
  public Message clone() {
    return new Message(data.duplicate());
  }
}
| apache-2.0 |
psakar/Resteasy | resteasy-jaxrs/src/main/java/org/jboss/resteasy/client/core/ClientInvoker.java | 6212 | package org.jboss.resteasy.client.core;
import org.jboss.resteasy.client.ClientExecutor;
import org.jboss.resteasy.client.ClientRequest;
import org.jboss.resteasy.client.ClientResponse;
import org.jboss.resteasy.client.ProxyConfig;
import org.jboss.resteasy.client.core.extractors.ClientErrorHandler;
import org.jboss.resteasy.client.core.extractors.ClientRequestContext;
import org.jboss.resteasy.client.core.extractors.EntityExtractor;
import org.jboss.resteasy.client.core.extractors.EntityExtractorFactory;
import org.jboss.resteasy.client.core.marshallers.ClientMarshallerFactory;
import org.jboss.resteasy.client.core.marshallers.Marshaller;
import org.jboss.resteasy.client.exception.mapper.ClientExceptionMapper;
import org.jboss.resteasy.resteasy_jaxrs.i18n.Messages;
import org.jboss.resteasy.specimpl.ResteasyUriBuilder;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.jboss.resteasy.util.MediaTypeHelper;
import javax.ws.rs.Path;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.ext.Providers;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
/**
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
@SuppressWarnings("unchecked")
public class ClientInvoker extends ClientInterceptorRepositoryImpl implements MethodInvoker
{
protected ResteasyProviderFactory providerFactory;
protected String httpMethod;
protected ResteasyUriBuilder uri;
protected Method method;
protected Class declaring;
protected MediaType accepts;
protected Marshaller[] marshallers;
protected ClientExecutor executor;
protected boolean followRedirects;
protected EntityExtractor extractor;
protected EntityExtractorFactory extractorFactory;
protected URI baseUri;
protected Map<String, Object> attributes = new HashMap<String, Object>();
public ClientInvoker(URI baseUri, Class declaring, Method method, ResteasyProviderFactory providerFactory, ClientExecutor executor, EntityExtractorFactory extractorFactory)
{
this(baseUri, declaring, method, new ProxyConfig(null, executor, providerFactory, extractorFactory, null, null, null));
}
public ClientInvoker(URI baseUri, Class declaring, Method method, ProxyConfig config)
{
this.declaring = declaring;
this.method = method;
this.marshallers = ClientMarshallerFactory.createMarshallers(declaring, method, providerFactory, config.getServerConsumes());
this.providerFactory = config.getProviderFactory();
this.executor = config.getExecutor();
accepts = MediaTypeHelper.getProduces(declaring, method, config.getServerProduces());
this.uri = new ResteasyUriBuilder();
this.baseUri = baseUri;
uri.uri(baseUri);
if (declaring.isAnnotationPresent(Path.class)) uri.path(declaring);
if (method.isAnnotationPresent(Path.class)) uri.path(method);
this.extractorFactory = config.getExtractorFactory();
this.extractor = extractorFactory.createExtractor(method);
}
public Map<String, Object> getAttributes()
{
return attributes;
}
public MediaType getAccepts()
{
return accepts;
}
public Method getMethod()
{
return method;
}
public Class getDeclaring()
{
return declaring;
}
public ResteasyProviderFactory getProviderFactory()
{
return providerFactory;
}
public Object invoke(Object[] args)
{
boolean isProvidersSet = ResteasyProviderFactory.getContextData(Providers.class) != null;
if (!isProvidersSet) ResteasyProviderFactory.pushContext(Providers.class, providerFactory);
try
{
if (uri == null) throw new RuntimeException(Messages.MESSAGES.baseURINotSetForClientProxy());
ClientRequest request = createRequest(args);
BaseClientResponse clientResponse = null;
try
{
clientResponse = (BaseClientResponse) request.httpMethod(httpMethod);
}
catch (Exception e)
{
ClientExceptionMapper<Exception> mapper = providerFactory.getClientExceptionMapper(Exception.class);
if (mapper != null)
{
throw mapper.toException(e);
}
throw new RuntimeException(e);
}
ClientErrorHandler errorHandler = new ClientErrorHandler(providerFactory.getClientErrorInterceptors());
clientResponse.setAttributeExceptionsTo(method.toString());
clientResponse.setAnnotations(method.getAnnotations());
ClientRequestContext clientRequestContext = new ClientRequestContext(request, clientResponse, errorHandler, extractorFactory, baseUri);
return extractor.extractEntity(clientRequestContext);
}
finally
{
if (!isProvidersSet) ResteasyProviderFactory.popContextData(Providers.class);
}
}
protected ClientRequest createRequest(Object[] args)
{
ClientRequest request = new ClientRequest(uri, executor, providerFactory);
request.getAttributes().putAll(attributes);
if (accepts != null) request.header(HttpHeaders.ACCEPT, accepts.toString());
this.copyClientInterceptorsTo(request);
boolean isClientResponseResult = ClientResponse.class.isAssignableFrom(method.getReturnType());
request.followRedirects(!isClientResponseResult || this.followRedirects);
for (int i = 0; i < marshallers.length; i++)
{
marshallers[i].build(request, args[i]);
}
return request;
}
public String getHttpMethod()
{
return httpMethod;
}
/** @param httpMethod the HTTP verb this invoker should issue. */
public void setHttpMethod(String httpMethod)
{
this.httpMethod = httpMethod;
}
/** @return whether HTTP redirects are followed automatically (see createRequest for the ClientResponse exception). */
public boolean isFollowRedirects()
{
return followRedirects;
}
/** @param followRedirects whether HTTP redirects should be followed automatically. */
public void setFollowRedirects(boolean followRedirects)
{
this.followRedirects = followRedirects;
}
/** Convenience: enables automatic redirect following. */
public void followRedirects()
{
setFollowRedirects(true);
}
} | apache-2.0 |
vishnudevk/MiBandDecompiled | Original Files/source/src/cn/com/smartdevices/bracelet/view/s.java | 1132 | // Decompiled by Jad v1.5.8e. Copyright 2001 Pavel Kouznetsov.
// Jad home page: http://www.geocities.com/kpdus/jad.html
// Decompiler options: braces fieldsfirst space lnc
package cn.com.smartdevices.bracelet.view;
import android.animation.Animator;
// Referenced classes of package cn.com.smartdevices.bracelet.view:
// RoundProgressBar
/**
 * Decompiled (Jad) AnimatorListener attached to RoundProgressBar's value
 * animator. When the animation ends, it appears to snap the bar to its
 * target value and restart the remaining sweep if the animated value fell
 * short — NOTE(review): field names are obfuscated (a/b/c accessors), so
 * confirm against RoundProgressBar before relying on this description.
 */
class s
    implements android.animation.Animator.AnimatorListener
{
    /** Owning progress bar (decompiled synthetic outer-instance field). */
    final RoundProgressBar a;

    s(RoundProgressBar roundprogressbar)
    {
        // Bug fix: an explicit super() invocation must be the FIRST statement
        // of a constructor (JLS 8.8.7.1). The decompiler emitted it after the
        // field assignment, which is not valid Java source.
        super();
        a = roundprogressbar;
    }

    public void onAnimationCancel(Animator animator)
    {
        // Intentionally empty.
    }

    public void onAnimationEnd(Animator animator)
    {
        if (RoundProgressBar.a(a) < RoundProgressBar.b(a) && RoundProgressBar.c(a) < RoundProgressBar.b(a))
        {
            RoundProgressBar.a(a, RoundProgressBar.b(a));
            RoundProgressBar.a(a, RoundProgressBar.a(a) - RoundProgressBar.c(a), RoundProgressBar.c(a), RoundProgressBar.a(a));
        }
    }

    public void onAnimationRepeat(Animator animator)
    {
        // Intentionally empty.
    }

    public void onAnimationStart(Animator animator)
    {
        // Intentionally empty.
    }
}
| apache-2.0 |
WeRockStar/iosched | third_party/material-components-android/lib/src/android/support/design/widget/DrawableUtils.java | 2166 | /*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.design.widget;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.DrawableContainer;
import android.util.Log;
import java.lang.reflect.Method;
/** Caution. Gross hacks ahead. */
class DrawableUtils {
private static final String LOG_TAG = "DrawableUtils";
// Reflective handle to the hidden DrawableContainer.setConstantState();
// resolved lazily, at most once (guarded by the fetched flag below).
private static Method sSetConstantStateMethod;
private static boolean sSetConstantStateMethodFetched;
// Utility class; not instantiable.
private DrawableUtils() {}
/**
 * Replaces the constant state of the given DrawableContainer via the hidden
 * framework method setConstantState().
 *
 * @return true if the constant state was applied, false if the hidden
 *         method could not be found or invoked
 */
static boolean setContainerConstantState(
DrawableContainer drawable, Drawable.ConstantState constantState) {
// We can use getDeclaredMethod() on v9+
return setContainerConstantStateV9(drawable, constantState);
}
private static boolean setContainerConstantStateV9(
DrawableContainer drawable, Drawable.ConstantState constantState) {
// Resolve the hidden method once. On failure the flag still flips, so we
// never retry (or re-log) on later calls.
if (!sSetConstantStateMethodFetched) {
try {
sSetConstantStateMethod =
DrawableContainer.class.getDeclaredMethod(
"setConstantState", DrawableContainer.DrawableContainerState.class);
sSetConstantStateMethod.setAccessible(true);
} catch (NoSuchMethodException e) {
Log.e(LOG_TAG, "Could not fetch setConstantState(). Oh well.");
}
sSetConstantStateMethodFetched = true;
}
if (sSetConstantStateMethod != null) {
try {
sSetConstantStateMethod.invoke(drawable, constantState);
return true;
} catch (Exception e) {
Log.e(LOG_TAG, "Could not invoke setConstantState(). Oh well.");
}
}
return false;
}
}
| apache-2.0 |
antag99/artemis-odb | artemis/src/test/java/com/artemis/EntitySystemTest.java | 2187 | package com.artemis;
import static org.junit.Assert.assertEquals;
import java.util.NoSuchElementException;
import com.artemis.systems.EntityProcessingSystem;
import com.artemis.utils.IntBag;
import org.junit.Test;
import com.artemis.utils.ImmutableBag;
/**
* Created by obartley on 6/9/14.
*/
public class EntitySystemTest {
// A disabled entity must not be delivered to the system: the iterator in
// IteratorTestSystem.processSystem() then has nothing to return, so
// NoSuchElementException is the expected outcome.
@SuppressWarnings("static-method")
@Test(expected = NoSuchElementException.class)
public void test_process_one_inactive() {
World w = new World(new WorldConfiguration()
.setSystem(new IteratorTestSystem(0)));
Entity e = w.createEntity();
e.edit().add(new C());
e.disable();
w.process();
}
// An active entity with component C is delivered; the system asserts it
// sees exactly one entity and can iterate it.
@SuppressWarnings("static-method")
@Test
public void test_process_one_active() {
World w = new World(new WorldConfiguration()
.setSystem(new IteratorTestSystem(1)));
Entity e = w.createEntity();
e.edit().add(new C());
w.process();
}
// An aspect that only excludes (no required components) must still match
// an entity that has none of the excluded components; an empty all()
// aspect matches everything.
@Test
public void aspect_exclude_only() {
ExcludingSystem es1 = new ExcludingSystem();
EmptySystem es2 = new EmptySystem();
World w = new World(new WorldConfiguration()
.setSystem(es1)
.setSystem(es2));
Entity e = w.createEntity();
w.process();
assertEquals(1, es1.getActives().size());
assertEquals(1, es2.getActives().size());
}
// Marker components used by the tests above.
public static class C extends Component {}
public static class C2 extends Component {}
// System that asserts how many entities it was given, then touches the
// iterator (throws NoSuchElementException when the subscription is empty).
public static class IteratorTestSystem extends EntitySystem {
public int expectedSize;
@SuppressWarnings("unchecked")
public IteratorTestSystem(int expectedSize) {
super(Aspect.all(C.class));
this.expectedSize = expectedSize;
}
@Override
protected void processSystem() {
assertEquals(expectedSize, subscription.getEntities().size());
getActives().iterator().next();
}
@Override
protected boolean checkProcessing() {
return true;
}
}
// Matches entities WITHOUT component C.
public static class ExcludingSystem extends EntityProcessingSystem {
public ExcludingSystem() {
super(Aspect.exclude(C.class));
}
@Override
protected void process(Entity e) {}
}
// Matches every entity (empty aspect).
public static class EmptySystem extends EntityProcessingSystem {
public EmptySystem() {
super(Aspect.all());
}
@Override
protected void process(Entity e) {}
}
}
| apache-2.0 |
MaTriXy/gce2retrofit | gce2retrofit/src/main/java/com/sqisland/gce2retrofit/Generator.java | 13413 | package com.sqisland.gce2retrofit;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.stream.JsonReader;
import com.squareup.javawriter.JavaWriter;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.text.WordUtils;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static javax.lang.model.element.Modifier.PUBLIC;
public class Generator {
private static final String OPTION_CLASS_MAP = "classmap";
private static final String OPTION_METHODS = "methods";
private static Gson gson = new Gson();
public enum MethodType {
SYNC, ASYNC, REACTIVE
}
public static void main(String... args)
throws IOException, URISyntaxException {
Options options = getOptions();
CommandLine cmd = getCommandLine(options, args);
if (cmd == null) {
return;
}
String[] arguments = cmd.getArgs();
if (arguments.length != 2) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("java -jar gce2retrofit.jar discovery.json output_dir", options);
System.exit(1);
}
String discoveryFile = arguments[0];
String outputDir = arguments[1];
Map<String, String> classMap = cmd.hasOption(OPTION_CLASS_MAP)?
readClassMap(new FileReader(cmd.getOptionValue(OPTION_CLASS_MAP))) : null;
EnumSet<MethodType> methodTypes = getMethods(cmd.getOptionValue(OPTION_METHODS));
generate(new FileReader(discoveryFile), new FileWriterFactory(new File(outputDir)),
classMap, methodTypes);
}
private static Options getOptions() {
Options options = new Options();
options.addOption(
OPTION_CLASS_MAP, true, "Map fields to classes. Format: field_name\\tclass_name");
options.addOption(
OPTION_METHODS, true,
"Methods to generate, either sync, async or reactive. Default is to generate sync & async.");
return options;
}
private static CommandLine getCommandLine(Options options, String... args) {
CommandLineParser parser = new BasicParser();
try {
CommandLine cmd = parser.parse(options, args);
return cmd;
} catch (ParseException e) {
System.out.println("Unexpected exception:" + e.getMessage());
}
return null;
}
public static void generate(
Reader discoveryReader, WriterFactory writerFactory,
Map<String, String> classMap, EnumSet<MethodType> methodTypes)
throws IOException, URISyntaxException {
JsonReader jsonReader = new JsonReader(discoveryReader);
Discovery discovery = gson.fromJson(jsonReader, Discovery.class);
String packageName = StringUtil.getPackageName(discovery.baseUrl);
if (packageName == null || packageName.isEmpty()) {
packageName = StringUtil.getPackageName(discovery.rootUrl);
}
String modelPackageName = packageName + ".model";
for (Entry<String, JsonElement> entry : discovery.schemas.entrySet()) {
generateModel(
writerFactory, modelPackageName, entry.getValue().getAsJsonObject(), classMap);
}
if (discovery.resources != null) {
generateInterfaceFromResources(
writerFactory, packageName, "", discovery.resources, methodTypes);
}
if (discovery.name != null && discovery.methods != null) {
generateInterface(
writerFactory, packageName, discovery.name, discovery.methods, methodTypes);
}
}
public static Map<String, String> readClassMap(Reader reader) throws IOException {
Map<String, String> classMap = new HashMap<String, String>();
String line;
BufferedReader bufferedReader = new BufferedReader(reader);
while ((line = bufferedReader.readLine()) != null) {
String[] fields = line.split("\t");
if (fields.length == 2) {
classMap.put(fields[0], fields[1]);
}
}
return classMap;
}
public static EnumSet<MethodType> getMethods(String input) {
EnumSet<MethodType> methodTypes = EnumSet.noneOf(MethodType.class);
if (input != null) {
String[] parts = input.split(",");
for (String part : parts) {
if ("sync".equals(part) || "both".equals(part)) {
methodTypes.add(MethodType.SYNC);
}
if ("async".equals(part) || "both".equals(part)) {
methodTypes.add(MethodType.ASYNC);
}
if ("reactive".equals(part)) {
methodTypes.add(MethodType.REACTIVE);
}
}
}
if (methodTypes.isEmpty()) {
methodTypes = EnumSet.of(Generator.MethodType.ASYNC, Generator.MethodType.SYNC);
}
return methodTypes;
}
private static void generateModel(
WriterFactory writerFactory, String modelPackageName,
JsonObject schema, Map<String, String> classMap)
throws IOException {
String id = schema.get("id").getAsString();
String path = StringUtil.getPath(modelPackageName, id + ".java");
Writer writer = writerFactory.getWriter(path);
JavaWriter javaWriter = new JavaWriter(writer);
javaWriter.emitPackage(modelPackageName)
.emitImports("com.google.gson.annotations.SerializedName")
.emitEmptyLine()
.emitImports("java.util.List")
.emitEmptyLine();
String type = schema.get("type").getAsString();
if (type.equals("object")) {
javaWriter.beginType(modelPackageName + "." + id, "class", EnumSet.of(PUBLIC));
generateObject(javaWriter, schema, classMap);
javaWriter.endType();
} else if (type.equals("string")) {
javaWriter.beginType(modelPackageName + "." + id, "enum", EnumSet.of(PUBLIC));
generateEnum(javaWriter, schema);
javaWriter.endType();
}
writer.close();
}
private static void generateObject(
JavaWriter javaWriter, JsonObject schema, Map<String, String> classMap)
throws IOException {
JsonElement element = schema.get("properties");
if (element == null) {
return;
}
JsonObject properties = element.getAsJsonObject();
for (Entry<String, JsonElement> entry : properties.entrySet()) {
String key = entry.getKey();
String variableName = key;
if (StringUtil.isReservedWord(key)) {
javaWriter.emitAnnotation("SerializedName(\"" + key + "\")");
variableName += "_";
}
PropertyType propertyType = gson.fromJson(
entry.getValue(), PropertyType.class);
String javaType = propertyType.toJavaType();
if (classMap != null && classMap.containsKey(key)) {
javaType = classMap.get(key);
}
javaWriter.emitField(javaType, variableName, EnumSet.of(PUBLIC));
}
}
private static void generateEnum(JavaWriter javaWriter, JsonObject schema) throws IOException {
JsonArray enums = schema.get("enum").getAsJsonArray();
for (int i = 0; i < enums.size(); ++i) {
javaWriter.emitEnumValue(enums.get(i).getAsString());
}
}
private static void generateInterfaceFromResources(
WriterFactory writerFactory, String packageName,
String resourceName, JsonObject resources,
EnumSet<MethodType> methodTypes)
throws IOException {
for (Entry<String, JsonElement> entry : resources.entrySet()) {
JsonObject entryValue = entry.getValue().getAsJsonObject();
if (entryValue.has("methods")) {
generateInterface(writerFactory, packageName,
resourceName + "_" + entry.getKey(),
entryValue.get("methods").getAsJsonObject(),
methodTypes);
}
if (entryValue.has("resources")) {
generateInterfaceFromResources(writerFactory, packageName,
resourceName + "_" + entry.getKey(),
entryValue.get("resources").getAsJsonObject(),
methodTypes);
}
}
}
private static void generateInterface(
WriterFactory writerFactory, String packageName,
String resourceName, JsonObject methods,
EnumSet<MethodType> methodTypes)
throws IOException {
String capitalizedName = WordUtils.capitalizeFully(resourceName, '_');
String className = capitalizedName.replaceAll("_", "");
String path = StringUtil.getPath(packageName, className + ".java");
Writer fileWriter = writerFactory.getWriter(path);
JavaWriter javaWriter = new JavaWriter(fileWriter);
javaWriter.emitPackage(packageName)
.emitImports(packageName + ".model.*")
.emitEmptyLine()
.emitImports(
"retrofit.Callback",
"retrofit.client.Response",
"retrofit.http.GET",
"retrofit.http.POST",
"retrofit.http.PATCH",
"retrofit.http.DELETE",
"retrofit.http.Body",
"retrofit.http.Path",
"retrofit.http.Query");
if (methodTypes.contains(MethodType.REACTIVE)) {
javaWriter.emitImports("rx.Observable");
}
javaWriter.emitEmptyLine();
javaWriter.beginType(
packageName + "." + className, "interface", EnumSet.of(PUBLIC));
for (Entry<String, JsonElement> entry : methods.entrySet()) {
String methodName = entry.getKey();
Method method = gson.fromJson(entry.getValue(), Method.class);
for (MethodType methodType : methodTypes) {
javaWriter.emitAnnotation(method.httpMethod, "\"/" + method.path + "\"");
emitMethodSignature(fileWriter, methodName, method, methodType);
}
}
javaWriter.endType();
fileWriter.close();
}
// TODO: Use JavaWriter to emit method signature
private static void emitMethodSignature(
Writer writer, String methodName, Method method, MethodType methodType) throws IOException {
ArrayList<String> params = new ArrayList<String>();
if (method.request != null) {
params.add("@Body " + method.request.$ref + " " +
(method.request.parameterName != null ? method.request.parameterName : "resource"));
}
for (Entry<String, JsonElement> param : getParams(method)) {
params.add(param2String(param));
}
String returnValue = "void";
if (methodType == MethodType.SYNC && "POST".equals(method.httpMethod)) {
returnValue = "Response";
}
if (method.response != null) {
if (methodType == MethodType.SYNC) {
returnValue = method.response.$ref;
} else if (methodType == MethodType.REACTIVE) {
returnValue = "Observable<" + method.response.$ref + ">";
}
}
if (methodType == MethodType.ASYNC) {
if (method.response == null) {
params.add("Callback<Void> cb");
} else {
params.add("Callback<" + method.response.$ref + "> cb");
}
}
writer.append(" " + returnValue + " " + methodName + (methodType == MethodType.REACTIVE ? "Rx" : "") + "(");
for (int i = 0; i < params.size(); ++i) {
if (i != 0) {
writer.append(", ");
}
writer.append(params.get(i));
}
writer.append(");\n");
}
/**
* Assemble a list of parameters, with the first entries matching the ones
* listed in parameterOrder
*
* @param method The method containing parameters and parameterOrder
* @return Ordered parameters
*/
private static List<Entry<String, JsonElement>> getParams(Method method) {
List<Entry<String, JsonElement>> params
= new ArrayList<Entry<String, JsonElement>>();
if (method.parameters == null) {
return params;
}
// Convert the entry set into a map, and extract the keys not listed in
// parameterOrder
HashMap<String, Entry<String, JsonElement>> map
= new HashMap<String, Entry<String, JsonElement>>();
List<String> remaining = new ArrayList<String>();
for (Entry<String, JsonElement> entry : method.parameters.entrySet()) {
String key = entry.getKey();
map.put(key, entry);
if (method.parameterOrder == null ||
!method.parameterOrder.contains(key)) {
remaining.add(key);
}
}
// Add the keys in parameterOrder
if (method.parameterOrder != null) {
for (String key : method.parameterOrder) {
params.add(map.get(key));
}
}
// Then add the keys not in parameterOrder
for (String key : remaining) {
params.add(map.get(key));
}
return params;
}
private static String param2String(Entry<String, JsonElement> param) {
StringBuffer buf = new StringBuffer();
String paramName = param.getKey();
ParameterType paramType = gson.fromJson(
param.getValue(), ParameterType.class);
if ("path".equals(paramType.location)) {
buf.append("@Path(\"" + paramName + "\") ");
}
if ("query".equals(paramType.location)) {
buf.append("@Query(\"" + paramName + "\") ");
}
String type = paramType.toJavaType();
if (!paramType.required) {
type = StringUtil.primitiveToObject(type);
}
buf.append(type + " " + paramName);
return buf.toString();
}
}
| apache-2.0 |
machristie/airavata | modules/registry/registry-core/src/main/java/org/apache/airavata/registry/core/app/catalog/model/ApplicationInterface.java | 3284 | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.core.app.catalog.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import java.io.Serializable;
import java.sql.Timestamp;
@Entity
@Table(name = "APPLICATION_INTERFACE")
public class ApplicationInterface implements Serializable {
@Id
@Column(name = "INTERFACE_ID")
private String interfaceID;
@Column(name = "APPLICATION_NAME")
private String appName;
@Column(name = "APPLICATION_DESCRIPTION")
private String appDescription;
@Column(name = "CREATION_TIME")
private Timestamp creationTime;
@Column(name = "GATEWAY_ID")
private String gatewayId;
@Column(name = "ARCHIVE_WORKING_DIRECTORY")
private boolean archiveWorkingDirectory;
@Column(name = "HAS_OPTIONAL_FILE_INPUTS")
private boolean hasOptionalFileInputs;
@Column(name = "UPDATE_TIME")
private Timestamp updateTime;
public String getGatewayId() {
return gatewayId;
}
public void setGatewayId(String gatewayId) {
this.gatewayId = gatewayId;
}
public boolean isArchiveWorkingDirectory() {
return archiveWorkingDirectory;
}
public void setArchiveWorkingDirectory(boolean archiveWorkingDirectory) {
this.archiveWorkingDirectory = archiveWorkingDirectory;
}
public Timestamp getCreationTime() {
return creationTime;
}
public void setCreationTime(Timestamp creationTime) {
this.creationTime = creationTime;
}
public Timestamp getUpdateTime() {
return updateTime;
}
public void setUpdateTime(Timestamp updateTime) {
this.updateTime = updateTime;
}
public String getInterfaceID() {
return interfaceID;
}
public void setInterfaceID(String interfaceID) {
this.interfaceID = interfaceID;
}
public String getAppName() {
return appName;
}
public void setAppName(String appName) {
this.appName = appName;
}
public String getAppDescription() {
return appDescription;
}
public void setAppDescription(String appDescription) {
this.appDescription = appDescription;
}
public boolean isHasOptionalFileInputs() {
return hasOptionalFileInputs;
}
public void setHasOptionalFileInputs(boolean hasOptionalFileInputs) {
this.hasOptionalFileInputs = hasOptionalFileInputs;
}
}
| apache-2.0 |
jimmytheneutrino/petit | modules/orm/src/main/java/com/nortal/petit/orm/statement/ExecutableStatement.java | 1555 | /**
* Copyright 2014 Nortal AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nortal.petit.orm.statement;
import java.util.List;
import org.springframework.util.CollectionUtils;
/**
* @author Lauri Lättemäe (lauri.lattemae@nortal.com)
* @created 29.04.2013
*/
public abstract class ExecutableStatement<B> extends SimpleStatement<B> {

    /**
     * Returns the statement's SQL with parameter values substituted in.
     *
     * <p>When beans are bound, the statement is prepared once per bean and
     * each binding's SQL is emitted on its own line; otherwise a single line
     * is produced. Every line is terminated by '\n'.
     *
     * @return the SQL text, one statement per line
     */
    @Override
    public String getSqlWithParams() {
        prepare();

        // StringBuilder instead of StringBuffer: the builder never escapes
        // this method, so StringBuffer's synchronization is pure overhead.
        StringBuilder sb = new StringBuilder();
        if (!CollectionUtils.isEmpty(getBeans())) {
            for (B bean : getBeans()) {
                prepare(bean);
                sb.append(super.getSqlWithParams()).append("\n");
            }
        } else {
            sb.append(super.getSqlWithParams()).append("\n");
        }
        return sb.toString();
    }

    /** @return the beans bound to this statement; may be null or empty. */
    protected abstract List<B> getBeans();

    /** Prepares the statement state for the given bean before SQL generation. */
    protected abstract void prepare(B bean);

    /** Executes the statement against the database. */
    public abstract void exec();
}
| apache-2.0 |
antz29/closure-compiler | src/com/google/javascript/jscomp/UnreachableCodeElimination.java | 8731 | /*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.javascript.jscomp.ControlFlowGraph.Branch;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.jscomp.graph.GraphReachability;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphEdge;
import com.google.javascript.jscomp.graph.DiGraph.DiGraphNode;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Removes dead code from a parse tree. The kinds of dead code that this pass
* removes are:
* - Any code following a return statement, such as the <code>alert</code>
* call in: <code>if (x) { return; alert('unreachable'); }</code>.
* - Statements that have no side effects, such as:
* <code>a.b.MyClass.prototype.propertyName;</code> or <code>true;</code>.
* That first kind of statement sometimes appears intentionally, so that
* prototype properties can be annotated using JSDoc without actually
* being initialized.
*
*/
class UnreachableCodeElimination extends AbstractPostOrderCallback
implements CompilerPass, ScopedCallback {
private static final Logger logger =
Logger.getLogger(UnreachableCodeElimination.class.getName());
private final AbstractCompiler compiler;
// When true, statements without side effects are removed even if reachable.
private final boolean removeNoOpStatements;
// Stack of enclosing scopes' CFGs. Must be a LinkedList (not ArrayDeque):
// curCfg is null before the first enterScope and that null is pushed;
// ArrayDeque rejects null elements.
Deque<ControlFlowGraph<Node>> cfgStack =
new LinkedList<ControlFlowGraph<Node>>();
// CFG of the scope currently being traversed; null at the top level.
ControlFlowGraph<Node> curCfg = null;
UnreachableCodeElimination(AbstractCompiler compiler,
boolean removeNoOpStatements) {
this.compiler = compiler;
this.removeNoOpStatements = removeNoOpStatements;
}
// Builds the CFG for the entered scope, marks reachable nodes, and saves
// the outer scope's CFG for restoration in exitScope.
@Override
public void enterScope(NodeTraversal t) {
Scope scope = t.getScope();
// Computes the control flow graph.
ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, false);
cfa.process(null, scope.getRootNode());
cfgStack.push(curCfg);
curCfg = cfa.getCfg();
new GraphReachability<Node, ControlFlowGraph.Branch>(curCfg)
.compute(curCfg.getEntry().getValue());
}
// Restores the enclosing scope's CFG (pushed in enterScope).
@Override
public void exitScope(NodeTraversal t) {
curCfg = cfgStack.pop();
}
@Override
public void process(Node externs, Node root) {
NodeTraversal.traverse(compiler, root, this);
}
// Post-order visit: simplifies empty TRYs, removes unreachable or no-op
// statements, and collapses useless unconditional jumps.
@Override
public void visit(NodeTraversal t, Node n, Node parent) {
if (parent == null) {
return;
}
// FUNCTION/SCRIPT roots are scope boundaries, handled per-scope above.
if (n.getType() == Token.FUNCTION || n.getType() == Token.SCRIPT) {
return;
}
// Removes TRYs that had its CATCH removed and/or empty FINALLY.
// TODO(dcc): Move the parts of this that don't require a control flow
// graph to PeepholeRemoveDeadCode
if (n.getType() == Token.TRY) {
Node body = n.getFirstChild();
Node catchOrFinallyBlock = body.getNext();
Node finallyBlock = catchOrFinallyBlock.getNext();
if (!catchOrFinallyBlock.hasChildren() &&
(finallyBlock == null || !finallyBlock.hasChildren())) {
n.removeChild(body);
parent.replaceChild(n, body);
compiler.reportCodeChange();
// Continue the visit with the hoisted body in place of the TRY.
n = body;
}
}
DiGraphNode<Node, Branch> gNode = curCfg.getDirectedGraphNode(n);
if (gNode == null) { // Not in CFG.
return;
}
if (gNode.getAnnotation() != GraphReachability.REACHABLE ||
(removeNoOpStatements && !NodeUtil.mayHaveSideEffects(n))) {
removeDeadExprStatementSafely(n);
return;
}
tryRemoveUnconditionalBranching(n);
}
/**
* Tries to remove n if an unconditional branch node (break, continue or
* return) if the target of n is the same as the the follow of n. That is, if
* we remove n, the control flow remains the same. Also if n targets to
* another unconditional branch, this function will recursively try to remove
* the target branch as well. The reason why we want to cascade this removal
* is because we only run this pass once. If we have code such as
*
* break -> break -> break
*
* where all 3 break's are useless. The order of removal matters. When we
* first look at the first break, we see that it branches to the 2nd break.
* However, if we remove the last break, the 2nd break becomes useless and
* finally the first break becomes useless as well.
*
* @return The target of this jump. If the target is also useless jump,
* the target of that useless jump recursively.
*/
@SuppressWarnings("fallthrough")
private Node tryRemoveUnconditionalBranching(Node n) {
/*
* For each of the unconditional branching control flow node, check to see
* if the ControlFlowAnalysis.computeFollowNode of that node is same as
* the branching target. If it is, the branch node is safe to be removed.
*
* This is not as clever as MinimizeExitPoints because it doesn't do any
* if-else conversion but it handles more complicated switch statements
* much nicer.
*/
// If n is null the target is the end of the function, nothing to do.
if (n == null) {
return n;
}
DiGraphNode<Node, Branch> gNode = curCfg.getDirectedGraphNode(n);
if (gNode == null) {
return n;
}
// If the parent is null, this mean whatever node it was there is now
// useless and it has been removed by other logics in this pass. That node
// while no longer exists in the AST, is still in the CFG because we
// never update the graph as nodes are removed.
if (n.getParent() == null) {
List<DiGraphEdge<Node,Branch>> outEdges = gNode.getOutEdges();
if (outEdges.size() == 1) {
return tryRemoveUnconditionalBranching(
outEdges.get(0).getDestination().getValue());
}
}
switch (n.getType()) {
case Token.BLOCK:
if (n.hasChildren()) {
Node first = n.getFirstChild();
return tryRemoveUnconditionalBranching(first);
} else {
return tryRemoveUnconditionalBranching(
ControlFlowAnalysis.computeFollowNode(n));
}
case Token.RETURN:
if (n.hasChildren()) {
break;
}
// Intentional fallthrough (see @SuppressWarnings above): a bare
// RETURN is treated like BREAK/CONTINUE.
case Token.BREAK:
case Token.CONTINUE:
// We are looking for a control flow changing statement that always
// branches to the same node. If removing it the control flow still
// branches to that same node. It is safe to remove it.
List<DiGraphEdge<Node,Branch>> outEdges = gNode.getOutEdges();
if (outEdges.size() == 1 &&
// If there is a next node, there is no chance this jump is useless.
(n.getNext() == null || n.getNext().getType() == Token.FUNCTION)) {
Preconditions.checkState(outEdges.get(0).getValue() == Branch.UNCOND);
Node fallThrough = tryRemoveUnconditionalBranching(
ControlFlowAnalysis.computeFollowNode(n));
Node nextCfgNode = outEdges.get(0).getDestination().getValue();
if (nextCfgNode == fallThrough) {
removeDeadExprStatementSafely(n);
return fallThrough;
}
}
}
return n;
}
// Removes n from the AST unless removal would change semantics (DO loops
// run once; TRY/CATCH/FINALLY have structural constraints).
private void removeDeadExprStatementSafely(Node n) {
if (n.getType() == Token.EMPTY ||
(n.getType() == Token.BLOCK && !n.hasChildren())) {
// Not always trivial to remove, let FoldContants work its magic later.
return;
}
// Removing an unreachable DO node is messy because it means we still have
// to execute one iteration. If the DO's body has breaks in the middle, it
// can get even more trickier and code size might actually increase.
switch (n.getType()) {
case Token.DO:
case Token.TRY:
case Token.CATCH:
case Token.FINALLY:
return;
}
// Preserve var declarations hoisted out of the removed branch.
NodeUtil.redeclareVarsInsideBranch(n);
compiler.reportCodeChange();
if (logger.isLoggable(Level.FINE)) {
logger.fine("Removing " + n.toString());
}
NodeUtil.removeChild(n.getParent(), n);
}
}
| apache-2.0 |
Frameworkium/frameworkium | src/test/java/theinternet/pages/FileUploadPage.java | 1003 | package theinternet.pages;
import com.frameworkium.core.ui.annotations.Visible;
import com.frameworkium.core.ui.pages.BasePage;
import com.frameworkium.core.ui.pages.PageFactory;
import io.qameta.allure.Step;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
import ru.yandex.qatools.htmlelements.annotations.Name;
import ru.yandex.qatools.htmlelements.element.FileInput;
/**
 * Page object for the-internet's "File Upload" page: a file chooser input
 * and an upload button, both required to be visible on page load.
 */
public class FileUploadPage extends BasePage<FileUploadPage> {
@Visible
@Name("Choose Files button")
@FindBy(css = "input#file-upload")
private FileInput chooseFileInput;
@Visible
@Name("Upload button")
@FindBy(css = "input#file-submit")
private WebElement uploadButton;
/**
 * Uploads a file by setting it on the file input and clicking Upload.
 *
 * @param filePath path of the local file to upload
 * @return the success page shown after the upload completes
 */
@Step("Upload a file by choosing file and then clicking upload")
public FileUploadSuccessPage uploadFile(String filePath) {
chooseFileInput.setFileToUpload(filePath);
uploadButton.click();
return PageFactory.newInstance(FileUploadSuccessPage.class);
}
}
| apache-2.0 |
NiteshKant/RxJava | src/main/java/io/reactivex/internal/operators/flowable/FlowableFlatMapCompletable.java | 7266 | /**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.flowable;
import java.util.concurrent.atomic.AtomicReference;
import org.reactivestreams.*;
import io.reactivex.*;
import io.reactivex.annotations.Nullable;
import io.reactivex.disposables.*;
import io.reactivex.exceptions.Exceptions;
import io.reactivex.functions.Function;
import io.reactivex.internal.disposables.DisposableHelper;
import io.reactivex.internal.functions.ObjectHelper;
import io.reactivex.internal.subscriptions.*;
import io.reactivex.internal.util.AtomicThrowable;
import io.reactivex.plugins.RxJavaPlugins;
/**
 * Maps a sequence of values into CompletableSources and awaits their termination.
 * <p>
 * No items are ever emitted downstream: the operator presents an always-empty
 * (ASYNC-fuseable) queue and only signals a terminal event once the upstream
 * and all inner CompletableSources have terminated.
 * @param <T> the value type
 */
public final class FlowableFlatMapCompletable<T> extends AbstractFlowableWithUpstream<T, T> {
    // Maps each upstream value to the CompletableSource to subscribe to.
    final Function<? super T, ? extends CompletableSource> mapper;
    // Maximum number of inner CompletableSources active at once;
    // Integer.MAX_VALUE means an unbounded upfront request.
    final int maxConcurrency;
    // If true, errors are accumulated and signalled only after all sources terminate.
    final boolean delayErrors;
    public FlowableFlatMapCompletable(Flowable<T> source,
            Function<? super T, ? extends CompletableSource> mapper, boolean delayErrors,
            int maxConcurrency) {
        super(source);
        this.mapper = mapper;
        this.delayErrors = delayErrors;
        this.maxConcurrency = maxConcurrency;
    }
    @Override
    protected void subscribeActual(Subscriber<? super T> subscriber) {
        source.subscribe(new FlatMapCompletableMainSubscriber<T>(subscriber, mapper, delayErrors, maxConcurrency));
    }
    /**
     * Main subscriber. Its inherited AtomicInteger state counts outstanding
     * sources: 1 for the upstream plus 1 per active inner CompletableSource;
     * the terminal event is emitted when the counter reaches 0.
     */
    static final class FlatMapCompletableMainSubscriber<T> extends BasicIntQueueSubscription<T>
    implements FlowableSubscriber<T> {
        private static final long serialVersionUID = 8443155186132538303L;
        // Downstream consumer; never receives onNext, only terminal events.
        final Subscriber<? super T> downstream;
        // Collects errors; terminate() claims them exactly once.
        final AtomicThrowable errors;
        final Function<? super T, ? extends CompletableSource> mapper;
        final boolean delayErrors;
        // Tracks active inner observers so they can all be disposed on cancel.
        final CompositeDisposable set;
        final int maxConcurrency;
        Subscription upstream;
        volatile boolean cancelled;
        FlatMapCompletableMainSubscriber(Subscriber<? super T> subscriber,
                Function<? super T, ? extends CompletableSource> mapper, boolean delayErrors,
                int maxConcurrency) {
            this.downstream = subscriber;
            this.mapper = mapper;
            this.delayErrors = delayErrors;
            this.errors = new AtomicThrowable();
            this.set = new CompositeDisposable();
            this.maxConcurrency = maxConcurrency;
            // Start the outstanding-source counter at 1, accounting for the upstream itself.
            this.lazySet(1);
        }
        @Override
        public void onSubscribe(Subscription s) {
            if (SubscriptionHelper.validate(this.upstream, s)) {
                this.upstream = s;
                downstream.onSubscribe(this);
                int m = maxConcurrency;
                if (m == Integer.MAX_VALUE) {
                    s.request(Long.MAX_VALUE);
                } else {
                    // Bounded mode: request exactly as many values as inner
                    // sources allowed to run concurrently; more are requested
                    // one-by-one as inners terminate.
                    s.request(m);
                }
            }
        }
        @Override
        public void onNext(T value) {
            CompletableSource cs;
            try {
                cs = ObjectHelper.requireNonNull(mapper.apply(value), "The mapper returned a null CompletableSource");
            } catch (Throwable ex) {
                // Mapper crash: cancel the upstream and route the error through onError.
                Exceptions.throwIfFatal(ex);
                upstream.cancel();
                onError(ex);
                return;
            }
            // Count the new inner source before subscribing to it.
            getAndIncrement();
            InnerConsumer inner = new InnerConsumer();
            // Subscribe only if not cancelled and the container accepted the inner
            // (set.add returns false once the set has been disposed).
            if (!cancelled && set.add(inner)) {
                cs.subscribe(inner);
            }
        }
        @Override
        public void onError(Throwable e) {
            if (errors.addThrowable(e)) {
                if (delayErrors) {
                    // Delayed mode: hold the error until all remaining sources terminate.
                    if (decrementAndGet() == 0) {
                        Throwable ex = errors.terminate();
                        downstream.onError(ex);
                    } else {
                        if (maxConcurrency != Integer.MAX_VALUE) {
                            upstream.request(1);
                        }
                    }
                } else {
                    // Eager mode: cancel everything; emit only if nobody has
                    // signalled a terminal event yet (counter still positive).
                    cancel();
                    if (getAndSet(0) > 0) {
                        Throwable ex = errors.terminate();
                        downstream.onError(ex);
                    }
                }
            } else {
                // Error arrived after termination: hand it to the global handler.
                RxJavaPlugins.onError(e);
            }
        }
        @Override
        public void onComplete() {
            // Invoked for both upstream completion and (via innerComplete) inner completions.
            if (decrementAndGet() == 0) {
                Throwable ex = errors.terminate();
                if (ex != null) {
                    downstream.onError(ex);
                } else {
                    downstream.onComplete();
                }
            } else {
                // A source finished: in bounded mode, replace it with one more upstream value.
                if (maxConcurrency != Integer.MAX_VALUE) {
                    upstream.request(1);
                }
            }
        }
        @Override
        public void cancel() {
            cancelled = true;
            upstream.cancel();
            set.dispose();
        }
        @Override
        public void request(long n) {
            // ignored, no values emitted
        }
        @Nullable
        @Override
        public T poll() throws Exception {
            return null; // always empty
        }
        @Override
        public boolean isEmpty() {
            return true; // always empty
        }
        @Override
        public void clear() {
            // nothing to clear
        }
        @Override
        public int requestFusion(int mode) {
            // Offer ASYNC fusion only; the fused queue is permanently empty (see poll()).
            return mode & ASYNC;
        }
        void innerComplete(InnerConsumer inner) {
            set.delete(inner);
            onComplete();
        }
        void innerError(InnerConsumer inner, Throwable e) {
            set.delete(inner);
            onError(e);
        }
        /** Observer for a single inner CompletableSource; forwards terminal events to the parent. */
        final class InnerConsumer extends AtomicReference<Disposable> implements CompletableObserver, Disposable {
            private static final long serialVersionUID = 8606673141535671828L;
            @Override
            public void onSubscribe(Disposable d) {
                DisposableHelper.setOnce(this, d);
            }
            @Override
            public void onComplete() {
                innerComplete(this);
            }
            @Override
            public void onError(Throwable e) {
                innerError(this, e);
            }
            @Override
            public void dispose() {
                DisposableHelper.dispose(this);
            }
            @Override
            public boolean isDisposed() {
                return DisposableHelper.isDisposed(get());
            }
        }
    }
}
| apache-2.0 |
romartin/drools | kie-dmn/kie-dmn-core/src/main/java/org/kie/dmn/core/compiler/DecisionCompiler.java | 6980 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.dmn.core.compiler;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import org.kie.dmn.api.core.DMNType;
import org.kie.dmn.api.core.ast.BusinessKnowledgeModelNode;
import org.kie.dmn.api.core.ast.DMNNode;
import org.kie.dmn.api.core.ast.DecisionNode;
import org.kie.dmn.api.core.ast.DecisionServiceNode;
import org.kie.dmn.api.core.ast.InputDataNode;
import org.kie.dmn.core.api.DMNExpressionEvaluator;
import org.kie.dmn.core.ast.DecisionNodeImpl;
import org.kie.dmn.core.impl.CompositeTypeImpl;
import org.kie.dmn.core.impl.DMNModelImpl;
import org.kie.dmn.core.util.Msg;
import org.kie.dmn.model.api.DRGElement;
import org.kie.dmn.model.api.Decision;
public class DecisionCompiler implements DRGElementCompiler {
@Override
public boolean accept(DRGElement de) {
return de instanceof Decision;
}
@Override
public void compileNode(DRGElement de, DMNCompilerImpl compiler, DMNModelImpl model) {
Decision decision = (Decision) de;
DecisionNodeImpl dn = new DecisionNodeImpl( decision );
DMNType type = null;
if ( decision.getVariable() == null ) {
DMNCompilerHelper.reportMissingVariable( model, de, decision, Msg.MISSING_VARIABLE_FOR_DECISION );
return;
}
DMNCompilerHelper.checkVariableName( model, decision, decision.getName() );
if ( decision.getVariable() != null && decision.getVariable().getTypeRef() != null ) {
type = compiler.resolveTypeRef(model, decision, decision.getVariable(), decision.getVariable().getTypeRef());
} else {
type = compiler.resolveTypeRef(model, decision, decision, null);
}
dn.setResultType( type );
model.addDecision( dn );
}
@Override
public boolean accept(DMNNode node) {
return node instanceof DecisionNodeImpl;
}
@Override
public void compileEvaluator(DMNNode node, DMNCompilerImpl compiler, DMNCompilerContext ctx, DMNModelImpl model) {
DecisionNodeImpl di = (DecisionNodeImpl) node;
compiler.linkRequirements( model, di );
ctx.enterFrame();
try {
Map<String, DMNType> importedTypes = new HashMap<>();
for( DMNNode dep : di.getDependencies().values() ) {
if( dep instanceof DecisionNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
ctx.setVariable(dep.getName(), ((DecisionNode) dep).getResultType());
} else {
// then the Decision dependency is an imported Decision.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((DecisionNode) dep).getResultType());
}
}
} else if( dep instanceof InputDataNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
ctx.setVariable(dep.getName(), ((InputDataNode) dep).getType());
} else {
// then the InputData dependency is an imported InputData.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((InputDataNode) dep).getType());
}
}
} else if( dep instanceof BusinessKnowledgeModelNode ) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
// might need to create a DMNType for "functions" and replace the type here by that
ctx.setVariable(dep.getName(), ((BusinessKnowledgeModelNode) dep).getResultType());
} else {
// then the BKM dependency is an imported BKM.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((BusinessKnowledgeModelNode) dep).getResultType());
}
}
} else if (dep instanceof DecisionServiceNode) {
if (dep.getModelNamespace().equals(model.getNamespace())) {
// might need to create a DMNType for "functions" and replace the type here by that
ctx.setVariable(dep.getName(), ((DecisionServiceNode) dep).getResultType());
} else {
// then the BKM dependency is an imported BKM.
Optional<String> alias = model.getImportAliasFor(dep.getModelNamespace(), dep.getModelName());
if (alias.isPresent()) {
CompositeTypeImpl importedComposite = (CompositeTypeImpl) importedTypes.computeIfAbsent(alias.get(), a -> new CompositeTypeImpl());
importedComposite.addField(dep.getName(), ((DecisionServiceNode) dep).getResultType());
}
}
}
}
for (Entry<String, DMNType> importedType : importedTypes.entrySet()) {
ctx.setVariable(importedType.getKey(), importedType.getValue());
}
DMNExpressionEvaluator evaluator = compiler.getEvaluatorCompiler().compileExpression( ctx, model, di, di.getName(), di.getDecision().getExpression() );
di.setEvaluator( evaluator );
} finally {
ctx.exitFrame();
}
}
} | apache-2.0 |
darraghgrace/ios-driver | server/src/main/java/org/uiautomation/ios/DeviceStore.java | 4836 | /*
* Copyright 2012-2013 eBay Software Foundation and ios-driver committers
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.uiautomation.ios;
import com.google.common.collect.ImmutableList;
import org.libimobiledevice.ios.driver.binding.exceptions.SDKException;
import org.libimobiledevice.ios.driver.binding.model.ApplicationInfo;
import org.libimobiledevice.ios.driver.binding.model.DeviceInfo;
import org.libimobiledevice.ios.driver.binding.services.DeviceCallBack;
import org.libimobiledevice.ios.driver.binding.services.DeviceService;
import org.libimobiledevice.ios.driver.binding.services.IOSDevice;
import org.libimobiledevice.ios.driver.binding.services.ImageMountingService;
import org.libimobiledevice.ios.driver.binding.services.InformationService;
import org.libimobiledevice.ios.driver.binding.services.InstallerService;
import org.openqa.selenium.WebDriverException;
import org.uiautomation.ios.application.IPAShellApplication;
import org.uiautomation.ios.utils.DDILocator;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.logging.Logger;
/**
 * Registry of the iOS devices (real and simulator) available to the server.
 * <p>
 * Real devices are added/removed through the libimobiledevice
 * {@link DeviceCallBack} plug/unplug notifications; simulators are registered
 * explicitly via {@link #add(SimulatorDevice)}. When {@code uuidWhitelist} is
 * non-empty, only devices whose UUID appears in it are managed.
 */
public class DeviceStore extends DeviceCallBack {

  private static final Logger log = Logger.getLogger(DeviceStore.class.getName());

  // Copy-on-write lists: the device callbacks mutate them while other threads iterate.
  private final List<RealDevice> reals = new CopyOnWriteArrayList<RealDevice>();
  private final List<SimulatorDevice> sims = new CopyOnWriteArrayList<SimulatorDevice>();
  private final ApplicationStore apps;
  private final Set<String> uuidWhitelist;

  public DeviceStore(ApplicationStore apps, Set<String> uuidWhitelist) {
    super();
    this.apps = apps;
    this.uuidWhitelist = uuidWhitelist;
  }

  /**
   * @return immutable copy of the currently available devices.
   */
  public List<Device> getDevices() {
    List<Device> all = new ArrayList<Device>();
    all.addAll(reals);
    all.addAll(sims);
    return ImmutableList.copyOf(all);
  }

  public List<RealDevice> getRealDevices() {
    return reals;
  }

  public List<SimulatorDevice> getSimulatorDevices() {
    return sims;
  }

  public void add(SimulatorDevice simulatorDevice) {
    sims.add(simulatorDevice);
  }

  /**
   * Called when a device is plugged in: registers it, records its MobileSafari
   * version in the application store and, if dev mode is not enabled, mounts
   * the developer disk image so the device can be used for testing.
   */
  @Override
  protected void onDeviceAdded(String uuid) {
    if (!uuidWhitelist.isEmpty() && !uuidWhitelist.contains(uuid)) {
      log.info("device detected but not whitelisted");
      return;
    }
    RealDevice d = null;
    try {
      IOSDevice device = DeviceService.get(uuid);
      DeviceInfo info = new DeviceInfo(uuid);
      d = new RealDevice(info);
      log.info("new device detected (" + uuid + ") " + info.getDeviceName());
      reals.add(d);
      InstallerService s = new InstallerService(device);
      String id = "com.apple.mobilesafari";
      ApplicationInfo safari = s.getApplication(id);
      String v = (String) safari.getProperty("CFBundleVersion");
      log.info("device " + info.getDeviceName() + " = safari " + v);
      IPAShellApplication ipa = new IPAShellApplication(id, v, safari);
      apps.add(ipa);
      InformationService i = new InformationService(device);
      if (!i.isDevModeEnabled()) {
        log.warning(
            "The device " + uuid + " is not set to dev mode. It can't be used for testing.");
        File ddi = DDILocator.locateDDI(device);
        mount(device, ddi);
        log.info("DDI mounted.Device now in dev mode.");
      }
    } catch (SDKException | WebDriverException e) {
      // Fix: this failure was previously swallowed silently. Log it, and make
      // sure the half-initialised device is not left in the pool.
      log.warning("Failed to initialise device " + uuid + ": " + e);
      if (d != null) {
        reals.remove(d);
      }
    }
  }

  /** Mounts the developer disk image, always releasing the mounting service. */
  private void mount(IOSDevice device, File ddi) throws SDKException {
    ImageMountingService service = null;
    try {
      service = new ImageMountingService(device);
      service.mount(ddi);
    } finally {
      if (service != null) {
        service.free();
      }
    }
  }

  /** Called when a device is unplugged: removes it from the pool (whitelist permitting). */
  @Override
  protected void onDeviceRemoved(String uuid) {
    if (!uuidWhitelist.isEmpty() && !uuidWhitelist.contains(uuid)) {
      log.info("device removed but not whitelisted");
      return;
    }
    // Safe removal during iteration: CopyOnWriteArrayList iterators snapshot the list.
    for (RealDevice d : reals) {
      if (d.getUuid().equals(uuid)) {
        log.info("Removing " + uuid + " for the devices pool");
        boolean ok = reals.remove(d);
        if (!ok) {
          log.warning("device " + uuid + " has been unplugged, but was never there ?");
        }
      }
    }
  }
}
| apache-2.0 |
barnyard/pi-sss | src/main/java/com/bt/pi/sss/UserManager.java | 259 | /* (c) British Telecommunications plc, 2009, All Rights Reserved */
package com.bt.pi.sss;
import com.bt.pi.app.common.entities.User;
/**
 * Lookup of {@link User} accounts by their access key.
 */
public interface UserManager {
    /**
     * @param accessKey the access key to check
     * @return true if a user is registered under the given access key
     */
    boolean userExists(String accessKey);
    /**
     * @param accessKey the access key identifying the user
     * @return the user registered under the given access key
     *         (behaviour for an unknown key is implementation-defined — TODO confirm)
     */
    User getUserByAccessKey(String accessKey);
}
| apache-2.0 |
RyanTech/fogger | example/src/main/java/pl/allegro/foggerexample/config/FoggerExampleApplication.java | 2363 | /*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pl.allegro.foggerexample.config;
import android.app.Application;
import android.app.Instrumentation;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
import pl.allegro.foggerexample.config.application.ApplicationRunConfiguration;
import pl.allegro.foggerexample.config.dagger.Injector;
import pl.allegro.foggerexample.config.dagger.module.RootModule;
/**
 * Application singleton for the fogger example app.
 * <p>
 * Sets up the Dagger object graph when the application is created and exposes
 * the running instance through {@link #getInstance()}. The extra constructors
 * allow the application to be instantiated with an explicit {@link Context} or
 * an {@link Instrumentation} target context (e.g. from tests).
 */
public class FoggerExampleApplication extends Application {

    private static FoggerExampleApplication instance;

    private ApplicationRunConfiguration applicationRunConfiguration;

    public FoggerExampleApplication() {
    }

    /** Attaches the supplied context and records this object as the singleton. */
    public FoggerExampleApplication(final Context context) {
        super();
        attachBaseContext(context);
        rememberInstance(this);
    }

    /** Attaches the instrumentation's target context (no singleton recording). */
    public FoggerExampleApplication(final Instrumentation instrumentation) {
        super();
        attachBaseContext(instrumentation.getTargetContext());
    }

    @Override
    public void onCreate() {
        super.onCreate();
        setUpDependencyInjection();
        Injector.inject(this);
    }

    /** Builds the run configuration from shared preferences and initialises Dagger. */
    private void setUpDependencyInjection() {
        final SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(this);
        applicationRunConfiguration = ApplicationRunConfiguration.create(preferences);
        final Object[] daggerModules = new Object[]{new RootModule()};
        Injector.init(daggerModules);
        Injector.injectStatics();
    }

    private static void rememberInstance(final FoggerExampleApplication application) {
        instance = application;
    }

    public static FoggerExampleApplication getInstance() {
        return instance;
    }
}
| apache-2.0 |
jtulach/teavm | jso/impl/src/main/java/org/teavm/jso/impl/NameEmitter.java | 781 | /*
* Copyright 2015 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.jso.impl;
import java.io.IOException;
/**
 * Callback that writes a name (or name-like expression fragment) to an output,
 * given the precedence of the surrounding expression.
 *
 * @author Alexey Andreev
 */
interface NameEmitter {
    /**
     * Emits the name.
     *
     * @param precedence precedence of the enclosing expression; implementations
     *                   presumably use it to decide on parenthesisation — TODO confirm
     * @throws IOException if writing to the underlying output fails
     */
    void emit(int precedence) throws IOException;
}
| apache-2.0 |
raja15792/googleads-java-lib | examples/adwords_axis/src/main/java/adwords/axis/v201502/advancedoperations/UseSharedBiddingStrategy.java | 9544 | // Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package adwords.axis.v201502.advancedoperations;
import com.google.api.ads.adwords.axis.factory.AdWordsServices;
import com.google.api.ads.adwords.axis.v201502.cm.AdvertisingChannelType;
import com.google.api.ads.adwords.axis.v201502.cm.ApiException;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyConfiguration;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyOperation;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyReturnValue;
import com.google.api.ads.adwords.axis.v201502.cm.BiddingStrategyServiceInterface;
import com.google.api.ads.adwords.axis.v201502.cm.Budget;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetBudgetDeliveryMethod;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetBudgetPeriod;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetOperation;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetReturnValue;
import com.google.api.ads.adwords.axis.v201502.cm.BudgetServiceInterface;
import com.google.api.ads.adwords.axis.v201502.cm.Campaign;
import com.google.api.ads.adwords.axis.v201502.cm.CampaignOperation;
import com.google.api.ads.adwords.axis.v201502.cm.CampaignReturnValue;
import com.google.api.ads.adwords.axis.v201502.cm.CampaignServiceInterface;
import com.google.api.ads.adwords.axis.v201502.cm.Money;
import com.google.api.ads.adwords.axis.v201502.cm.NetworkSetting;
import com.google.api.ads.adwords.axis.v201502.cm.Operator;
import com.google.api.ads.adwords.axis.v201502.cm.SharedBiddingStrategy;
import com.google.api.ads.adwords.axis.v201502.cm.TargetSpendBiddingScheme;
import com.google.api.ads.adwords.lib.client.AdWordsSession;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.client.auth.oauth2.Credential;
import java.rmi.RemoteException;
import javax.xml.rpc.ServiceException;
/**
 * This example adds a Shared Bidding Strategy and uses it to construct a campaign.
 * <p>
 * Flow: create a "Maximize Clicks" (TargetSpend) shared bidding strategy,
 * create an explicitly shared budget (unless an existing budget ID is
 * supplied), then create a search campaign referencing both by ID.
 */
public class UseSharedBiddingStrategy {
  // Optional: If you'd like to use an existing shared budget, assign a
  // shared budget ID here.
  private static final Long SHARED_BUDGET_ID = null;

  public static void main(String[] args) throws Exception {
    // Generate a refreshable OAuth2 credential from stored properties.
    Credential oAuth2Credential = new OfflineCredentials.Builder()
        .forApi(Api.ADWORDS)
        .fromFile()
        .build()
        .generateCredential();
    // Construct an AdWordsSession.
    AdWordsSession session = new AdWordsSession.Builder()
        .fromFile()
        .withOAuth2Credential(oAuth2Credential)
        .build();
    AdWordsServices adWordsServices = new AdWordsServices();
    runExample(adWordsServices, session, SHARED_BUDGET_ID);
  }

  /**
   * Runs the example: creates the bidding strategy, creates a shared budget if
   * {@code sharedBudgetId} is null, and builds the campaign using both IDs.
   *
   * @param adWordsServices the services factory
   * @param session the session to run the requests against
   * @param sharedBudgetId existing shared budget ID, or null to create one
   */
  public static void runExample(AdWordsServices adWordsServices, AdWordsSession session,
      Long sharedBudgetId) throws Exception {
    SharedBiddingStrategy sharedBiddingStrategy = createBiddingStrategy(adWordsServices, session);
    if (sharedBudgetId == null) {
      Budget budget = createSharedBudget(adWordsServices, session);
      sharedBudgetId = budget.getBudgetId();
    }
    createCampaignWithBiddingStrategy(adWordsServices, session, sharedBiddingStrategy.getId(),
        sharedBudgetId);
  }

  /**
   * Creates the shared "Maximize Clicks" (TargetSpend) bidding strategy.
   *
   * @param adWordsServices the services factory
   * @param session the session to run the request against
   * @return the newly created bidding strategy
   * @throws ApiException if the API request is rejected server-side
   * @throws RemoteException if the RPC transport fails
   * @throws ServiceException if the service client cannot be created
   */
  private static SharedBiddingStrategy createBiddingStrategy(AdWordsServices adWordsServices,
      AdWordsSession session)
      throws ApiException, RemoteException, ServiceException {
    // Get the BiddingStrategyService, which loads the required classes.
    BiddingStrategyServiceInterface biddingStrategyService =
        adWordsServices.get(session, BiddingStrategyServiceInterface.class);
    // Create a shared bidding strategy.
    SharedBiddingStrategy sharedBiddingStrategy = new SharedBiddingStrategy();
    sharedBiddingStrategy.setName("Maximize Clicks" + System.currentTimeMillis());
    TargetSpendBiddingScheme biddingScheme = new TargetSpendBiddingScheme();
    // Optionally set additional bidding scheme parameters.
    biddingScheme.setBidCeiling(new Money(null, 2000000L));
    biddingScheme.setSpendTarget(new Money(null, 20000000L));
    sharedBiddingStrategy.setBiddingScheme(biddingScheme);
    // Create operation.
    BiddingStrategyOperation operation = new BiddingStrategyOperation();
    operation.setOperand(sharedBiddingStrategy);
    operation.setOperator(Operator.ADD);
    BiddingStrategyOperation[] operations = new BiddingStrategyOperation[] {operation};
    BiddingStrategyReturnValue result = biddingStrategyService.mutate(operations);
    SharedBiddingStrategy newBiddingStrategy = result.getValue(0);
    System.out.printf(
        "Shared bidding strategy with name '%s' and ID %d of type %s was created.\n",
        newBiddingStrategy.getName(), newBiddingStrategy.getId(),
        newBiddingStrategy.getBiddingScheme().getBiddingSchemeType());
    return newBiddingStrategy;
  }

  /**
   * Creates an explicit budget to be used only to create the Campaign.
   *
   * @param adWordsServices the services factory
   * @param session the session to run the request against
   * @return the newly created shared budget
   * @throws ServiceException if the service client cannot be created
   * @throws ApiException if the API request is rejected server-side
   * @throws RemoteException if the RPC transport fails
   */
  private static Budget createSharedBudget(AdWordsServices adWordsServices,
      AdWordsSession session)
      throws ServiceException, ApiException, RemoteException {
    // Get the BudgetService, which loads the required classes.
    BudgetServiceInterface budgetService =
        adWordsServices.get(session, BudgetServiceInterface.class);
    // Create a shared budget.
    Budget budget = new Budget();
    budget.setName("Shared Interplanetary Budget #" + System.currentTimeMillis());
    budget.setPeriod(BudgetBudgetPeriod.DAILY);
    budget.setAmount(new Money(null, 50000000L));
    budget.setDeliveryMethod(BudgetBudgetDeliveryMethod.STANDARD);
    // Explicitly shared so multiple campaigns may reference it.
    budget.setIsExplicitlyShared(true);
    BudgetOperation operation = new BudgetOperation();
    operation.setOperand(budget);
    operation.setOperator(Operator.ADD);
    BudgetOperation[] operations = new BudgetOperation[] {operation};
    // Make the mutate request.
    BudgetReturnValue result = budgetService.mutate(operations);
    Budget newBudget = result.getValue(0);
    System.out.printf("Budget with name '%s', ID %d was created.\n", newBudget.getName(),
        newBudget.getBudgetId());
    return newBudget;
  }

  /**
   * Create a Campaign with a Shared Bidding Strategy.
   *
   * @param adWordsServices the services factory
   * @param session the session to run the request against
   * @param biddingStrategyId the bidding strategy id to use
   * @param sharedBudgetId the shared budget id to use
   * @return the newly created campaign
   * @throws ApiException if the API request is rejected server-side
   * @throws RemoteException if the RPC transport fails
   * @throws ServiceException if the service client cannot be created
   */
  private static Campaign createCampaignWithBiddingStrategy(
      AdWordsServices adWordsServices, AdWordsSession session, Long biddingStrategyId,
      Long sharedBudgetId) throws ApiException, RemoteException, ServiceException {
    // Get the CampaignService, which loads the required classes.
    CampaignServiceInterface campaignService =
        adWordsServices.get(session, CampaignServiceInterface.class);
    // Create campaign.
    Campaign campaign = new Campaign();
    campaign.setName("Interplanetary Cruise #" + System.currentTimeMillis());
    // Set the budget.
    Budget budget = new Budget();
    budget.setBudgetId(sharedBudgetId);
    campaign.setBudget(budget);
    // Set bidding strategy (required).
    BiddingStrategyConfiguration biddingStrategyConfiguration = new BiddingStrategyConfiguration();
    biddingStrategyConfiguration.setBiddingStrategyId(biddingStrategyId);
    campaign.setBiddingStrategyConfiguration(biddingStrategyConfiguration);
    // Set advertising channel type (required).
    campaign.setAdvertisingChannelType(AdvertisingChannelType.SEARCH);
    // Set network targeting (recommended).
    NetworkSetting networkSetting = new NetworkSetting();
    networkSetting.setTargetGoogleSearch(true);
    networkSetting.setTargetSearchNetwork(true);
    networkSetting.setTargetContentNetwork(true);
    campaign.setNetworkSetting(networkSetting);
    // Create operation.
    CampaignOperation operation = new CampaignOperation();
    operation.setOperand(campaign);
    operation.setOperator(Operator.ADD);
    CampaignReturnValue result = campaignService.mutate(new CampaignOperation[] {operation});
    Campaign newCampaign = result.getValue(0);
    System.out.printf("Campaign with name '%s', ID %d and bidding scheme ID %d was created.\n",
        newCampaign.getName(), newCampaign.getId(),
        newCampaign.getBiddingStrategyConfiguration().getBiddingStrategyId());
    return newCampaign;
  }
}
| apache-2.0 |
skycow/Open-Storefront | server/openstorefront/openstorefront-web/src/main/java/edu/usu/sdl/openstorefront/web/init/ApplicationInit.java | 1105 | /*
* Copyright 2014 Space Dynamics Laboratory - Utah State University Research Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.usu.sdl.openstorefront.web.init;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;
/**
 * Servlet context lifecycle listener, auto-registered via {@code @WebListener}.
 * Both callbacks are currently empty; the class serves as a hook point for
 * application start-up and shutdown work.
 *
 * @author dshurtleff
 */
@WebListener
public class ApplicationInit
        implements ServletContextListener
{
    @Override
    public void contextInitialized(ServletContextEvent sce)
    {
        // Intentionally empty: no start-up work required yet.
    }

    @Override
    public void contextDestroyed(ServletContextEvent sce)
    {
        // Intentionally empty: no shutdown work required yet.
    }
}
| apache-2.0 |
reynoldsm88/drools | kie-ci/src/test/java/org/kie/scanner/KieModuleIncrementalCompilationTest.java | 10918 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.scanner;
import java.util.Collection;
import java.util.HashMap;
import org.drools.compiler.kie.builder.impl.MessageImpl;
import org.junit.Test;
import org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.conf.EqualityBehaviorOption;
import org.kie.api.conf.EventProcessingOption;
import org.kie.internal.builder.IncrementalResults;
import org.kie.internal.builder.InternalKieBuilder;
import static org.junit.Assert.*;
public class KieModuleIncrementalCompilationTest extends AbstractKieCiTest {
@Test
public void testCheckMetaDataAfterIncrementalDelete() throws Exception {
String drl1 = "package org.kie.scanner\n" +
"rule R1 when\n" +
" String()\n" +
"then\n" +
"end\n";
String drl2 = "package org.kie.scanner\n" +
"rule R2_2 when\n" +
" String( )\n" +
"then\n" +
"end\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = ks.newKieFileSystem()
.write( "src/main/resources/r1.drl", drl1 )
.write( "src/main/resources/r2.drl", drl2 );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 2, getRuleNames( kieBuilder ).get( "org.kie.scanner" ).size() );
kfs.delete( "src/main/resources/r2.drl" );
IncrementalResults addResults = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/r2.drl" ).build();
assertEquals( 1, getRuleNames( kieBuilder ).get( "org.kie.scanner" ).size() );
}
private HashMap<String, Collection<String>> getRuleNames( KieBuilder kieBuilder ) {
KieModuleMetaData kieModuleMetaData = getKieModuleMetaData( kieBuilder );
HashMap<String, Collection<String>> ruleNames = new HashMap<String, Collection<String>>();
for ( String packageName : kieModuleMetaData.getPackages() ) {
ruleNames.put( packageName, kieModuleMetaData.getRuleNamesInPackage( packageName ) );
}
return ruleNames;
}
private KieModuleMetaData getKieModuleMetaData( KieBuilder kieBuilder ) {
return KieModuleMetaData.Factory.newKieModuleMetaData( ( (InternalKieBuilder) kieBuilder ).getKieModuleIgnoringErrors() );
}
@Test
public void testIncrementalCompilationFirstBuildHasErrors() throws Exception {
KieServices ks = KieServices.Factory.get();
//Malformed POM - No Version information
ReleaseId releaseId = ks.newReleaseId( "org.kie", "incremental-test-with-invalid pom", "" );
KieFileSystem kfs = createKieFileSystemWithKProject( ks );
kfs.writePomXML( getPom( releaseId ) );
//Valid
String drl1 =
"rule R1 when\n" +
" $s : String()\n" +
"then\n" +
"end\n";
//Invalid
String drl2 =
"rule R2 when\n" +
" $s : Strin( )\n" +
"then\n" +
"end\n";
//Write Rule 1 - No DRL errors, but POM is in error
kfs.write( "src/main/resources/KBase1/r1.drl", drl1 );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 1,
kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
//Add file with error - expect 1 "added" error message
kfs.write( "src/main/resources/KBase1/r2.drl", drl2 );
IncrementalResults addResults = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/KBase1/r2.drl" ).build();
assertEquals( 1, addResults.getAddedMessages().size() );
assertEquals( 0, addResults.getRemovedMessages().size() );
}
@Test
public void checkIncrementalCompilationWithRuleFunctionRule() throws Exception {
// First version of the rule does not use the function...
String rule_1 = "package org.kie.scanner\n" +
"rule R1 when\n" +
"   String()\n" +
"then\n" +
"end\n";
// ...the second version calls MyFunction() in its consequence.
String rule_2 = "package org.kie.scanner\n" +
"rule R1 when\n" +
"   String()\n" +
"then\n" +
"   System.out.println(MyFunction());\n" +
"end\n";
String function = "package org.kie.scanner\n" +
"function int MyFunction() {\n" +
"   return 1;\n" +
"}\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = ks.newKieFileSystem();
// Initial build contains only the rule: no errors expected.
kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule_1 );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 0,
kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
// Incrementally add the (unused) function: still no errors.
kfs.write( "src/main/resources/org/kie/scanner/function.drl", function );
IncrementalResults addResults1 = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/function.drl" ).build();
assertEquals( 0, addResults1.getAddedMessages().size() );
assertEquals( 0, addResults1.getRemovedMessages().size() );
// Incrementally update the rule to call the function: compiles cleanly.
kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule_2 );
IncrementalResults addResults2 = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/rule.drl" ).build();
assertEquals( 0, addResults2.getAddedMessages().size() );
assertEquals( 0, addResults2.getRemovedMessages().size() );
}
@Test
public void checkIncrementalCompilationWithRuleThenFunction() throws Exception {
// The rule's consequence calls MyFunction(), which does not exist yet
// at the time of the first build.
String rule = "package org.kie.scanner\n" +
"rule R1 when\n" +
"   String()\n" +
"then\n" +
"   System.out.println(MyFunction());\n" +
"end\n";
String function = "package org.kie.scanner\n" +
"function int MyFunction() {\n" +
"   return 1;\n" +
"}\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = ks.newKieFileSystem();
// First build fails with one error: the function is missing.
kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 1,
kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
// Adding the function incrementally should remove the earlier error
// (1 removed message) without introducing new ones.
kfs.write( "src/main/resources/org/kie/scanner/function.drl", function );
IncrementalResults addResults1 = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/function.drl" ).build();
assertEquals( 0, addResults1.getAddedMessages().size() );
assertEquals( 1, addResults1.getRemovedMessages().size() );
}
@Test
public void checkIncrementalCompilationWithFunctionThenRule() throws Exception {
// The rule's consequence calls MyFunction(); this time the function is
// compiled first, so no build should ever fail.
String rule = "package org.kie.scanner\n" +
"rule R1 when\n" +
"   String()\n" +
"then\n" +
"   System.out.println(MyFunction());\n" +
"end\n";
String function = "package org.kie.scanner\n" +
"function int MyFunction() {\n" +
"   return 1;\n" +
"}\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = ks.newKieFileSystem();
// First build contains only the function: no errors.
kfs.write( "src/main/resources/org/kie/scanner/function.drl", function );
KieBuilder kieBuilder = ks.newKieBuilder( kfs ).buildAll();
assertEquals( 0,
kieBuilder.getResults().getMessages( org.kie.api.builder.Message.Level.ERROR ).size() );
// Incrementally adding the rule resolves against the existing function.
kfs.write( "src/main/resources/org/kie/scanner/rule.drl", rule );
IncrementalResults addResults = ( (InternalKieBuilder) kieBuilder ).createFileSet( "src/main/resources/org/kie/scanner/rule.drl" ).build();
assertEquals( 0, addResults.getAddedMessages().size() );
assertEquals( 0, addResults.getRemovedMessages().size() );
}
@Test
public void checkIncrementalCompilationWithMultipleKieBases() throws Exception {
String rule = "package org.kie.scanner\n" +
"rule R1 when\n" +
"then\n" +
"end\n";
// R2 references Cheese without importing it, so it fails to compile
// in every KieBase that contains it.
String invalidRule = "package org.kie.scanner\n" +
"rule R2 when\n" +
"   Cheese()\n" + // missing import
"then\n" +
"end\n";
KieServices ks = KieServices.Factory.get();
KieFileSystem kfs = createKieFileSystemWithTwoKBases(ks);
kfs.write("src/main/resources/org/kie/scanner/rule.drl",
rule);
KieBuilder kieBuilder = ks.newKieBuilder(kfs).buildAll();
assertEquals(0,
kieBuilder.getResults().getMessages().size());
kfs.write("src/main/resources/org/kie/scanner/invalidRule.drl",
invalidRule);
// Incremental build of the broken rule: one error per KieBase
// (the fixture declares two KieBases, hence two messages).
IncrementalResults addResults = ((InternalKieBuilder) kieBuilder).createFileSet("src/main/resources/org/kie/scanner/invalidRule.drl").build();
assertEquals(2, addResults.getAddedMessages().size());
// Every added message must carry the name of the KieBase it came from.
addResults
.getAddedMessages()
.stream()
.map(m -> (MessageImpl) m )
.forEach(m -> assertNotNull(m.getKieBaseName()));
}
/**
 * Creates a KieFileSystem whose kmodule.xml declares two KieBases ("default",
 * marked as default, and "kbase1"), both configured with EQUALITY behavior and
 * STREAM event processing.
 */
private KieFileSystem createKieFileSystemWithTwoKBases(final KieServices ks) {
    final KieModuleModel module = ks.newKieModuleModel();

    module.newKieBaseModel("default")
          .setDefault(true)
          .setEqualsBehavior(EqualityBehaviorOption.EQUALITY)
          .setEventProcessingMode(EventProcessingOption.STREAM);

    module.newKieBaseModel("kbase1")
          .setDefault(false)
          .setEqualsBehavior(EqualityBehaviorOption.EQUALITY)
          .setEventProcessingMode(EventProcessingOption.STREAM);

    final KieFileSystem fileSystem = ks.newKieFileSystem();
    fileSystem.writeKModuleXML(module.toXML());
    return fileSystem;
}
}
| apache-2.0 |
suraj-raturi/pinpoint | profiler/src/main/java/com/navercorp/pinpoint/profiler/sender/grpc/ReconnectExecutor.java | 3111 | /*
* Copyright 2019 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.profiler.sender.grpc;
import com.navercorp.pinpoint.common.util.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Schedules {@link ReconnectJob}s on a shared scheduler, applying each job's
 * own backoff delay. After {@link #close()} further requests are silently
 * dropped.
 *
 * @author Woonduk Kang(emeroad)
 */
public class ReconnectExecutor {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // once true, execute0() becomes a no-op; never reset
    private volatile boolean shutdown;

    private final ScheduledExecutorService scheduledExecutorService;

    // counts schedule attempts rejected by the underlying executor
    private final AtomicLong rejectedCounter = new AtomicLong();

    public ReconnectExecutor(ScheduledExecutorService scheduledExecutorService) {
        this.scheduledExecutorService = Assert.requireNonNull(scheduledExecutorService, "scheduledExecutorService");
    }

    /**
     * Schedules the given command, which must be a {@link ReconnectJob}, after
     * the job's next backoff delay.
     */
    private void execute0(Runnable command) {
        Assert.requireNonNull(command, "command");

        if (shutdown) {
            logger.debug("already shutdown");
            return;
        }
        if (!(command instanceof ReconnectJob)) {
            throw new IllegalArgumentException("unknown command type " + command);
        }
        final ReconnectJob job = (ReconnectJob) command;
        try {
            scheduledExecutorService.schedule(job, job.nextBackoffNanos(), TimeUnit.NANOSECONDS);
        } catch (RejectedExecutionException e) {
            // best-effort: record and log the rejection instead of propagating
            final long failCount = rejectedCounter.incrementAndGet();
            logger.info("{} reconnectJob scheduled fail {}", command, failCount);
        }
    }

    public void close() {
        shutdown = true;
    }

    /**
     * Wraps the given job with exponential backoff and returns a Reconnector
     * whose executions are dispatched back through this executor.
     */
    public Reconnector newReconnector(Runnable reconnectJob) {
        Assert.requireNonNull(reconnectJob, "reconnectJob");

        if (logger.isInfoEnabled()) {
            logger.info("newReconnector(reconnectJob = [{}])", reconnectJob);
        }
        // routes every (re)execution of the job through execute0()
        final Executor dispatch = new Executor() {
            @Override
            public void execute(Runnable command) {
                ReconnectExecutor.this.execute0(command);
            }
        };
        final ReconnectJob wrappedJob = wrapReconnectJob(reconnectJob);
        return new ReconnectAdaptor(dispatch, wrappedJob);
    }

    private ReconnectJob wrapReconnectJob(Runnable runnable) {
        return new ExponentialBackoffReconnectJob(runnable);
    }
}
| apache-2.0 |
apache/solr | solr/core/src/java/org/apache/solr/internal/csv/CSVUtils.java | 4128 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.internal.csv;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
/** Utility methods for dealing with CSV files */
public class CSVUtils {

  private static final String[] EMPTY_STRING_ARRAY = new String[0];
  private static final String[][] EMPTY_DOUBLE_STRING_ARRAY = new String[0][0];

  /**
   * <code>CSVUtils</code> instances should NOT be constructed in standard programming.
   *
   * <p>This constructor is public to permit tools that require a JavaBean instance to operate.
   */
  public CSVUtils() {}

  /**
   * Converts an array of string values into a single CSV line. All <code>null</code> values are
   * converted to the string <code>"null"</code>, all strings equal to <code>"null"</code> will
   * additionally get quotes around.
   *
   * @param values the value array; the array itself is left unmodified
   * @return the CSV string, will be an empty string if the length of the value array is 0
   */
  public static String printLine(String[] values, CSVStrategy strategy) {
    // set up a CSVUtils
    StringWriter stringWriter = new StringWriter();
    CSVPrinter csvPrinter = new CSVPrinter(stringWriter, strategy);

    // Sanitize into a defensive copy so the caller's array is not mutated.
    // (The previous implementation rewrote the caller's entries in place,
    // a side effect not promised by the javadoc.)
    String[] sanitized = new String[values.length];
    for (int i = 0; i < values.length; i++) {
      if (values[i] == null) {
        sanitized[i] = "null";
      } else if (values[i].equals("null")) {
        sanitized[i] = "\"null\"";
      } else {
        sanitized[i] = values[i];
      }
    }

    // convert to CSV
    try {
      csvPrinter.println(sanitized);
    } catch (IOException e) {
      // cannot happen: StringWriter never raises IOException
    }

    // as the resulting string has \r\n at the end, we will trim that away
    return stringWriter.toString().trim();
  }

  // ======================================================
  //  static parsers
  // ======================================================

  /**
   * Parses the given String according to the default {@link CSVStrategy}.
   *
   * @param s CSV String to be parsed.
   * @return parsed String matrix (which is never null)
   * @throws IOException in case of error
   */
  public static String[][] parse(String s) throws IOException {
    if (s == null) {
      throw new IllegalArgumentException("Null argument not allowed.");
    }
    String[][] result = (new CSVParser(new StringReader(s))).getAllValues();
    if (result == null) {
      // since CSVStrategy ignores empty lines an empty array is returned
      // (i.e. not "result = new String[][] {{""}};")
      result = EMPTY_DOUBLE_STRING_ARRAY;
    }
    return result;
  }

  /**
   * Parses the first line only according to the default {@link CSVStrategy}.
   *
   * <p>Parsing empty string will be handled as valid records containing zero elements, so the
   * following property holds: parseLine("").length == 0.
   *
   * @param s CSV String to be parsed.
   * @return parsed String vector (which is never null)
   * @throws IOException in case of error
   */
  public static String[] parseLine(String s) throws IOException {
    if (s == null) {
      throw new IllegalArgumentException("Null argument not allowed.");
    }
    // uh,jh: make sure that parseLine("").length == 0
    if (s.isEmpty()) {
      return EMPTY_STRING_ARRAY;
    }
    return (new CSVParser(new StringReader(s))).getLine();
  }
}
| apache-2.0 |
pradyutsarma/autosleep | spring-apps/autowakeup-proxy/src/main/java/org/cloudfoundry/autosleep/ui/proxy/HttpClientConfiguration.java | 3689 | /*
* Autosleep
* Copyright (C) 2016 Orange
* Authors: Benjamin Einaudi benjamin.einaudi@orange.com
* Arnaud Ruffin arnaud.ruffin@orange.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.autosleep.ui.proxy;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.web.client.RestTemplate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
@Configuration
public class HttpClientConfiguration {

    /** When true, the returned RestTemplate accepts any certificate and any hostname. */
    @Value("${autowakeup.skip.ssl.validation:false}")
    private boolean skipSslValidation;

    /**
     * Builds an SSLContext initialized with the single supplied trust manager.
     *
     * @param trustManager the trust manager to install
     * @return the initialized context
     */
    private SSLContext buildSslContext(TrustManager trustManager) {
        try {
            // "TLS" is the standard JSSE protocol name; the legacy "SSL" alias
            // refers to the insecure SSLv3 protocol family.
            SSLContext sslContext = SSLContext.getInstance("TLS");
            sslContext.init(null, new TrustManager[]{trustManager}, null);
            return sslContext;
        } catch (KeyManagementException | NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns a trust manager that accepts every certificate chain. Only used
     * when SSL validation is explicitly skipped.
     */
    private TrustManager buildTrustAllCerts() {
        return new X509TrustManager() {

            @Override
            public void checkClientTrusted(X509Certificate[] certificates, String client) throws CertificateException {
            }

            @Override
            public void checkServerTrusted(X509Certificate[] certificates, String client) throws CertificateException {
            }

            @Override
            public X509Certificate[] getAcceptedIssuers() {
                // The X509TrustManager contract requires a non-null array;
                // returning null can cause NullPointerExceptions in callers
                // that iterate over the accepted issuers.
                return new X509Certificate[0];
            }
        };
    }

    /** Returns a verifier that accepts every hostname. */
    private HostnameVerifier buildVerifyNoHostname() {
        return (hostname, session) -> true;
    }

    /**
     * Provides the application's RestTemplate. With the default configuration a
     * plain RestTemplate is returned; when {@code autowakeup.skip.ssl.validation}
     * is true, HTTPS connections skip both certificate and hostname checks.
     */
    @Bean
    public RestTemplate restTemplate() {
        if (!skipSslValidation) {
            return new RestTemplate();
        } else {
            final HostnameVerifier hostnameVerifier = buildVerifyNoHostname();
            final SSLContext sslContext = buildSslContext(buildTrustAllCerts());
            return new RestTemplate(new SimpleClientHttpRequestFactory() {
                @Override
                protected void prepareConnection(HttpURLConnection connection, String httpMethod) throws IOException {
                    if (connection instanceof HttpsURLConnection) {
                        HttpsURLConnection secureConnection = (HttpsURLConnection) connection;
                        secureConnection.setHostnameVerifier(hostnameVerifier);
                        secureConnection.setSSLSocketFactory(sslContext.getSocketFactory());
                    }
                    super.prepareConnection(connection, httpMethod);
                }
            });
        }
    }
}
| apache-2.0 |
antoinesd/weld-core | tests-arquillian/src/test/java/org/jboss/weld/tests/event/observer/transactional/Pomeranian.java | 2906 | /*
* JBoss, Home of Professional Open Source
* Copyright 2010, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.event.observer.transactional;
import static javax.ejb.TransactionManagementType.BEAN;
import static javax.enterprise.event.TransactionPhase.AFTER_COMPLETION;
import static javax.enterprise.event.TransactionPhase.AFTER_FAILURE;
import static javax.enterprise.event.TransactionPhase.AFTER_SUCCESS;
import static javax.enterprise.event.TransactionPhase.BEFORE_COMPLETION;
import static javax.enterprise.event.TransactionPhase.IN_PROGRESS;
import java.io.Serializable;
import javax.annotation.Priority;
import javax.ejb.Stateful;
import javax.ejb.TransactionManagement;
import javax.enterprise.context.SessionScoped;
import javax.enterprise.event.Observes;
/**
 * Stateful session bean with bean-managed transactions whose observer methods
 * record, via {@code Actions.add(...)}, the transaction phase in which each
 * {@code Bark} event is delivered.
 */
@Stateful
@TransactionManagement(BEAN)
@Tame
@SessionScoped
@SuppressWarnings("serial")
public class Pomeranian implements PomeranianInterface, Serializable {
@Override
public void observeInProgress(@Observes(during = IN_PROGRESS) Bark event) {
Actions.add(IN_PROGRESS);
}
@Override
public void observeAfterCompletion(@Observes(during = AFTER_COMPLETION) Bark someEvent) {
Actions.add(AFTER_COMPLETION);
}
@Override
public void observeAfterSuccess(@Observes(during = AFTER_SUCCESS) Bark event) {
Actions.add(AFTER_SUCCESS);
}
@Override
public void observeAfterSuccessWithHighPriority(@Priority(1) @Observes(during = AFTER_SUCCESS) Bark event) {
// Lower @Priority values are notified earlier; "1" tags the recorded phase.
Actions.add(AFTER_SUCCESS + "1");
}
@Override
public void observeAfterSuccessWithLowPriority(@Priority(100) @Observes(during = AFTER_SUCCESS) Bark event) {
Actions.add(AFTER_SUCCESS + "100");
}
@Override
public void observeAfterFailure(@Observes(during = AFTER_FAILURE) Bark event) {
Actions.add(AFTER_FAILURE);
}
@Override
public void observeBeforeCompletion(@Observes(during = BEFORE_COMPLETION) Bark event) {
Actions.add(BEFORE_COMPLETION);
}
@Override
public void observeAndFail(@Observes(during=BEFORE_COMPLETION) @Gnarly Bark event) throws FooException {
// Records the phase and then deliberately fails so callers can check how
// exceptions thrown by before-completion observers are propagated.
Actions.add(BEFORE_COMPLETION);
throw new FooException();
}
}
| apache-2.0 |
Microsoft/BeanSpy | test/code/JEE/Common/src/com/interopbridges/scx/webservices/Endpoint.java | 2617 | /**
* Copyright (c) Microsoft Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0.
*
* THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
* OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
* ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
* MERCHANTABLITY OR NON-INFRINGEMENT.
*
* See the Apache Version 2.0 License for specific language governing
* permissions and limitations under the License.
*/
package com.interopbridges.scx.webservices;
/**
*
* <p>
* Concrete representation of an Endpoint to match what is described in the web
* service's WSDL.
* </p>
*
* <p>
*
* <pre>
* <service name="CalculatorService">
* <port name="CalculatorPort" binding="tns:CalculatorPortBinding">
* <soap:address location="http://scxom64-ws7-02:9080/WebServiceProject/CalculatorService" />
* </port>
* </service>
* </pre>
*
* </p>
*
* <p>
* Typically this might look like:
* <ol>
* <li><b>http://scxom64-ws7-02:9080/WebServiceProject/CalculatorService</b></li>
* <li><b>http://scxom-ws7-02:8080/axis2/services/DinnerFinderService</li>
* DinnerFinderServiceHttpSoap11Endpoint/</b>
* </ol>>
* </p>
*
* @author Christopher Crammond
*/
public class Endpoint implements EndpointMBean {

    /**
     * Key describing the (interopbridges) JMX type of MBean.
     */
    private final String jmxType = "endpoint";

    /**
     * Full URL of the endpoint address. This should match the soap:address's
     * location attribute from the WSDL.
     */
    private String url;

    /**
     * Empty Constructor. It is considered to be a best practice to create this
     * default constructor rather than relying on the compiler to auto-generate
     * it.
     */
    public Endpoint() {
        this("");
    }

    /**
     * Preferred Constructor
     *
     * @param url
     *            String representing the full URL of the endpoint address.
     */
    public Endpoint(String url) {
        this.url = url;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.interopbridges.scx.webservices.EndpointMBean#getUrl()
     */
    public String getUrl() {
        return url;
    }

    /*
     * (non-Javadoc)
     *
     * @see com.interopbridges.scx.webservices.IMBean#getJmxType()
     */
    public String getJmxType() {
        return jmxType;
    }
}
| apache-2.0 |
charliemblack/geode | geode-core/src/main/java/org/apache/geode/cache/snapshot/SnapshotOptions.java | 3388 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.snapshot;
import java.io.Serializable;
import org.apache.geode.internal.cache.snapshot.SnapshotFileMapper;
/**
 * Provides a way to configure the behavior of snapshot operations. The default options are:
 * <dl>
 * <dt>filter</dt>
 * <dd>null</dd>
 * </dl>
 *
 * @param <K> the cache entry key type
 * @param <V> the cache entry value type
 *
 * @since GemFire 7.0
 */
public interface SnapshotOptions<K, V> extends Serializable {
/**
 * Defines the available snapshot file formats.
 *
 * @since GemFire 7.0
 */
enum SnapshotFormat {
/** an optimized binary format specific to GemFire */
GEMFIRE
}
/**
 * Sets a filter to apply to snapshot entries. Entries that are accepted by the filter will be
 * included in import and export operations.
 *
 * @param filter the filter to apply, or null to remove the filter
 * @return the snapshot options
 */
SnapshotOptions<K, V> setFilter(SnapshotFilter<K, V> filter);
/**
 * Returns the filter to be applied to snapshot entries. Entries that are accepted by the filter
 * will be included in import and export operations.
 *
 * @return the filter, or null if the filter is not set
 */
SnapshotFilter<K, V> getFilter();
/**
 * Sets whether to invoke callbacks when loading a snapshot. The default is false.
 *
 * @param invokeCallbacks true to invoke callbacks while loading snapshot entries, false to skip
 *        them
 *
 * @return the snapshot options
 */
SnapshotOptions<K, V> invokeCallbacks(boolean invokeCallbacks);
/**
 * Returns whether loading a snapshot causes callbacks to be invoked
 *
 * @return whether loading a snapshot causes callbacks to be invoked
 */
boolean shouldInvokeCallbacks();
/**
 * Returns true if the snapshot operation will proceed in parallel.
 *
 * @return true if the parallel mode has been enabled
 *
 * @since Geode 1.3
 */
boolean isParallelMode();
/**
 * Enables parallel mode for snapshot export, which will cause each member of a partitioned region
 * to save its local data set (ignoring redundant copies) to a separate snapshot file.
 *
 * <p>
 * Parallelizing snapshot operations may yield significant performance improvements for large data
 * sets. This is particularly true when each member is writing to separate physical disks.
 * <p>
 * This flag is ignored for replicated regions.
 *
 * @param parallel true if the snapshot operations will be performed in parallel
 * @return the snapshot options
 *
 * @see SnapshotFileMapper
 *
 * @since Geode 1.3
 */
SnapshotOptions<K, V> setParallelMode(boolean parallel);
}
| apache-2.0 |
antoinesd/weld-core | impl/src/main/java/org/jboss/weld/util/bean/SerializableForwardingInjectionPoint.java | 1478 | /*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.util.bean;
import java.io.Serializable;
import javax.enterprise.inject.spi.InjectionPoint;
import org.jboss.weld.injection.ForwardingInjectionPoint;
import org.jboss.weld.serialization.InjectionPointHolder;
/**
 * A serializable forwarding wrapper for an {@link InjectionPoint}. The delegate
 * is stored in an {@link InjectionPointHolder}, which takes care of carrying
 * the injection point across serialization; all InjectionPoint methods are
 * forwarded to the held instance.
 */
public class SerializableForwardingInjectionPoint extends ForwardingInjectionPoint implements Serializable {

private static final long serialVersionUID = 7803445899943317029L;

// serializable holder through which the wrapped InjectionPoint is resolved
private final InjectionPointHolder ip;

/**
 * @param contextId the container context id the injection point belongs to
 * @param ip the injection point to wrap
 */
public SerializableForwardingInjectionPoint(String contextId, InjectionPoint ip) {
this.ip = new InjectionPointHolder(contextId, ip);
}

@Override
protected InjectionPoint delegate() {
return ip.get();
}
}
| apache-2.0 |