text
stringlengths 7
1.01M
|
|---|
/*
* Copyright 2020 Fraunhofer Institute for Software and Systems Engineering
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.dataspaceconnector.service.usagecontrol;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.time.format.DateTimeParseException;
import io.dataspaceconnector.config.ConnectorConfiguration;
import io.dataspaceconnector.config.UsageControlFramework;
import io.dataspaceconnector.exception.ResourceNotFoundException;
import io.dataspaceconnector.service.ids.DeserializationService;
import io.dataspaceconnector.service.resource.AgreementService;
import io.dataspaceconnector.service.resource.ArtifactService;
import io.dataspaceconnector.util.ContractUtils;
import io.dataspaceconnector.util.RuleUtils;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.extern.log4j.Log4j2;
import org.springframework.data.domain.Pageable;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
/**
* This class implements an automated, scheduled policy check.
*/
@EnableScheduling
@Log4j2
@RequiredArgsConstructor
@Service
public class ScheduledDataRemoval {

    /**
     * The delay of the scheduler in milliseconds.
     */
    private static final int FIXED_DELAY = 60_000;

    /**
     * Service for configuring policy settings.
     */
    private final @NonNull ConnectorConfiguration connectorConfig;

    /**
     * Service for ids deserialization.
     */
    private final @NonNull DeserializationService deserializationService;

    /**
     * Service for handling stored agreements.
     */
    private final @NonNull AgreementService agreementService;

    /**
     * Service for updating artifacts.
     */
    private final @NonNull ArtifactService artifactService;

    /**
     * Periodically checks agreements for data deletion. Only active when the
     * internal usage control framework is configured.
     */
    @Scheduled(fixedDelay = FIXED_DELAY)
    public void schedule() {
        try {
            if (connectorConfig.getUcFramework() == UsageControlFramework.INTERNAL) {
                if (log.isInfoEnabled()) {
                    log.info("Scanning agreements...");
                }
                scanAgreements();
            }
        } catch (IllegalArgumentException | DateTimeParseException | ResourceNotFoundException e) {
            if (log.isWarnEnabled()) {
                log.warn("Failed to check policy. [exception=({})]", e.getMessage());
            }
        }
    }

    /**
     * Checks all known agreements for artifacts that have to be deleted.
     *
     * @throws DateTimeParseException If a date from a policy cannot be parsed.
     * @throws IllegalArgumentException If the rule could not be deserialized.
     * @throws ResourceNotFoundException If the data could not be deleted.
     */
    private void scanAgreements() throws DateTimeParseException, IllegalArgumentException,
            ResourceNotFoundException {
        for (final var agreement : agreementService.getAll(Pageable.unpaged())) {
            final var idsAgreement
                    = deserializationService.getContractAgreement(agreement.getValue());
            for (final var rule : ContractUtils.extractRulesFromContract(idsAgreement)) {
                // Only rules with post duties may require data removal.
                if (RuleUtils.checkRuleForPostDuties(rule)) {
                    removeDataFromArtifact(rule.getTarget());
                }
            }
        }
    }

    /**
     * Delete data by artifact id.
     *
     * @param target The artifact id.
     * @throws ResourceNotFoundException If the artifact update fails.
     */
    private void removeDataFromArtifact(final URI target) throws ResourceNotFoundException {
        final var artifactId = artifactService.identifyByRemoteId(target);
        if (artifactId.isPresent()) {
            final var id = artifactId.get();
            try {
                // Replace the artifact's data with an empty stream.
                artifactService.setData(id, InputStream.nullInputStream());
                if (log.isDebugEnabled()) {
                    log.debug("Removed data from artifact. [target=({})]", id);
                }
            } catch (IOException e) {
                // Best effort: log the failure (including the cause) and keep
                // scanning the remaining agreements.
                if (log.isWarnEnabled()) {
                    log.warn("Failed to remove data from artifact. [target=({}), "
                            + "exception=({})]", id, e.getMessage());
                }
            }
        }
    }
}
|
package com.redhat.samples.ws;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.xml.ws.RequestWrapper;
import javax.xml.ws.ResponseWrapper;
import com.redhat.samples.ws.model.Entity;
/**
 * JAX-WS service endpoint interface for the sample "SimpleService" web
 * service. Request and response payloads are wrapped in the generated
 * {@code SimpleType} / {@code SimpleResponseType} classes.
 */
@WebService(targetNamespace = "http://www.redhat.com/samples/ws/SimpleService/", name = "SimpleService")
public interface SimpleService {

    /**
     * Invokes the {@code test} operation of the service.
     *
     * @param in the request payload string
     * @return the entity produced by the service
     */
    @WebResult(name = "entity", targetNamespace = "")
    @RequestWrapper(localName = "SimpleType", targetNamespace = "http://www.redhat.com/samples/ws/SimpleService/", className = "com.redhat.samples.ws.request.SimpleType")
    @WebMethod(operationName = "test", action = "http://www.redhat.com/samples/ws/SimpleService/test")
    @ResponseWrapper(localName = "SimpleResponseType", targetNamespace = "http://www.redhat.com/samples/ws/SimpleService/", className = "com.redhat.samples.ws.response.SimpleResponseType")
    Entity test(@WebParam(name = "in", targetNamespace = "") String in);
}
|
/*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2012 Eric Lafortune (eric@graphics.cornell.edu)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.classfile.util;
import proguard.util.*;
import java.io.PrintStream;
import java.util.List;
/**
* This class prints out and counts warnings.
*
* @author Eric Lafortune
*/
public class WarningPrinter
{
    private final PrintStream printStream;
    private final StringMatcher classFilter;

    private int warningCount;


    /**
     * Creates a new WarningPrinter that prints to the System.err print stream.
     */
    public WarningPrinter()
    {
        this(System.err);
    }


    /**
     * Creates a new WarningPrinter that prints to the given print stream.
     */
    public WarningPrinter(PrintStream printStream)
    {
        this(printStream, null);
    }


    /**
     * Creates a new WarningPrinter that prints to the given print stream,
     * except if the names of any involved classes matches the given filter.
     */
    public WarningPrinter(PrintStream printStream, List classFilter)
    {
        this.printStream = printStream;
        if (classFilter == null)
        {
            this.classFilter = null;
        }
        else
        {
            this.classFilter =
                new ListParser(new ClassNameParser()).parse(classFilter);
        }
    }


    /**
     * Prints out the given warning and increments the warning count, if
     * the given class name passes the class name filter.
     */
    public void print(String className, String warning)
    {
        if (accepts(className))
        {
            print(warning);
        }
    }


    /**
     * Prints out the given warning and increments the warning count, if
     * the given class names pass the class name filter.
     */
    public void print(String className1, String className2, String warning)
    {
        if (accepts(className1, className2))
        {
            print(warning);
        }
    }


    /**
     * Returns whether the given class name passes the class name filter.
     */
    public boolean accepts(String className)
    {
        if (classFilter == null)
        {
            return true;
        }

        return !classFilter.matches(className);
    }


    /**
     * Returns whether the given class names pass the class name filter.
     */
    public boolean accepts(String className1, String className2)
    {
        return accepts(className1) &&
               accepts(className2);
    }


    /**
     * Returns the number of warnings printed so far.
     */
    public int getWarningCount()
    {
        return warningCount;
    }


    /**
     * Prints out the given warning and increments the warning count.
     */
    private void print(String message)
    {
        printStream.println(message);
        warningCount++;
    }
}
|
package com.lzhlyle.leetcode.self.no546;
public class RemoveBoxes_DP {

    /**
     * LeetCode 546 "Remove Boxes": removing a run of m equal boxes scores
     * m * m points; returns the maximum total score obtainable.
     *
     * @param boxes colors of the boxes, left to right
     * @return the maximum achievable score (0 for null/empty input)
     */
    public int removeBoxes(int[] boxes) {
        if (boxes == null || boxes.length == 0) return 0;
        int n = boxes.length;
        return dfs(boxes, new int[n][n][n], 0, n - 1, 0);
    }

    // dp[l][r][k]: best score for arr[l..r] given k extra boxes equal to
    // arr[r] attached after position r.
    private int dfs(int[] arr, int[][][] dp, int l, int r, int k) {
        if (l > r) return 0;
        // Fold the trailing run of boxes equal to arr[r] into k BEFORE the
        // memo lookup, so equivalent states share a single table entry.
        // (The original checked dp before folding but stored after, so the
        // cache was written and read at different indices.)
        while (l < r && arr[r] == arr[r - 1]) {
            r--;
            k++;
        }
        if (dp[l][r][k] != 0) return dp[l][r][k];
        // Option 1: remove the (k + 1) boxes ending at r right now.
        int max = (k + 1) * (k + 1) + dfs(arr, dp, l, r - 1, 0);
        // Option 2: for some i with arr[i] == arr[r], first clear (i, r),
        // then merge arr[i] with the k + 1 boxes equal to arr[r].
        // (The original mis-parenthesized this as
        //  Math.max(max, dfs(i+1, r-1, 0)) + dfs(l, i, k+1).)
        for (int i = l; i < r; i++) {
            if (arr[i] == arr[r]) {
                max = Math.max(max,
                        dfs(arr, dp, i + 1, r - 1, 0) + dfs(arr, dp, l, i, k + 1));
            }
        }
        return dp[l][r][k] = max;
    }
}
|
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.kafka.core;
import org.apache.kafka.clients.producer.Producer;
import org.springframework.transaction.support.ResourceHolderSupport;
/**
* Kafka resource holder, wrapping a Kafka producer. KafkaTransactionManager binds instances of this
* class to the thread, for a given Kafka producer factory.
*
* @param <K> the key type.
* @param <V> the value type.
*
* @author Gary Russell
*/
public class KafkaResourceHolder<K, V> extends ResourceHolderSupport {

    private final Producer<K, V> producer;

    /**
     * Construct an instance for the producer.
     * @param producer the producer.
     */
    public KafkaResourceHolder(Producer<K, V> producer) {
        this.producer = producer;
    }

    /**
     * Return the wrapped producer.
     * @return the producer.
     */
    public Producer<K, V> getProducer() {
        return this.producer;
    }

    /**
     * Commit the producer's current transaction.
     */
    public void commit() {
        this.producer.commitTransaction();
    }

    /**
     * Abort the producer's current transaction.
     */
    public void rollback() {
        this.producer.abortTransaction();
    }

    /**
     * Close the wrapped producer.
     */
    public void close() {
        this.producer.close();
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.swift.http;
import org.apache.commons.httpclient.DefaultHttpMethodRetryHandler;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpHost;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpMethodBase;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.methods.DeleteMethod;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.HeadMethod;
import org.apache.commons.httpclient.methods.InputStreamRequestEntity;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.PutMethod;
import org.apache.commons.httpclient.methods.StringRequestEntity;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.httpclient.protocol.DefaultProtocolSocketFactory;
import org.apache.commons.httpclient.protocol.Protocol;
import org.apache.commons.httpclient.protocol.ProtocolSocketFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.swift.auth.ApiKeyAuthenticationRequest;
import org.apache.hadoop.fs.swift.auth.ApiKeyCredentials;
import org.apache.hadoop.fs.swift.auth.AuthenticationRequest;
import org.apache.hadoop.fs.swift.auth.AuthenticationRequestWrapper;
import org.apache.hadoop.fs.swift.auth.AuthenticationResponse;
import org.apache.hadoop.fs.swift.auth.AuthenticationWrapper;
import org.apache.hadoop.fs.swift.auth.PasswordAuthenticationRequest;
import org.apache.hadoop.fs.swift.auth.PasswordCredentials;
import org.apache.hadoop.fs.swift.auth.entities.AccessToken;
import org.apache.hadoop.fs.swift.auth.entities.Catalog;
import org.apache.hadoop.fs.swift.auth.entities.Endpoint;
import org.apache.hadoop.fs.swift.exceptions.SwiftBadRequestException;
import org.apache.hadoop.fs.swift.exceptions.SwiftConfigurationException;
import org.apache.hadoop.fs.swift.exceptions.SwiftConnectionException;
import org.apache.hadoop.fs.swift.exceptions.SwiftException;
import org.apache.hadoop.fs.swift.exceptions.SwiftInternalStateException;
import org.apache.hadoop.fs.swift.exceptions.SwiftInvalidResponseException;
import org.apache.hadoop.fs.swift.ssl.EasySSLProtocolSocketFactory;
import org.apache.hadoop.fs.swift.util.JSONUtil;
import org.apache.hadoop.fs.swift.util.SwiftObjectPath;
import org.apache.hadoop.fs.swift.util.SwiftUtils;
import java.io.ByteArrayOutputStream;
import java.io.EOFException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.util.List;
import java.util.Properties;
import static org.apache.commons.httpclient.HttpStatus.*;
import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.*;
/**
* This implements the client-side of the Swift REST API.
*
* The core actions put, get and query data in the Swift object store,
* after authenticating the client.
*
* <b>Logging:</b>
*
* Logging at DEBUG level displays detail about the actions of this
* client, including HTTP requests and responses.
* Logging at TRACE level displays the authentication payload -
* and so will reveal the secrets used to authenticate against
* the service. It should only be done to track down authentication problems,
* -and the logs should not be made public.
*/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class SwiftRestClient {
private static final Log LOG = LogFactory.getLog(SwiftRestClient.class);
private static final int DEFAULT_RETRY_COUNT = 3;
private static final int DEFAULT_CONNECT_TIMEOUT = 15000;
/**
* Header that says "use newest version" -ensures that
* the query doesn't pick up older versions by accident
*/
public static final Header NEWEST =
new Header(SwiftProtocolConstants.X_NEWEST, "true");
/**
* authentication endpoint
*/
private final URI authUri;
/**
* Swift region. Some OpenStack installations has more than one region.
* In this case user can specify region with which Hadoop will be working
*/
private final String region;
/**
* tenant name
*/
private final String tenant;
/**
* user login name
*/
private final String username;
/**
* user password
*/
private final String password;
/**
* user api key
*/
private final String apiKey;
/**
* The container this client is working with
*/
private final String container;
/**
* Access token (Secret)
*/
private AccessToken token;
/**
* Endpoint for swift operations, obtained after authentication
*/
private URI endpointURI;
/**
* Where objects live
*/
private URI objectLocationURI;
private final URI filesystemURI;
/**
* The name of the service provider
*/
private final String serviceProvider;
/**
* Should the public swift endpoint be used, rather than the in-cluster one?
*/
private final boolean usePublicURL;
/**
* Number of times to retry a connection
*/
private final int retryCount;
/**
* How long (in milliseconds) should a connection be attempted
*/
private final int connectTimeout;
/**
* the name of a proxy host (can be null, in which case there is no proxy)
*/
private String proxyHost;
/**
* The port of a proxy. This is ignored if {@link #proxyHost} is null
*/
private int proxyPort;
/**
 * Objects query endpoint. This is synchronized
 * to handle a simultaneous update of all auth data in one
 * go.
 *
 * @return the endpoint URI for swift operations, as last set by
 *         {@link #setAuthDetails}; may be null before authentication.
 */
private synchronized URI getEndpointURI() {
    return endpointURI;
}
/**
 * Object location endpoint. Synchronized so reads see a consistent value
 * relative to the atomic update performed in {@link #setAuthDetails}.
 *
 * @return the object location URI; may be null before authentication.
 */
private synchronized URI getObjectLocationURI() {
    return objectLocationURI;
}
/**
 * Token for Swift communication. Synchronized so reads see a consistent
 * value relative to the atomic update performed in {@link #setAuthDetails}.
 *
 * @return the current access token; may be null before authentication.
 */
private synchronized AccessToken getToken() {
    return token;
}
/**
 * Setter of authentication and endpoint details.
 * Being synchronized guarantees that all three fields are set up together.
 * It is up to the reader to read all three fields in their own
 * synchronized block to be sure that they are all consistent.
 * @param endpoint endpoint URI
 * @param objectLocation object location URI
 * @param authToken auth token
 */
private void setAuthDetails(URI endpoint,
                            URI objectLocation,
                            AccessToken authToken) {
    // NOTE(review): this debug line logs the `token` FIELD (i.e. the
    // previous token, read outside the lock), not the new `authToken` —
    // confirm whether that is intentional; it also puts a credential in
    // the DEBUG log.
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("setAuth: endpoint=%s; objectURI=%s; token=%s",
                endpoint, objectLocation, token));
    }
    // All three fields are updated under one lock so readers using the
    // synchronized getters never see a half-applied update.
    synchronized (this) {
        endpointURI = endpoint;
        objectLocationURI = objectLocation;
        token = authToken;
    }
}
/**
 * Base class for all Swift REST operations. Subclasses supply the concrete
 * HTTP method type and how to turn an executed method into a result.
 * @param <M> method
 * @param <R> result
 */
private static abstract class HttpMethodProcessor<M extends HttpMethod, R> {

    /**
     * Creates the HTTP method for the given URI and runs the subclass
     * {@link #setup} hook on it before returning.
     *
     * @param uri target URI
     * @return the configured method, ready to execute
     * @throws IOException if setup fails
     */
    public final M createMethod(String uri) throws IOException {
        final M method = doCreateMethod(uri);
        setup(method);
        return method;
    }

    /**
     * Override it to return some result after method is executed.
     */
    public abstract R extractResult(M method) throws IOException;

    /**
     * Factory method to create a REST method against the given URI
     * @param uri target
     * @return method to invoke
     */
    protected abstract M doCreateMethod(String uri);

    /**
     * Override it to set up method before method is executed.
     */
    protected void setup(M method) throws IOException {
    }

    /**
     * Override point: what are the status codes that this operation supports
     * @return the list of status codes to accept
     */
    protected int[] getAllowedStatusCodes() {
        return new int[] {
                SC_OK,
                SC_CREATED,
                SC_ACCEPTED,
                SC_NO_CONTENT,
                SC_PARTIAL_CONTENT,
        };
    }
}
/**
 * Processor for HTTP GET operations; creates {@code GetMethod} instances.
 */
private static abstract class GetMethodProcessor<R> extends HttpMethodProcessor<GetMethod, R> {
    @Override
    protected final GetMethod doCreateMethod(String uri) {
        return new GetMethod(uri);
    }
}
/**
 * Processor for HTTP POST operations; creates {@code PostMethod} instances.
 */
private static abstract class PostMethodProcessor<R> extends HttpMethodProcessor<PostMethod, R> {
    @Override
    protected final PostMethod doCreateMethod(String uri) {
        return new PostMethod(uri);
    }
}
/**
 * Processor for HTTP PUT operations; creates {@code PutMethod} instances.
 */
private static abstract class PutMethodProcessor<R> extends HttpMethodProcessor<PutMethod, R> {
    @Override
    protected final PutMethod doCreateMethod(String uri) {
        return new PutMethod(uri);
    }

    /**
     * Override point: what are the status codes that this operation supports
     * @return the list of status codes to accept
     */
    @Override
    protected int[] getAllowedStatusCodes() {
        return new int[]{
                SC_OK,
                SC_CREATED,
                SC_NO_CONTENT,
                SC_ACCEPTED,
        };
    }
}
/**
 * Copy operation.
 * The only valid response is CREATED
 * @param <R>
 */
private static abstract class CopyMethodProcessor<R> extends HttpMethodProcessor<CopyMethod, R> {
    @Override
    protected final CopyMethod doCreateMethod(String uri) {
        return new CopyMethod(uri);
    }

    @Override
    protected int[] getAllowedStatusCodes() {
        return new int[]{
                SC_CREATED
        };
    }
}
/**
 * Delete operation. SC_NOT_FOUND is accepted so that deleting an already
 * missing object is not treated as a transport failure; callers can inspect
 * the status code in {@code extractResult}.
 * @param <R>
 */
private static abstract class DeleteMethodProcessor<R> extends HttpMethodProcessor<DeleteMethod, R> {
    @Override
    protected final DeleteMethod doCreateMethod(String uri) {
        return new DeleteMethod(uri);
    }

    @Override
    protected int[] getAllowedStatusCodes() {
        return new int[]{
                SC_OK,
                SC_ACCEPTED,
                SC_NO_CONTENT,
                SC_NOT_FOUND
        };
    }
}
/**
 * Processor for HTTP HEAD operations; creates {@code HeadMethod} instances.
 */
private static abstract class HeadMethodProcessor<R> extends HttpMethodProcessor<HeadMethod, R> {
    @Override
    protected final HeadMethod doCreateMethod(String uri) {
        return new HeadMethod(uri);
    }
}
/**
 * Create a Swift Rest Client instance.
 *
 * @param filesystemURI filesystem URI
 * @param conf The configuration to use to extract the binding
 * @throws SwiftConfigurationException the configuration is not valid for
 * defining a rest client against the service
 */
private SwiftRestClient(URI filesystemURI,
                        Configuration conf)
        throws SwiftConfigurationException {
    this.filesystemURI = filesystemURI;
    // Resolve the per-filesystem property set from the Hadoop configuration.
    Properties props = RestClientBindings.bind(filesystemURI, conf);
    // Mandatory options: auth endpoint and user name (getOption throws
    // SwiftConfigurationException when missing).
    String stringAuthUri = getOption(props, SWIFT_AUTH_PROPERTY);
    username = getOption(props, SWIFT_USERNAME_PROPERTY);
    // Either a password or an API key must be present; validated below.
    password = props.getProperty(SWIFT_PASSWORD_PROPERTY);
    apiKey = props.getProperty(SWIFT_APIKEY_PROPERTY);
    //optional
    region = props.getProperty(SWIFT_REGION_PROPERTY);
    //tenant is optional
    tenant = props.getProperty(SWIFT_TENANT_PROPERTY);
    //service is used for diagnostics
    serviceProvider = props.getProperty(SWIFT_SERVICE_PROPERTY);
    container = props.getProperty(SWIFT_CONTAINER_PROPERTY);
    // Only the exact string "true" enables the public endpoint.
    String isPubProp = props.getProperty(SWIFT_PUBLIC_PROPERTY, "false");
    usePublicURL = "true".equals(isPubProp);
    retryCount = getIntOption(props, SWIFT_RETRY_COUNT, DEFAULT_RETRY_COUNT);
    connectTimeout = getIntOption(props, SWIFT_CONNECTION_TIMEOUT,
            DEFAULT_CONNECT_TIMEOUT);
    if (apiKey == null && password == null) {
        throw new SwiftConfigurationException(
                "Configuration for "+ filesystemURI +" must contain either "
                        + SWIFT_PASSWORD_PROPERTY + " or "
                        + SWIFT_APIKEY_PROPERTY);
    }
    // Optional HTTP proxy; the port default (8080) is ignored when
    // proxyHost is null.
    proxyHost = props.getProperty(SWIFT_PROXY_HOST_PROPERTY, null);
    proxyPort = getIntOption(props, SWIFT_PROXY_PORT_PROPERTY, 8080);
    if (LOG.isDebugEnabled()) {
        //everything you need for diagnostics. The password is omitted.
        LOG.debug(String.format(
                "Service={%s} container={%s} uri={%s}"
                        + " tenant={%s} user={%s} region={%s}"
                        + " publicURL={%b}"
                        + " connect timeout={%d}, retry count={%d}",
                serviceProvider,
                container,
                stringAuthUri,
                tenant,
                username,
                region != null ? region : "(none)",
                usePublicURL,
                connectTimeout,
                retryCount));
    }
    try {
        this.authUri = new URI(stringAuthUri);
    } catch (URISyntaxException e) {
        throw new SwiftConfigurationException("The " + SWIFT_AUTH_PROPERTY
                + " property was incorrect: "
                + stringAuthUri, e);
    }
}
/**
 * Get a mandatory configuration option
 * @param props property set
 * @param key key
 * @return value of the configuration
 * @throws SwiftConfigurationException if there was no match for the key
 */
private static String getOption(Properties props, String key) throws
        SwiftConfigurationException {
    final String value = props.getProperty(key);
    if (value == null) {
        throw new SwiftConfigurationException("Undefined property: " + key);
    }
    return value;
}
/**
 * Get an integer configuration option, falling back to a default value.
 *
 * @param props property set
 * @param key key to look up
 * @param def default value used when the key is absent
 * @return the parsed integer value
 * @throws SwiftConfigurationException if the value is not a valid integer
 */
private int getIntOption(Properties props, String key, int def) throws
        SwiftConfigurationException {
    String val = props.getProperty(key, Integer.toString(def));
    try {
        // Integer.decode also accepts hex (0x...) and octal forms.
        return Integer.decode(val);
    } catch (NumberFormatException e) {
        // Fixed message spacing: was "value of property<key> : <val>".
        throw new SwiftConfigurationException("Failed to parse (numeric) value"
                + " of property " + key
                + " : " + val, e);
    }
}
/**
 * Registers the HTTP and HTTPS protocol handlers with the ports configured
 * for this client. This is something that needs to be looked at, as it is
 * setting the static state of the http client classes: the registration is
 * JVM-global, so concurrent clients with different ports would clash.
 */
private void registerProtocols(Properties props) throws
        SwiftConfigurationException {
    Protocol.registerProtocol("http", new Protocol("http",
            new DefaultProtocolSocketFactory(),
            getIntOption(props,
                    SWIFT_HTTP_PORT_PROPERTY,
                    SWIFT_HTTP_PORT)));
    // HTTPS uses the "easy" SSL socket factory, which does not validate
    // certificates.
    Protocol.registerProtocol("https",
            new Protocol("https",
                    (ProtocolSocketFactory) new EasySSLProtocolSocketFactory(),
                    getIntOption(props,
                            SWIFT_HTTPS_PORT_PROPERTY,
                            SWIFT_HTTPS_PORT)));
}
/**
 * Makes HTTP GET request to Swift for a byte range of an object.
 *
 * @param path path to object
 * @param offset offset from file beginning; must be non-negative
 * @param length number of bytes to read; must be strictly positive
 * @return The input stream -which must be closed afterwards.
 * @throws IOException if offset is negative or length is not positive,
 * or on network problems
 */
public InputStream getDataAsInputStream(SwiftObjectPath path,
                                        long offset,
                                        long length) throws IOException {
    if (offset < 0) {
        throw new IOException("Invalid offset: " + offset + ".");
    }
    if (length <= 0) {
        throw new IOException("Invalid length: " + length + ".");
    }
    // HTTP Range header is inclusive on both ends, hence the "- 1".
    final String range = String.format(SWIFT_RANGE_HEADER_FORMAT_PATTERN,
            offset,
            offset + length - 1);
    // NEWEST ensures the freshest replica is served.
    return getDataAsInputStream(path,
            new Header(HEADER_RANGE, range),
            SwiftRestClient.NEWEST);
}
/**
 * Returns object length via an HTTP HEAD request (no body transferred).
 *
 * @param uri file URI
 * @return object length as reported in the Content-Length response header
 * @throws SwiftException on swift-related issues
 * @throws IOException on network/IO problems
 */
public long getContentLength(URI uri) throws IOException {
    preRemoteCommand("getContentLength");
    return perform(uri, new HeadMethodProcessor<Long>() {
        @Override
        public Long extractResult(HeadMethod method) throws IOException {
            return method.getResponseContentLength();
        }

        @Override
        protected void setup(HeadMethod method) throws IOException {
            super.setup(method);
            // Ask for the newest replica so stale versions are skipped.
            method.addRequestHeader(NEWEST);
        }
    });
}
/**
 * Get the length of the remote object. Convenience wrapper that converts
 * the swift path to a URI and delegates to {@link #getContentLength(URI)}.
 *
 * @param path object to probe
 * @return the content length
 * @throws IOException on any failure
 */
public long getContentLength(SwiftObjectPath path) throws IOException {
    return getContentLength(pathToURI(path));
}
/**
 * Get the path contents as an input stream.
 * <b>Warning:</b> this input stream must be closed to avoid
 * keeping Http connections open.
 *
 * @param path path to file
 * @param requestHeaders http headers
 * @return the body stream of the GET response
 * @throws IOException on IO Faults
 * @throws FileNotFoundException if there is nothing at the path
 */
public InputStream getDataAsInputStream(SwiftObjectPath path,
                                        final Header... requestHeaders)
        throws IOException {
    preRemoteCommand("getDataAsInputStream");
    return doGet(pathToURI(path),
            requestHeaders);
}
/**
 * Returns object location as byte[]
 *
 * @param path path to file
 * @param requestHeaders http headers
 * @return the response body bytes, or null if the object was not found or
 * the response carried no content
 * @throws IOException on IO Faults
 */
public byte[] getObjectLocation(SwiftObjectPath path,
                                final Header... requestHeaders) throws IOException {
    preRemoteCommand("getObjectLocation");
    return perform(pathToObjectLocation(path),
            new GetMethodProcessor<byte[]>() {
                @Override
                public byte[] extractResult(GetMethod method) throws
                        IOException {
                    //TODO: remove SC_NO_CONTENT if it depends on Swift versions
                    if (method.getStatusCode() == SC_NOT_FOUND ||
                            method.getStatusCode() == SC_NO_CONTENT ||
                            method.getResponseBodyAsStream() == null) {
                        return null;
                    }
                    // Read the whole body. The previous code issued a single
                    // read() into a fixed 1024-byte buffer, which could
                    // truncate longer bodies and returned the full buffer
                    // (zero-padded) for shorter ones.
                    final InputStream responseBodyAsStream =
                            method.getResponseBodyAsStream();
                    final ByteArrayOutputStream body =
                            new ByteArrayOutputStream(1024);
                    final byte[] buffer = new byte[1024];
                    int read;
                    while ((read = responseBodyAsStream.read(buffer)) > 0) {
                        body.write(buffer, 0, read);
                    }
                    return body.size() > 0 ? body.toByteArray() : null;
                }

                @Override
                protected void setup(GetMethod method)
                        throws SwiftInternalStateException {
                    setHeaders(method, requestHeaders);
                }
            });
}
/**
 * Builds the URI for the given path under the object-location endpoint.
 *
 * @param path swift object path
 * @return the object-location URI for the path
 * @throws SwiftException if the resulting URI is syntactically invalid
 */
private URI pathToObjectLocation(SwiftObjectPath path) throws SwiftException {
    URI uri;
    // Read through the synchronized getter (instead of the raw field) so a
    // concurrent setAuthDetails() update is observed consistently, matching
    // how getEndpointURI() is used elsewhere.
    String dataLocationURI = getObjectLocationURI().toString();
    try {
        if (path.toString().startsWith("/")) {
            dataLocationURI = dataLocationURI.concat(path.toUriPath());
        } else {
            dataLocationURI = dataLocationURI.concat("/").concat(path.toUriPath());
        }
        uri = new URI(dataLocationURI);
    } catch (URISyntaxException e) {
        throw new SwiftException(e);
    }
    return uri;
}
/**
 * Find objects under a prefix
 *
 * @param path path prefix
 * @param delimiter delimiter of path, can be null
 * @param requestHeaders optional request headers
 * @return byte[] file data or null if the object was not found
 * @throws IOException on IO Faults
 * @throws FileNotFoundException if nothing is at the end of the URI -that is,
 * the directory is empty
 */
public byte[] findObjectsByPrefix(SwiftObjectPath path,
                                  String delimiter,
                                  final Header... requestHeaders) throws IOException {
    preRemoteCommand("findObjectsByPrefix");
    if (LOG.isDebugEnabled()) {
        LOG.debug("findObjectsByPrefix path=" + path + " delimiter=" + delimiter);
    }
    String endpoint = getEndpointURI().toString();
    StringBuilder dataLocationURI = new StringBuilder();
    dataLocationURI.append(endpoint);
    String object = path.getObject();
    // Normalize away a leading slash so the prefix matches object names.
    if (object.startsWith("/")) {
        object = object.substring(1);
    }
    dataLocationURI = dataLocationURI.append("/")
            .append(path.getContainer());
    maybeAppendPrefix(dataLocationURI, object);
    if (delimiter != null) {
        // NOTE(review): this appends "&delimiter=/" + delimiter, producing
        // e.g. "&delimiter=//" for delimiter "/" — confirm whether the
        // literal trailing "/" is intended. Also, when the object was empty
        // maybeAppendPrefix added no "?", so the "&..." fragment would not
        // start a valid query string — verify against the Swift listing API.
        dataLocationURI.append("&delimiter=/").append(delimiter);
    }
    return findObjects(dataLocationURI.toString(), requestHeaders);
}
/**
 * Appends the "/?prefix=&lt;object&gt;" query to the URI being built, unless
 * the object name is empty or is just "/".
 */
private void maybeAppendPrefix(StringBuilder dataLocationURI, String object) {
    boolean hasPrefix = !object.isEmpty() && !"/".equals(object);
    if (hasPrefix) {
        dataLocationURI.append("/?prefix=").append(object);
    }
}
/**
 * Find objects in a directory
 *
 * @param path path prefix
 * @param requestHeaders optional request headers
 * @return byte[] file data or null if the object was not found
 * @throws IOException on IO Faults
 * @throws FileNotFoundException if nothing is at the end of the URI -that is,
 * the directory is empty
 */
public byte[] listObjectsInDirectory(SwiftObjectPath path,
                                     final Header... requestHeaders) throws IOException {
    preRemoteCommand("listObjectsInPath");
    if (LOG.isDebugEnabled()) {
        LOG.debug("listObjectsInDirectory path=" + path );
    }
    String endpoint = getEndpointURI().toString();
    StringBuilder dataLocationURI1 = new StringBuilder();
    dataLocationURI1.append(endpoint);
    String object = path.getObject();
    // Normalize: strip a leading slash, ensure a trailing slash so the
    // prefix denotes a directory.
    if (object.startsWith("/")) {
        object = object.substring(1);
    }
    if (!object.endsWith("/")) {
        object = object.concat("/");
    }
    dataLocationURI1 = dataLocationURI1.append("/")
            .append(path.getContainer());
    maybeAppendPrefix(dataLocationURI1, object);
    StringBuilder dataLocationURI = dataLocationURI1;
    // NOTE(review): if object was exactly "/", maybeAppendPrefix added no
    // "?...", so this "&delimiter=/" does not start a valid query string —
    // confirm against the Swift listing API.
    dataLocationURI.append("&delimiter=/");
    return findObjects(dataLocationURI.toString(), requestHeaders);
}
/**
 * Find objects in a location
 * @param location URI
 * @param requestHeaders optional request headers
 * @return the body of the response
 * @throws FileNotFoundException if the server answered 404
 * @throws IOException IO problems, including a malformed location URI
 */
private byte[] findObjects(String location, final Header[] requestHeaders) throws
        IOException {
    preRemoteCommand("findObjects");
    URI uri;
    try {
        uri = new URI(location);
    } catch (URISyntaxException e) {
        throw new SwiftException("Bad URI: " + location, e);
    }
    return perform(uri, new GetMethodProcessor<byte[]>() {
        @Override
        public byte[] extractResult(GetMethod method) throws IOException {
            if (method.getStatusCode() == SC_NOT_FOUND) {
                //no result
                throw new FileNotFoundException("Not found " + method.getURI());
            }
            return method.getResponseBody();
        }

        @Override
        protected int[] getAllowedStatusCodes() {
            // 404 is allowed through so extractResult can turn it into a
            // FileNotFoundException instead of a transport error.
            return new int[]{
                    SC_OK,
                    SC_NOT_FOUND
            };
        }

        @Override
        protected void setup(GetMethod method)
                throws SwiftInternalStateException {
            setHeaders(method, requestHeaders);
        }
    });
}
/**
 * Copy an object. This is done by sending a COPY method to the filesystem
 * which is required to handle this WebDAV-level extension to the
 * base HTTP operations.
 * @param src source path
 * @param dst destination path
 * @param headers any headers
 * @return true if the status code was considered successful
 * @throws IOException on IO Faults
 */
public boolean copyObject(SwiftObjectPath src, final SwiftObjectPath dst, final Header... headers)
        throws IOException {
    preRemoteCommand("copyObject");
    return perform(pathToURI(src), new CopyMethodProcessor<Boolean>() {
        @Override
        public Boolean extractResult(CopyMethod method) throws IOException {
            // CopyMethodProcessor only accepts SC_CREATED, so reaching
            // here means the copy succeeded.
            return true;
        }

        @Override
        protected void setup(CopyMethod method) throws
                SwiftInternalStateException {
            setHeaders(method, headers);
            // The copy destination goes in the Destination header.
            method.addRequestHeader(HEADER_DESTINATION, dst.toUriPath());
        }
    });
}
/**
* Uploads file as Input Stream to Swift
*
* @param path path to Swift
* @param data object data
* @param length length of data
* @param requestHeaders http headers
* @throws IOException on IO Faults
*/
public void upload(SwiftObjectPath path,
    final InputStream data,
    final long length,
    final Header... requestHeaders)
    throws IOException {
  preRemoteCommand("upload");
  // PUT the stream to the object path; the response body is read so the
  // connection can be drained, but the result is discarded.
  perform(pathToURI(path), new PutMethodProcessor<byte[]>() {
    @Override
    public byte[] extractResult(PutMethod method) throws IOException {
      return method.getResponseBody();
    }

    @Override
    protected void setup(PutMethod method) throws
        SwiftInternalStateException {
      // Stream the request body; length must match the bytes available in
      // data, as it is sent as the entity length.
      method.setRequestEntity(new InputStreamRequestEntity(data, length));
      setHeaders(method, requestHeaders);
    }
  });
}
/**
* Deletes object from swift.
* The result is true if this operation did the deletion.
* @param path path to file
* @param requestHeaders http headers
* @throws IOException on IO Faults
*/
public boolean delete(SwiftObjectPath path, final Header... requestHeaders) throws IOException {
  preRemoteCommand("delete");
  return perform(pathToURI(path), new DeleteMethodProcessor<Boolean>() {
    @Override
    public Boolean extractResult(DeleteMethod method) throws IOException {
      // 204 "No Content" marks that this call actually performed the
      // deletion; other accepted codes return false.
      return method.getStatusCode() == SC_NO_CONTENT;
    }

    @Override
    protected void setup(DeleteMethod method) throws
        SwiftInternalStateException {
      setHeaders(method, requestHeaders);
    }
  });
}
/**
* Issue a head request
* @param path path to query
* @param requestHeaders request header
* @return the response headers. This may be an empty list
* @throws IOException IO problems
* @throws FileNotFoundException if there is nothing at the end
*/
public Header[] headRequest(SwiftObjectPath path, final Header... requestHeaders) throws IOException {
  preRemoteCommand("headRequest");
  return perform(pathToURI(path), new HeadMethodProcessor<Header[]>() {
    @Override
    public Header[] extractResult(HeadMethod method) throws IOException {
      // Map a 404 to FileNotFoundException so callers can probe for
      // existence (see createContainer()).
      if (method.getStatusCode() == SC_NOT_FOUND) {
        throw new FileNotFoundException("Not Found " + method.getURI());
      }
      return method.getResponseHeaders();
    }

    @Override
    protected void setup(HeadMethod method) throws
        SwiftInternalStateException {
      setHeaders(method, requestHeaders);
    }
  });
}
/**
 * Issue a PUT request with no request body and return the raw status code.
 * Used, e.g., to create a container (see {@code createContainer}).
 *
 * @param path path to PUT to
 * @param requestHeaders optional request headers
 * @return the HTTP status code of the response
 * @throws IOException IO problems
 */
public int putRequest(SwiftObjectPath path, final Header... requestHeaders) throws IOException {
  preRemoteCommand("putRequest");
  return perform(pathToURI(path), new PutMethodProcessor<Integer>() {
    @Override
    public Integer extractResult(PutMethod method) throws IOException {
      return method.getStatusCode();
    }

    @Override
    protected void setup(PutMethod method) throws
        SwiftInternalStateException {
      setHeaders(method, requestHeaders);
    }
  });
}
/**
* Authenticate to Openstack Keystone
* As well as returning the access token, the member fields {@link #token},
* {@link #endpointURI} and {@link #objectLocationURI} are set up for re-use.
*
* This method is re-entrant -if more than one thread attempts to authenticate
* neither will block -but the field values will have those of the last caller.
*
* <b>Important:</b> if executed at TRACE level then this method will log the
* JSON payload of the authentication. While this can be invaluable for debugging
* authentication problems, it can include login information -including
* the password. Only turn this level of logging on when dealing with
* authentication problems.
* @return authenticated access token
*/
public AccessToken authenticate() throws IOException {
  LOG.debug("started authentication");
  return perform(authUri, new PostMethodProcessor<AccessToken>() {
    @Override
    protected void setup(PostMethod method) throws SwiftException {
      // Build the credentials payload: password auth when a password is
      // configured, otherwise API-key auth.
      AuthenticationRequest authRequest = null;
      if (password != null) {
        authRequest = new PasswordAuthenticationRequest(tenant,
            new PasswordCredentials(
                username,
                password));
      } else {
        authRequest = new ApiKeyAuthenticationRequest(tenant,
            new ApiKeyCredentials(
                username, apiKey));
      }
      final String data = JSONUtil.toJSON(new AuthenticationRequestWrapper(
          authRequest));
      if (LOG.isDebugEnabled()) {
        LOG.debug("Authenticating with " + authRequest);
      }
      // WARNING: this trace line can include login secrets (see the method
      // javadoc); only enable TRACE while debugging auth problems.
      if (LOG.isTraceEnabled()) {
        LOG.trace("JSON message: " + "\n" + data);
      }
      method.setRequestEntity(toJsonEntity(data));
    }

    /**
     * specification says any of the 2xxs are OK, so list all
     * the standard ones
     * @return a set of 2XX status codes.
     */
    @Override
    protected int[] getAllowedStatusCodes() {
      return new int[]{
          SC_OK,
          SC_CREATED,
          SC_ACCEPTED,
          SC_NON_AUTHORITATIVE_INFORMATION,
          SC_NO_CONTENT,
          SC_RESET_CONTENT,
          SC_PARTIAL_CONTENT,
          SC_MULTI_STATUS
      };
    }

    @Override
    public AccessToken extractResult(PostMethod method) throws IOException {
      final AuthenticationResponse access =
          JSONUtil.toObject(method.getResponseBodyAsString(),
              AuthenticationWrapper.class).getAccess();
      final List<Catalog> serviceCatalog = access.getServiceCatalog();
      //locate the specific service catalog that defines Swift; variations
      //in the name of this add complexity to the search
      // NOTE(review): catalogMatch is declared but never assigned or read
      // after this point -- dead local.
      boolean catalogMatch = false;
      // Human-readable lists of what was seen, used only in the failure
      // message below.
      StringBuilder catList = new StringBuilder();
      StringBuilder regionList = new StringBuilder();
      //these fields are all set together at the end of the operation
      URI endpointURI = null;
      URI objectLocation;
      Endpoint swiftEndpoint = null;
      AccessToken accessToken;
      for (Catalog catalog : serviceCatalog) {
        String name = catalog.getName();
        String type = catalog.getType();
        String descr = String.format("[%s: %s]; ", name, type);
        catList.append(descr);
        if (LOG.isDebugEnabled()) {
          LOG.debug("Catalog entry " + descr);
        }
        // The swift service may be published under several names/types.
        if (name.equals(SERVICE_CATALOG_SWIFT)
            || name.equals(SERVICE_CATALOG_CLOUD_FILES)
            || type.equals(SERVICE_CATALOG_OBJECT_STORE)) {
          //swift is found
          if (LOG.isDebugEnabled()) {
            LOG.debug("Found swift catalog as " + name + " => " + type);
          }
          //now go through the endpoints
          for (Endpoint endpoint : catalog.getEndpoints()) {
            String endpointRegion = endpoint.getRegion();
            URI publicURL = endpoint.getPublicURL();
            URI internalURL = endpoint.getInternalURL();
            descr = String.format("[%s => %s / %s]; ",
                endpointRegion,
                publicURL,
                internalURL);
            regionList.append(descr);
            if (LOG.isDebugEnabled()) {
              LOG.debug("Endpoint " + descr);
            }
            // A null configured region matches the first endpoint found.
            if (region == null || endpointRegion.equals(region)) {
              endpointURI = usePublicURL ? publicURL : internalURL;
              swiftEndpoint = endpoint;
              break;
            }
          }
        }
      }
      if (endpointURI == null) {
        String message = "Could not find swift service from auth URL "
            + authUri
            + " and region '" + region + "'. "
            + "Categories: " + catList
            + ((regionList.length() > 0) ?
            ("regions: " + regionList)
            : "No regions");
        throw new SwiftInvalidResponseException(message,
            SC_OK,
            "authenticating",
            authUri);
      }
      accessToken = access.getToken();
      // Derive the object-location URI from the chosen endpoint plus the
      // tenant-specific auth path.
      String path = SWIFT_OBJECT_AUTH_ENDPOINT
          + swiftEndpoint.getTenantId();
      String host = endpointURI.getHost();
      try {
        objectLocation = new URI(endpointURI.getScheme(),
            null,
            host,
            endpointURI.getPort(),
            path,
            null,
            null);
      } catch (URISyntaxException e) {
        throw new SwiftException("object endpoint URI is incorrect: "
            + endpointURI
            + " + " + path,
            e);
      }
      // Publish endpoint, object location and token together for reuse.
      setAuthDetails(endpointURI, objectLocation, accessToken);
      if (LOG.isDebugEnabled()) {
        LOG.debug("authenticated against " + endpointURI);
      }
      createDefaultContainer();
      return accessToken;
    }
  });
}
/**
* create default container if it doesn't exist for Hadoop Swift integration.
* non-reentrant, as this should only be needed once.
* @throws IOException IO problems.
*/
private synchronized void createDefaultContainer() throws IOException {
  // container is this client's configured default container name.
  createContainer(container);
}
/**
* Create a container -if it already exists, do nothing
* @param containerName the container name
* @throws IOException IO problems
* @throws SwiftBadRequestException invalid container name
* @throws SwiftInvalidResponseException error from the server
*/
public void createContainer(String containerName) throws IOException {
  SwiftObjectPath objectPath = new SwiftObjectPath(containerName, "");
  try {
    //see if the data is there
    headRequest(objectPath, NEWEST);
  } catch (FileNotFoundException ex) {
    // Container missing: attempt to create it with a bodyless PUT.
    int status = 0;
    try {
      status = putRequest(objectPath);
    } catch (FileNotFoundException e) {
      //triggered by a very bad container name.
      //re-insert the 404 result into the status
      status = SC_NOT_FOUND;
    }
    if (status == SC_BAD_REQUEST) {
      throw new SwiftBadRequestException("Bad request " +
          "-possibly an illegal container name");
    }
    if (!isStatusCodeExpected(status,
        SC_OK,
        SC_CREATED,
        SC_ACCEPTED,
        SC_NO_CONTENT)) {
      throw new SwiftInvalidResponseException("Couldn't create container "
          + containerName +
          " for storing data in Swift." +
          " Try to create container " +
          containerName + " manually ",
          status,
          "PUT",
          null);
    }
    else {
      // NOTE(review): this branch runs when the PUT reported a *success*
      // status, yet it rethrows the HEAD's FileNotFoundException, so the
      // method always throws after apparently creating the container --
      // confirm this is intentional before relying on it.
      throw ex;
    }
  }
}
/**
* Trigger an initial auth operation if some of the needed
* fields are missing
* @throws IOException on problems
*/
private void authIfNeeded() throws IOException {
  // The endpoint URI is set only via setAuthDetails() during a successful
  // authenticate(), so a null endpoint means "never authenticated".
  if (getEndpointURI() == null) {
    authenticate();
  }
}
/**
* Pre-execution actions to be performed by methods. Currently this
* <ul>
* <li>Logs the operation at TRACE</li>
* <li>Authenticates the client -if needed</li>
* </ul>
* @throws IOException
*/
private void preRemoteCommand(String operation) throws IOException {
  // operation: name of the calling operation, used only for trace logging.
  if (LOG.isTraceEnabled()) {
    LOG.trace("Executing " + operation);
  }
  authIfNeeded();
}
/**
* Performs the HTTP request, validates the response code and returns
* the received data. HTTP Status codes are converted into exceptions.
*
* @param uri URI to source
* @param processor HttpMethodProcessor
* @param <M> method
* @param <R> result type
* @return result of HTTP request
* @throws IOException IO problems
* @throws SwiftBadRequestException the status code indicated "Bad request"
* @throws SwiftInvalidResponseException the status code is out of range
* for the action (excluding 404 responses)
* @throws SwiftInternalStateException the internal state of this client
* is invalid
* @throws FileNotFoundException a 404 response was returned
*/
private <M extends HttpMethod, R> R perform(URI uri,
    HttpMethodProcessor<M, R> processor)
    throws IOException, SwiftBadRequestException, SwiftInternalStateException,
    SwiftInvalidResponseException, FileNotFoundException {
  checkNotNull(uri);
  checkNotNull(processor);
  final M method = processor.createMethod(uri.toString());
  //retry policy
  HttpMethodParams methodParams = method.getParams();
  methodParams.setParameter(HttpMethodParams.RETRY_HANDLER,
      new DefaultHttpMethodRetryHandler(
          retryCount, false));
  // NOTE: connectTimeout is applied as the socket (read) timeout here.
  methodParams.setSoTimeout(connectTimeout);
  try {
    int statusCode = exec(method);
    //look at the response and see if it was valid or not.
    //Valid is more than a simple 200; even 404 "not found" is considered
    //valid -which it is for many methods.
    //validate the allowed status code for this operation
    int[] allowedStatusCodes = processor.getAllowedStatusCodes();
    boolean validResponse = isStatusCodeExpected(statusCode,
        allowedStatusCodes);
    if (!validResponse) {
      IOException ioe = buildException(uri, method, statusCode);
      throw ioe;
    }
    // On success the connection is deliberately NOT released: some
    // processors (e.g. doGet) return a stream still bound to the open
    // method, and releasing it here would break that.
    return processor.extractResult(method);
  } catch (IOException e) {
    //release the connection -always
    method.releaseConnection();
    throw e;
  }
}
/**
* Build an exception from a failed operation. This can include generating
* specific exceptions (e.g. FileNotFound), as well as the default
* {@link SwiftInvalidResponseException}
* {@link SwiftInvalidResponseException}.
* @param uri URI for operation
* @param method operation that failed
* @param statusCode status code
* @param <M> method type
* @return an exception to throw.
*/
/**
 * Translate a failed HTTP operation into the exception to throw:
 * 404 becomes FileNotFoundException, 400 a SwiftBadRequestException,
 * 416 an EOFException, and anything else the generic
 * SwiftInvalidResponseException.
 *
 * @param uri URI for operation
 * @param method operation that failed
 * @param statusCode status code
 * @param <M> method type
 * @return an exception to throw
 */
private <M extends HttpMethod> IOException buildException(URI uri,
                                                          M method,
                                                          int statusCode) {
  // One description of the failure, used for both the debug log and the
  // default exception text.
  String errorText = String.format("Method %s on %s failed, status code: %d," +
          " status line: %s",
      method.getName(),
      uri,
      statusCode,
      method.getStatusLine()
  );
  if (LOG.isDebugEnabled()) {
    LOG.debug(errorText);
  }
  IOException result;
  if (statusCode == SC_NOT_FOUND) {
    result = new FileNotFoundException("Operation " + method.getName()
        + " on " + uri);
  } else if (statusCode == SC_BAD_REQUEST) {
    //bad HTTP request
    result = new SwiftBadRequestException("Bad request against " + uri);
  } else if (statusCode == SC_REQUESTED_RANGE_NOT_SATISFIABLE) {
    //out of range: treated as end of the message
    result = new EOFException(method.getStatusText());
  } else {
    result = new SwiftInvalidResponseException(
        errorText,
        statusCode,
        method.getName(),
        uri);
  }
  return result;
}
/**
* Exec a GET request and return the input stream of the response
* @param uri URI to GET
* @param requestHeaders request headers
* @return the input stream. This must be closed to avoid log errors
* @throws IOException
*/
private InputStream doGet(final URI uri, final Header... requestHeaders)
    throws IOException {
  return perform(uri, new GetMethodProcessor<InputStream>() {
    @Override
    public InputStream extractResult(GetMethod method) throws IOException {
      // Wrap the response so that closing the returned stream releases
      // the HTTP connection -- perform() does not release it on success.
      return new HttpInputStreamWithRelease(uri, method);
    }

    @Override
    protected void setup(GetMethod method) throws
        SwiftInternalStateException {
      setHeaders(method, requestHeaders);
    }
  });
}
/**
* Create an instance against a specific FS URI,
*
* @param filesystemURI filesystem to bond to
* @param config source of configuration data
* @return REST client instance
* @throws IOException on instantiation problems
*/
public static SwiftRestClient getInstance(URI filesystemURI,
    Configuration config) throws IOException {
  // A fresh client per call -- no instance caching happens here.
  return new SwiftRestClient(filesystemURI, config);
}
/**
* Convert the (JSON) data to a string request as UTF-8
* @param data data
* @return the data
* @throws SwiftException if for some very unexpected reason it's impossible
* to convert the data to UTF-8.
*/
/**
 * Wrap JSON text in a request entity declared as application/json, UTF-8.
 *
 * @param data the JSON payload
 * @return a request entity carrying the payload
 * @throws SwiftException if UTF-8 is (very unexpectedly) unsupported
 */
private static StringRequestEntity toJsonEntity(String data) throws
    SwiftException {
  try {
    return new StringRequestEntity(data, "application/json", "UTF-8");
  } catch (UnsupportedEncodingException e) {
    throw new SwiftException("Could not encode data as UTF-8", e);
  }
}
/**
* Converts Swift path to URI to make request.
* This is public for unit testing
*
*
* @param path path to object
* @param endpointURI domain URL, e.g. http://domain.com
* @return valid URI for object
* @throws SwiftException the path built from the endpoint and path not a URI
*/
/**
 * Join the endpoint with the (possibly URL-encoded) object path and turn
 * the result into a URI. Public for unit testing.
 *
 * @param path path to object
 * @param endpointURI endpoint base, e.g. http://domain.com
 * @return valid URI for object
 * @throws SwiftException the path built from the endpoint and path not a URI
 */
public static URI pathToURI(SwiftObjectPath path,
                            URI endpointURI) throws SwiftException {
  checkNotNull(endpointURI, "Null Endpoint -client is not authenticated");
  String target = SwiftUtils.joinPaths(endpointURI.toString(),
      encodeUrl(path.toUriPath()));
  try {
    return new URI(target);
  } catch (URISyntaxException e) {
    throw new SwiftException("Failed to create URI from " + target, e);
  }
}
/**
* Encode the URL. This extends {@link URLEncoder#encode(String, String)}
* with a replacement of + with %20.
* @param url URL string
* @return an encoded string
* @throws SwiftException if the URL cannot be encoded
*/
private static String encodeUrl(String url) throws SwiftException {
  // Only strings containing whitespace are touched; anything else is
  // returned unchanged.
  if (url.matches(".*\\s+.*")) {
    try {
      // NOTE(review): URLEncoder.encode() escapes the *entire* string,
      // including '/' separators, whenever any whitespace is present --
      // presumably acceptable for the object paths this client builds,
      // but confirm against server-side decoding.
      url = URLEncoder.encode(url, "UTF-8");
      // URLEncoder maps ' ' to '+'; URIs want %20.
      url = url.replace("+", "%20");
    } catch (UnsupportedEncodingException e) {
      throw new SwiftException("failed to encode URI", e);
    }
  }
  return url;
}
/**
* Convert a swift path to a URI relative to the current endpoint.
* @param path path
* @return a path off the current endpoint URI.
* @throws SwiftException
*/
private URI pathToURI(SwiftObjectPath path) throws SwiftException {
  // getEndpointURI() is non-null only after authenticate(); the static
  // overload rejects a null endpoint with SwiftInternalStateException.
  return pathToURI(path, getEndpointURI());
}
/**
* Add the headers to the method, and the auth token (which must be set
* @param method method to update
* @param requestHeaders the list of headers
* @throws SwiftInternalStateException not yet authenticated
*/
private void setHeaders(HttpMethodBase method, Header[] requestHeaders)
    throws SwiftInternalStateException {
  // Copy every caller-supplied header onto the request...
  for (Header header : requestHeaders) {
    method.addRequestHeader(header);
  }
  // ...then attach the auth token, which must already be set.
  setAuthToken(method, getToken());
}
/**
* Set the auth key header of the method to the token ID supplied
* @param method method
* @param accessToken access token
* @throws SwiftInternalStateException if the client is not yet authenticated
*/
private void setAuthToken(HttpMethodBase method, AccessToken accessToken)
    throws SwiftInternalStateException {
  // A null token means authenticate() has not run (or did not complete).
  checkNotNull(accessToken,"Not authenticated");
  method.addRequestHeader(HEADER_AUTH_KEY, accessToken.getId());
}
/**
* Execute a method in a new HttpClient instance.
* If the auth failed, authenticate then retry the method.
* @param method method to execute
* @param <M> Method type
* @return the status code
* @throws IOException on any failure
* @throws SwiftConnectionException failure to connect or authenticate
*/
/**
 * Execute a method in a new HttpClient instance. On a 401 from any URL
 * other than the auth endpoint, re-authenticate once and retry the
 * original request; a 401 from the auth endpoint itself is fatal.
 *
 * @param method method to execute
 * @param <M> Method type
 * @return the status code of the (possibly retried) request
 * @throws IOException on any failure
 * @throws SwiftConnectionException failure to connect or authenticate
 */
private <M extends HttpMethod> int exec(M method)
    throws IOException, SwiftConnectionException {
  final HttpClient client = new HttpClient();
  if (proxyHost != null) {
    client.getParams().setParameter(HTTP_ROUTE_DEFAULT_PROXY,
        new HttpHost(proxyHost, proxyPort));
  }
  int statusCode = execWithDebugOutput(method, client);
  if (method.getStatusCode() == HttpStatus.SC_UNAUTHORIZED) {
    //unauthed -look at what raised the response
    if (method.getURI().toString().equals(authUri.toString())) {
      //unauth response from the AUTH URI itself.
      // Fix: password is null when API-key authentication is in use (see
      // authenticate()); the old message dereferenced it unconditionally
      // and threw NullPointerException instead of the diagnostic below.
      String credentialInfo = (password != null)
          ? ("password length=" + password.length())
          : "(API key authentication)";
      throw new SwiftConnectionException(
          "Authentication failed, URI credentials are incorrect,"
          + " or Openstack Keystone is configured incorrectly. URL='"
          + authUri + "' "
          + "username={" + username + "} "
          + credentialInfo
      );
    } else {
      //any other URL: re-authenticate and try again
      if (LOG.isDebugEnabled()) {
        LOG.debug("Reauthenticating");
      }
      authenticate();
      if (LOG.isDebugEnabled()) {
        LOG.debug("Retrying original request");
      }
      statusCode = execWithDebugOutput(method, client);
    }
  }
  return statusCode;
}
/**
* Execute the request with the request and response logged at debug level
* @param method method to execute
* @param client client to use
* @param <M> method type
* @return the status code
* @throws IOException any failure reported by the HTTP client.
*/
/**
 * Execute the request, logging the request line and headers at debug
 * level before execution and the status code afterwards.
 *
 * @param method method to execute
 * @param client client to use
 * @param <M> method type
 * @return the status code
 * @throws IOException any failure reported by the HTTP client
 */
private <M extends HttpMethod> int execWithDebugOutput(M method,
                                                       HttpClient client)
    throws IOException {
  if (LOG.isDebugEnabled()) {
    StringBuilder request = new StringBuilder();
    request.append(method.getName())
        .append(' ')
        .append(method.getURI())
        .append('\n');
    for (Header requestHeader : method.getRequestHeaders()) {
      request.append(requestHeader.toString());
    }
    LOG.debug(request);
  }
  int status = client.executeMethod(method);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Status code = " + status);
  }
  return status;
}
/**
* Ensures that an object reference passed as a parameter to the calling
* method is not null.
*
* @param reference an object reference
* @return the non-null reference that was validated
* @throws NullPointerException if {@code reference} is null
*/
private static <T> T checkNotNull(T reference) throws
    SwiftInternalStateException {
  // Delegate with a generic message; prefer the two-argument overload
  // wherever a more descriptive message is available.
  return checkNotNull(reference, "Null Reference");
}
/**
 * Validate that a reference is non-null, returning it unchanged.
 *
 * @param reference reference to validate
 * @param message message for the exception on failure
 * @return the (non-null) reference
 * @throws SwiftInternalStateException if the reference is null
 */
private static <T> T checkNotNull(T reference, String message) throws
    SwiftInternalStateException {
  if (reference != null) {
    return reference;
  }
  throw new SwiftInternalStateException(message);
}
/**
* Check for a status code being expected -takes a list of expected values
*
* @param status received status
* @param expected expected value
* @return true iff status is an element of [expected]
*/
/**
 * Check whether a status code is one of a set of expected values.
 *
 * @param status received status
 * @param expected expected values
 * @return true iff status is an element of expected
 */
private boolean isStatusCodeExpected(int status, int... expected) {
  // Linear scan -- the expected list is always tiny.
  boolean found = false;
  for (int i = 0; i < expected.length && !found; i++) {
    found = (expected[i] == status);
  }
  return found;
}
/**
 * @return the client's identity: its filesystem URI.
 */
@Override
public String toString() {
  return new StringBuilder("SwiftRestClient: ")
      .append(filesystemURI)
      .toString();
}
}
|
/*
* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.support.GenericXmlContextLoader;
import org.springframework.web.context.WebApplicationContext;
/**
* Base class for Spring JUnit actions
*/
public abstract class StrutsSpringTestCase extends StrutsTestCase {

    /** Default Spring context location, searched across the classpath. */
    private static final String DEFAULT_CONTEXT_LOCATION = "classpath*:applicationContext.xml";

    // Static so the Spring context is loaded at most once per JVM and
    // shared by every subclass/test instance.
    protected static ApplicationContext applicationContext;

    /**
     * Loads the Spring application context (first call only) and registers
     * it in the servlet context before the Struts dispatcher initializes.
     *
     * @throws Exception if the Spring context cannot be loaded
     */
    protected void setupBeforeInitDispatcher() throws Exception {
        // only load beans from spring once
        if (applicationContext == null) {
            GenericXmlContextLoader xmlContextLoader = new GenericXmlContextLoader();
            applicationContext = xmlContextLoader.loadContext(getContextLocations());
        }
        servletContext.setAttribute(WebApplicationContext.ROOT_WEB_APPLICATION_CONTEXT_ATTRIBUTE, applicationContext);
    }

    /**
     * Override to supply custom context locations.
     *
     * @return the context locations; defaults to {@code DEFAULT_CONTEXT_LOCATION}
     */
    protected String[] getContextLocations() {
        return new String[] {DEFAULT_CONTEXT_LOCATION};
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.service.cli.operation;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.metastore.api.Schema;
import org.apache.hadoop.hive.ql.processors.CommandProcessor;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.io.IOUtils;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationState;
import org.apache.hive.service.cli.RowSet;
import org.apache.hive.service.cli.RowSetFactory;
import org.apache.hive.service.cli.TableSchema;
import org.apache.hive.service.cli.session.HiveSession;
/**
* Executes a HiveCommand
*/
public class HiveCommandOperation extends ExecuteStatementOperation {
private CommandProcessorResponse response;
private CommandProcessor commandProcessor;
private TableSchema resultSchema = null;
/**
* For processors other than Hive queries (Driver), they output to session.out (a temp file)
* first and the fetchOne/fetchN/fetchAll functions get the output from pipeIn.
*/
private BufferedReader resultReader;
protected HiveCommandOperation(HiveSession parentSession, String statement,
CommandProcessor commandProcessor, Map<String, String> confOverlay) {
super(parentSession, statement, confOverlay, false);
this.commandProcessor = commandProcessor;
setupSessionIO(parentSession.getSessionState());
}
private void setupSessionIO(SessionState sessionState) {
try {
LOG.info("Putting temp output to file " + sessionState.getTmpOutputFile().toString());
sessionState.in = null; // hive server's session input stream is not used
// open a per-session file in auto-flush mode for writing temp results
sessionState.out = new PrintStream(new FileOutputStream(sessionState.getTmpOutputFile()), true, "UTF-8");
// TODO: for hadoop jobs, progress is printed out to session.err,
// we should find a way to feed back job progress to client
sessionState.err = new PrintStream(System.err, true, "UTF-8");
} catch (IOException e) {
LOG.error("Error in creating temp output file ", e);
try {
sessionState.in = null;
sessionState.out = new PrintStream(System.out, true, "UTF-8");
sessionState.err = new PrintStream(System.err, true, "UTF-8");
} catch (UnsupportedEncodingException ee) {
LOG.error("Error creating PrintStream", e);
ee.printStackTrace();
sessionState.out = null;
sessionState.err = null;
}
}
}
private void tearDownSessionIO() {
IOUtils.cleanup(LOG, parentSession.getSessionState().out);
IOUtils.cleanup(LOG, parentSession.getSessionState().err);
}
/* (non-Javadoc)
* @see org.apache.hive.service.cli.operation.Operation#run()
*/
@Override
public void run() throws HiveSQLException {
setState(OperationState.RUNNING);
try {
String command = getStatement().trim();
String[] tokens = statement.split("\\s");
String commandArgs = command.substring(tokens[0].length()).trim();
response = commandProcessor.run(commandArgs);
int returnCode = response.getResponseCode();
if (returnCode != 0) {
throw new HiveSQLException("Error while processing statement: "
+ response.getErrorMessage(), response.getSQLState(), response.getResponseCode());
}
Schema schema = response.getSchema();
if (schema != null) {
setHasResultSet(true);
resultSchema = new TableSchema(schema);
} else {
setHasResultSet(false);
resultSchema = new TableSchema();
}
} catch (HiveSQLException e) {
setState(OperationState.ERROR);
throw e;
} catch (Exception e) {
setState(OperationState.ERROR);
throw new HiveSQLException("Error running query: " + e.toString(), e);
}
setState(OperationState.FINISHED);
}
/* (non-Javadoc)
* @see org.apache.hive.service.cli.operation.Operation#close()
*/
@Override
public void close() throws HiveSQLException {
setState(OperationState.CLOSED);
tearDownSessionIO();
cleanTmpFile();
}
/* (non-Javadoc)
* @see org.apache.hive.service.cli.operation.Operation#getResultSetSchema()
*/
@Override
public TableSchema getResultSetSchema() throws HiveSQLException {
return resultSchema;
}
/* (non-Javadoc)
* @see org.apache.hive.service.cli.operation.Operation#getNextRowSet(org.apache.hive.service.cli.FetchOrientation, long)
*/
@Override
public RowSet getNextRowSet(FetchOrientation orientation, long maxRows) throws HiveSQLException {
validateDefaultFetchOrientation(orientation);
if (orientation.equals(FetchOrientation.FETCH_FIRST)) {
resetResultReader();
}
List<String> rows = readResults((int) maxRows);
RowSet rowSet = RowSetFactory.create(resultSchema, getProtocolVersion());
for (String row : rows) {
rowSet.addRow(new String[] {row});
}
return rowSet;
}
/**
* Reads the temporary results for non-Hive (non-Driver) commands to the
* resulting List of strings.
* @param results list of strings containing the results
* @param nLines number of lines read at once. If it is <= 0, then read all lines.
*/
private List<String> readResults(int nLines) throws HiveSQLException {
if (resultReader == null) {
SessionState sessionState = getParentSession().getSessionState();
File tmp = sessionState.getTmpOutputFile();
try {
resultReader = new BufferedReader(new FileReader(tmp));
} catch (FileNotFoundException e) {
LOG.error("File " + tmp + " not found. ", e);
throw new HiveSQLException(e);
}
}
List<String> results = new ArrayList<String>();
for (int i = 0; i < nLines || nLines <= 0; ++i) {
try {
String line = resultReader.readLine();
if (line == null) {
// reached the end of the result file
break;
} else {
results.add(line);
}
} catch (IOException e) {
LOG.error("Reading temp results encountered an exception: ", e);
throw new HiveSQLException(e);
}
}
return results;
}
private void cleanTmpFile() {
resetResultReader();
SessionState sessionState = getParentSession().getSessionState();
File tmp = sessionState.getTmpOutputFile();
tmp.delete();
}
private void resetResultReader() {
if (resultReader != null) {
IOUtils.cleanup(LOG, resultReader);
resultReader = null;
}
}
}
|
package org.infinispan.stream.impl.intops.primitive.i;
import java.util.stream.IntStream;
import org.infinispan.stream.impl.intops.IntermediateOperation;
/**
* Performs distinct operation on a {@link IntStream}
*/
public class DistinctIntOperation implements IntermediateOperation<Integer, IntStream, Integer, IntStream> {

   // The operation carries no state, so one shared instance suffices.
   private static final DistinctIntOperation INSTANCE = new DistinctIntOperation();

   private DistinctIntOperation() {
      // Singleton: obtain via getInstance().
   }

   /**
    * @return the shared singleton instance
    */
   public static DistinctIntOperation getInstance() {
      return INSTANCE;
   }

   /**
    * Applies {@link IntStream#distinct()} to the given stream.
    */
   @Override
   public IntStream perform(IntStream stream) {
      return stream.distinct();
   }
}
|
package com.zhazhapan.efo.service;
import com.zhazhapan.efo.model.UploadedRecord;
import java.util.List;
public interface IUploadedService {

    /**
     * Fetch upload records matching the given filters.
     *
     * @param user username or email address
     * @param file file name
     * @param category category name
     * @param offset paging offset
     *
     * @return {@link List} of matching upload records
     */
    List<UploadedRecord> list(String user, String file, String category, int offset);
}
|
package game.main.positionable.entity.monster;
import game.main.positionable.entity.Entity;
public class Monster extends Entity {

    /**
     * A monster is hostile towards exactly one entity: the player.
     *
     * @param e the entity to test for hostility
     * @return true iff {@code e} is the player of this monster's map
     */
    @Override
    public boolean isHostile(Entity e) {
        // map is inherited from Entity; presumably set when the monster is
        // placed on a map -- TODO confirm it is non-null before this is called.
        return e == map.player;
    }
}
|
package com.alibaba.alink.operator.batch.similarity;
import org.apache.flink.ml.api.misc.param.Params;
import com.alibaba.alink.operator.batch.utils.ModelMapBatchOp;
import com.alibaba.alink.operator.common.similarity.NearestNeighborsMapper;
import com.alibaba.alink.params.similarity.NearestNeighborPredictParams;
/**
* Find the approximate nearest neighbor of query texts.
*/
public class TextApproxNearestNeighborPredictBatchOp extends ModelMapBatchOp <TextApproxNearestNeighborPredictBatchOp>
	implements NearestNeighborPredictParams <TextApproxNearestNeighborPredictBatchOp> {

	private static final long serialVersionUID = -5810550818671846741L;

	/**
	 * Construct the op with default parameters.
	 */
	public TextApproxNearestNeighborPredictBatchOp() {
		this(new Params());
	}

	/**
	 * Construct the op with the given parameters; the actual prediction is
	 * delegated to {@link NearestNeighborsMapper}.
	 *
	 * @param params operator parameters
	 */
	public TextApproxNearestNeighborPredictBatchOp(Params params) {
		super(NearestNeighborsMapper::new, params);
	}
}
|
package ca.carleton.gcrc.olkit.multimedia.converter.threshold;
import ca.carleton.gcrc.olkit.multimedia.converter.MultimediaConversionThreshold;
public class ThresholdDummy implements MultimediaConversionThreshold {

	// Fixed answers, returned regardless of the media parameters.
	private final boolean conversionRequired;
	private final boolean resizeRequired;

	/**
	 * Create a threshold that always gives the supplied answers.
	 *
	 * @param conversionRequired value returned by isConversionRequired()
	 * @param resizeRequired value returned by isResizeRequired()
	 */
	public ThresholdDummy(boolean conversionRequired, boolean resizeRequired) {
		this.conversionRequired = conversionRequired;
		this.resizeRequired = resizeRequired;
	}

	/** All media parameters are ignored; the constructor value is returned. */
	@Override
	public boolean isConversionRequired(String videoFormat, Long videoRate,
			String audioFormat, Long audioRate, Long imageWidth,
			Long imageHeight) {
		return conversionRequired;
	}

	/** Dimensions are ignored; the constructor value is returned. */
	@Override
	public boolean isResizeRequired(Long imageWidth, Long imageHeight) {
		return resizeRequired;
	}
}
|
/**
*
*/
package framework.pagenavigation.State.ConcreteState;
import framework.pagenavigation.Mediator.AbstractMediator.APageNavigator;
import framework.pagenavigation.State.AbstractState.INavigatorState;
/**
* @author Quan Yang
*
*/
public class PListToMainState extends INavigatorState {

	/**
	 * State for navigating from the product-list page (B) back to the
	 * main page (A); registered under {@code ENavState.FROMBTOA}.
	 *
	 * @param navigator the page navigator that owns this state
	 */
	public PListToMainState(APageNavigator navigator) {
		super(ENavState.FROMBTOA, navigator);
	}

	/** Perform this state's transition on the owning navigator. */
	@Override
	public void navigate() {
		// NOTE(review): this state is tagged FROMBTOA, yet it moves the
		// navigator into getFromAToBState() -- confirm this is intended
		// and not a copy/paste slip.
		navigator.setCurrentState(navigator.getFromAToBState());
	}
}
|
package io.automatiko.engine.workflow.bpmn2.core;
import java.io.Serializable;
/**
 * A BPMN signal definition: an id, an optional display name, and a
 * reference to the structure (type) of its payload.
 */
public class Signal implements Serializable {

    private static final long serialVersionUID = 510l;

    private String id;
    private String name;
    private String structureRef;

    /**
     * Create an unnamed signal.
     *
     * @param id signal identifier
     * @param structureRef payload structure reference
     */
    public Signal(String id, String structureRef) {
        this.id = id;
        this.structureRef = structureRef;
    }

    /**
     * Create a named signal.
     *
     * @param id signal identifier
     * @param name display name
     * @param structureRef payload structure reference
     */
    public Signal(String id, String name, String structureRef) {
        this.id = id;
        this.name = name;
        this.structureRef = structureRef;
    }

    /** @return the signal identifier */
    public String getId() {
        return id;
    }

    /** @return the display name, or null if the signal is unnamed */
    public String getName() {
        return name;
    }

    /** @return the payload structure reference */
    public String getStructureRef() {
        return structureRef;
    }
}
|
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
* 修复结算金额amount的类型,原来Number类型不合理,导致商户无法通过sdk集成,需改成Price类型
*
* @author auto create
* @since 1.0, 2019-07-15 14:30:19
*/
public class SettleDetailInfo extends AlipayObject {

	private static final long serialVersionUID = 1646482799723732784L;

	/**
	 * Settlement amount, in yuan. Currently this must equal the
	 * transaction amount.
	 */
	@ApiField("amount")
	private String amount;

	/**
	 * Identifier of the settlement entity. When the settlement entity type
	 * is SecondMerchant, this is the second-level merchant's
	 * SecondMerchantID; when the type is Store, it is the store's external
	 * identifier.
	 */
	@ApiField("settle_entity_id")
	private String settleEntityId;

	/**
	 * Settlement entity type. Second-level merchant: SecondMerchant;
	 * merchant or directly-connected merchant store: Store.
	 */
	@ApiField("settle_entity_type")
	private String settleEntityType;

	/**
	 * Settlement summary dimension, specified by the merchant; settlements
	 * are grouped into batches by this dimension.
	 * <p>
	 * Currently must be used together with a settlement receiver account
	 * type of cardAliasNo.
	 */
	@ApiField("summary_dimension")
	private String summaryDimension;

	/**
	 * Settlement receiver. When the receiver account type is cardAliasNo,
	 * this is the number of the bank card the user bound in Alipay; when it
	 * is userId, it is the unique Alipay user number of the user's account
	 * (16 digits starting with 2088); when it is loginName, it is the
	 * user's Alipay login name.
	 */
	@ApiField("trans_in")
	private String transIn;

	/**
	 * Account type of the settlement receiver.
	 * <p>
	 * cardAliasNo: the receiver's bank card number; userId: the unique
	 * Alipay user number of an Alipay account; loginName: an Alipay login
	 * name.
	 */
	@ApiField("trans_in_type")
	private String transInType;

	public String getAmount() {
		return this.amount;
	}
	public void setAmount(String amount) {
		this.amount = amount;
	}

	public String getSettleEntityId() {
		return this.settleEntityId;
	}
	public void setSettleEntityId(String settleEntityId) {
		this.settleEntityId = settleEntityId;
	}

	public String getSettleEntityType() {
		return this.settleEntityType;
	}
	public void setSettleEntityType(String settleEntityType) {
		this.settleEntityType = settleEntityType;
	}

	public String getSummaryDimension() {
		return this.summaryDimension;
	}
	public void setSummaryDimension(String summaryDimension) {
		this.summaryDimension = summaryDimension;
	}

	public String getTransIn() {
		return this.transIn;
	}
	public void setTransIn(String transIn) {
		this.transIn = transIn;
	}

	public String getTransInType() {
		return this.transInType;
	}
	public void setTransInType(String transInType) {
		this.transInType = transInType;
	}
}
|
package com.javarush.games.snake;
import com.javarush.engine.cell.*;
/**
 * An apple game object, rendered as the red-apple emoji.
 *
 * @author Sergey Ponomarev on 11.01.2021
 * @project JavaRushTasks/com.javarush.games.snake
 */
public class Apple extends GameObject {

    // Surrogate pair encoding the red-apple emoji (U+1F34E).
    private static final String APPLE_SIGN = "\uD83C\uDF4E";

    // Presumably flipped to false once the apple is eaten; the flag is only
    // declared here — confirm against the game loop that reads it.
    public boolean isAlive = true;

    public Apple(int x, int y) {
        super(x, y);
    }

    /**
     * Draws the apple at its (x, y) cell: no background color, green emoji
     * text at 75% size.
     */
    public void draw(Game game) {
        game.setCellValueEx(x, y, Color.NONE, APPLE_SIGN, Color.GREEN, 75);
    }
}
|
/*
* Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.querydsl.core;
import static com.querydsl.core.types.PathMetadataFactory.forVariable;
import com.querydsl.core.types.*;
import com.querydsl.core.types.dsl.*;
/**
 * QDefaultQueryMetadata is a Querydsl query type for DefaultQueryMetadata.
 * <p>
 * This class has the shape of Querydsl APT-generated code (one typed path per
 * bean property); treat it as generated and regenerate rather than hand-edit.
 */
public class QDefaultQueryMetadata extends EntityPathBase<DefaultQueryMetadata> {

    private static final long serialVersionUID = 2000363531;

    // Default instance for use in queries without an explicit variable name.
    public static final QDefaultQueryMetadata defaultQueryMetadata = new QDefaultQueryMetadata("defaultQueryMetadata");

    // One path per property of DefaultQueryMetadata.
    public final BooleanPath distinct = createBoolean("distinct");

    public final SetPath<Expression<?>, SimplePath<Expression<?>>> exprInJoins = this.<Expression<?>, SimplePath<Expression<?>>>createSet("exprInJoins", Expression.class, SimplePath.class, PathInits.DIRECT);

    public final SetPath<QueryFlag, SimplePath<QueryFlag>> flags = this.<QueryFlag, SimplePath<QueryFlag>>createSet("flags", QueryFlag.class, SimplePath.class, PathInits.DIRECT);

    public final ListPath<Expression<?>, SimplePath<Expression<?>>> groupBy = this.<Expression<?>, SimplePath<Expression<?>>>createList("groupBy", Expression.class, SimplePath.class, PathInits.DIRECT);

    public final SimplePath<Predicate> having = createSimple("having", Predicate.class);

    public final ListPath<JoinExpression, SimplePath<JoinExpression>> joins = this.<JoinExpression, SimplePath<JoinExpression>>createList("joins", JoinExpression.class, SimplePath.class, PathInits.DIRECT);

    public final SimplePath<QueryModifiers> modifiers = createSimple("modifiers", QueryModifiers.class);

    public final ListPath<OrderSpecifier<?>, SimplePath<OrderSpecifier<?>>> orderBy = this.<OrderSpecifier<?>, SimplePath<OrderSpecifier<?>>>createList("orderBy", OrderSpecifier.class, SimplePath.class, PathInits.DIRECT);

    public final MapPath<ParamExpression<?>, Object, SimplePath<Object>> params = this.<ParamExpression<?>, Object, SimplePath<Object>>createMap("params", ParamExpression.class, Object.class, SimplePath.class);

    public final ListPath<Expression<?>, SimplePath<Expression<?>>> projection = this.<Expression<?>, SimplePath<Expression<?>>>createList("projection", Expression.class, SimplePath.class, PathInits.DIRECT);

    public final BooleanPath unique = createBoolean("unique");

    public final BooleanPath validate = createBoolean("validate");

    public final SimplePath<ValidatingVisitor> validatingVisitor = createSimple("validatingVisitor", ValidatingVisitor.class);

    public final SimplePath<Predicate> where = createSimple("where", Predicate.class);

    public QDefaultQueryMetadata(String variable) {
        super(DefaultQueryMetadata.class, forVariable(variable));
    }

    public QDefaultQueryMetadata(BeanPath<? extends DefaultQueryMetadata> entity) {
        super(entity.getType(), entity.getMetadata());
    }

    public QDefaultQueryMetadata(PathMetadata metadata) {
        super(DefaultQueryMetadata.class, metadata);
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.io;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.log.PerfLogger;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
/**
 * HiveInputFormat is a parameterized InputFormat which looks at the path name
 * and determine the correct InputFormat for that path name from
 * mapredPlan.pathToPartitionInfo(). It can be used to read files with different
 * input format in the same map-reduce job.
 */
public class HiveInputFormat<K extends WritableComparable, V extends Writable>
    implements InputFormat<K, V>, JobConfigurable {

  public static final String CLASS_NAME = HiveInputFormat.class.getName();
  public static final Log LOG = LogFactory.getLog(CLASS_NAME);

  /**
   * HiveInputSplit encapsulates an InputSplit with its corresponding
   * inputFormatClass. The reason that it derives from FileSplit is to make sure
   * "map.input.file" in MapTask.
   */
  public static class HiveInputSplit extends FileSplit implements InputSplit,
      Configurable {

    InputSplit inputSplit;
    String inputFormatClassName;

    public HiveInputSplit() {
      // This is the only public constructor of FileSplit
      super((Path) null, 0, 0, (String[]) null);
    }

    public HiveInputSplit(InputSplit inputSplit, String inputFormatClassName) {
      // This is the only public constructor of FileSplit
      super((Path) null, 0, 0, (String[]) null);
      this.inputSplit = inputSplit;
      this.inputFormatClassName = inputFormatClassName;
    }

    /** Returns the wrapped split produced by the underlying input format. */
    public InputSplit getInputSplit() {
      return inputSplit;
    }

    /** Name of the InputFormat class that produced (and can read) this split. */
    public String inputFormatClassName() {
      return inputFormatClassName;
    }

    @Override
    public Path getPath() {
      // Non-file splits have no path; return an empty path rather than null
      // so "map.input.file" consumers do not NPE.
      if (inputSplit instanceof FileSplit) {
        return ((FileSplit) inputSplit).getPath();
      }
      return new Path("");
    }

    /** The position of the first byte in the file to process. */
    @Override
    public long getStart() {
      if (inputSplit instanceof FileSplit) {
        return ((FileSplit) inputSplit).getStart();
      }
      return 0;
    }

    @Override
    public String toString() {
      return inputFormatClassName + ":" + inputSplit.toString();
    }

    @Override
    public long getLength() {
      long r = 0;
      try {
        r = inputSplit.getLength();
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
      return r;
    }

    @Override
    public String[] getLocations() throws IOException {
      return inputSplit.getLocations();
    }

    /**
     * Deserializes the wrapped split: the split class name is read first so
     * the right InputSplit instance can be reflectively created, then the
     * split body and finally the input format class name.
     */
    @Override
    public void readFields(DataInput in) throws IOException {
      String inputSplitClassName = in.readUTF();
      try {
        inputSplit = (InputSplit) ReflectionUtils.newInstance(conf
            .getClassByName(inputSplitClassName), conf);
      } catch (Exception e) {
        throw new IOException(
            "Cannot create an instance of InputSplit class = "
            + inputSplitClassName + ":" + e.getMessage(), e);
      }
      inputSplit.readFields(in);
      inputFormatClassName = in.readUTF();
    }

    /** Serializes in the same order readFields() expects. */
    @Override
    public void write(DataOutput out) throws IOException {
      out.writeUTF(inputSplit.getClass().getName());
      inputSplit.write(out);
      out.writeUTF(inputFormatClassName);
    }

    // Needed by readFields() to resolve the split class; injected by the
    // framework through the Configurable interface.
    Configuration conf;

    @Override
    public Configuration getConf() {
      return conf;
    }

    @Override
    public void setConf(Configuration conf) {
      this.conf = conf;
    }
  }

  JobConf job;

  /** JobConfigurable hook; keeps the job conf for later plan lookups. */
  public void configure(JobConf job) {
    this.job = job;
  }

  /**
   * A cache of InputFormat instances.
   *
   * Eagerly initialized as a ConcurrentHashMap: the previous lazily created
   * plain HashMap was mutated from a static method with no synchronization,
   * which is unsafe when several threads fetch record readers or splits
   * concurrently (lost updates or a corrupted map).
   */
  protected static Map<Class, InputFormat<WritableComparable, Writable>> inputFormats =
      new ConcurrentHashMap<Class, InputFormat<WritableComparable, Writable>>();

  /**
   * Returns a cached InputFormat instance for the given class, creating it
   * reflectively on first use.
   *
   * @param inputFormatClass concrete InputFormat implementation to obtain
   * @param job              job conf passed to ReflectionUtils for configuration
   * @throws IOException if the class cannot be instantiated
   */
  public static InputFormat<WritableComparable, Writable> getInputFormatFromCache(
      Class inputFormatClass, JobConf job) throws IOException {
    if (!inputFormats.containsKey(inputFormatClass)) {
      try {
        InputFormat<WritableComparable, Writable> newInstance = (InputFormat<WritableComparable, Writable>) ReflectionUtils
            .newInstance(inputFormatClass, job);
        // Benign race: two threads may both create an instance; the map stays
        // consistent and the last put wins.
        inputFormats.put(inputFormatClass, newInstance);
      } catch (Exception e) {
        throw new IOException("Cannot create an instance of InputFormat class "
            + inputFormatClass.getName() + " as specified in mapredWork!", e);
      }
    }
    return inputFormats.get(inputFormatClass);
  }

  /**
   * Unwraps the HiveInputSplit, pushes the partition's projections/filters
   * into the job conf, and delegates record reading to the InputFormat that
   * originally produced the split.
   */
  public RecordReader getRecordReader(InputSplit split, JobConf job,
      Reporter reporter) throws IOException {
    HiveInputSplit hsplit = (HiveInputSplit) split;
    InputSplit inputSplit = hsplit.getInputSplit();
    String inputFormatClassName = null;
    Class inputFormatClass = null;
    try {
      inputFormatClassName = hsplit.inputFormatClassName();
      inputFormatClass = job.getClassByName(inputFormatClassName);
    } catch (Exception e) {
      throw new IOException("cannot find class " + inputFormatClassName, e);
    }
    if (this.mrwork == null) {
      init(job);
    }
    boolean nonNative = false;
    PartitionDesc part = pathToPartitionInfo.get(hsplit.getPath().toString());
    if ((part != null) && (part.getTableDesc() != null)) {
      Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), job);
      nonNative = part.getTableDesc().isNonNative();
    }
    pushProjectionsAndFilters(job, inputFormatClass, hsplit.getPath()
      .toString(), hsplit.getPath().toUri().getPath(), nonNative);
    InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
    RecordReader innerReader = null;
    try {
      innerReader = inputFormat.getRecordReader(inputSplit, job, reporter);
    } catch (Exception e) {
      // Let the configured IO exception handlers decide whether to wrap,
      // rethrow or substitute a reader.
      innerReader = HiveIOExceptionHandlerUtil
          .handleRecordReaderCreationException(e, job);
    }
    HiveRecordReader<K,V> rr = new HiveRecordReader(innerReader, job);
    rr.initIOContext(hsplit, job, inputFormatClass, innerReader);
    return rr;
  }

  // Plan state extracted from the job conf by init(); maps input paths to
  // their partition descriptors.
  protected Map<String, PartitionDesc> pathToPartitionInfo;
  MapWork mrwork = null;

  /** Loads the map-work plan and its path-to-partition mapping from the job. */
  protected void init(JobConf job) {
    mrwork = Utilities.getMapWork(job);
    pathToPartitionInfo = mrwork.getPathToPartitionInfo();
  }

  /**
   * Delegates split generation to each input directory's own InputFormat,
   * wrapping every produced split in a HiveInputSplit that remembers which
   * format created it. Filter pushdown information is made available when a
   * directory maps to exactly one table scan.
   */
  public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
    PerfLogger perfLogger = PerfLogger.getPerfLogger();
    perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.GET_SPLITS);
    init(job);
    Path[] dirs = FileInputFormat.getInputPaths(job);
    if (dirs.length == 0) {
      throw new IOException("No input paths specified in job");
    }
    JobConf newjob = new JobConf(job);
    ArrayList<InputSplit> result = new ArrayList<InputSplit>();
    // for each dir, get the InputFormat, and do getSplits.
    for (Path dir : dirs) {
      PartitionDesc part = getPartitionDescFromPath(pathToPartitionInfo, dir);
      // create a new InputFormat instance if this is the first time to see this
      // class
      Class inputFormatClass = part.getInputFileFormatClass();
      InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
      Utilities.copyTableJobPropertiesToConf(part.getTableDesc(), newjob);
      // Make filter pushdown information available to getSplits.
      ArrayList<String> aliases =
        mrwork.getPathToAliases().get(dir.toUri().toString());
      if ((aliases != null) && (aliases.size() == 1)) {
        Operator op = mrwork.getAliasToWork().get(aliases.get(0));
        if ((op != null) && (op instanceof TableScanOperator)) {
          TableScanOperator tableScan = (TableScanOperator) op;
          pushFilters(newjob, tableScan);
        }
      }
      FileInputFormat.setInputPaths(newjob, dir);
      newjob.setInputFormat(inputFormat.getClass());
      // Split budget is divided evenly across directories; a directory may
      // get 0 as a hint, which input formats treat as "use your default".
      InputSplit[] iss = inputFormat.getSplits(newjob, numSplits / dirs.length);
      for (InputSplit is : iss) {
        result.add(new HiveInputSplit(is, inputFormatClass.getName()));
      }
    }
    LOG.info("number of splits " + result.size());
    perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.GET_SPLITS);
    return result.toArray(new HiveInputSplit[result.size()]);
  }

  /**
   * Validates every input directory with its own InputFormat via the shim
   * layer.
   *
   * @throws IOException if no input paths are set or validation fails
   */
  public void validateInput(JobConf job) throws IOException {
    init(job);
    Path[] dirs = FileInputFormat.getInputPaths(job);
    if (dirs.length == 0) {
      throw new IOException("No input paths specified in job");
    }
    JobConf newjob = new JobConf(job);
    // for each dir, get the InputFormat, and do validateInput.
    for (Path dir : dirs) {
      PartitionDesc part = getPartitionDescFromPath(pathToPartitionInfo, dir);
      // create a new InputFormat instance if this is the first time to see this
      // class
      InputFormat inputFormat = getInputFormatFromCache(part
          .getInputFileFormatClass(), job);
      FileInputFormat.setInputPaths(newjob, dir);
      newjob.setInputFormat(inputFormat.getClass());
      ShimLoader.getHadoopShims().inputFormatValidateInput(inputFormat, newjob);
    }
  }

  /**
   * Looks up the partition descriptor for a directory, trying the full path
   * string first and the scheme-less path second.
   *
   * @throws IOException if the directory has no entry in the plan
   */
  protected static PartitionDesc getPartitionDescFromPath(
      Map<String, PartitionDesc> pathToPartitionInfo, Path dir)
      throws IOException {
    PartitionDesc partDesc = pathToPartitionInfo.get(dir.toString());
    if (partDesc == null) {
      partDesc = pathToPartitionInfo.get(dir.toUri().getPath());
    }
    if (partDesc == null) {
      throw new IOException("cannot find dir = " + dir.toString()
          + " in partToPartitionInfo!");
    }
    return partDesc;
  }

  /**
   * Serializes the table scan's filter expression (text and serialized forms)
   * into the job conf so storage handlers can push the predicate down.
   */
  public static void pushFilters(JobConf jobConf, TableScanOperator tableScan) {
    TableScanDesc scanDesc = tableScan.getConf();
    if (scanDesc == null) {
      return;
    }
    // construct column name list and types for reference by filter push down
    Utilities.setColumnNameList(jobConf, tableScan);
    Utilities.setColumnTypeList(jobConf, tableScan);
    // push down filters
    ExprNodeGenericFuncDesc filterExpr = (ExprNodeGenericFuncDesc)scanDesc.getFilterExpr();
    if (filterExpr == null) {
      return;
    }
    String filterText = filterExpr.getExprString();
    String filterExprSerialized = Utilities.serializeExpression(filterExpr);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Filter text = " + filterText);
      LOG.debug("Filter expression = " + filterExprSerialized);
    }
    jobConf.set(
      TableScanDesc.FILTER_TEXT_CONF_STR,
      filterText);
    jobConf.set(
      TableScanDesc.FILTER_EXPR_CONF_STR,
      filterExprSerialized);
  }

  /** Convenience overload for native tables (exact-prefix matching rules). */
  protected void pushProjectionsAndFilters(JobConf jobConf, Class inputFormatClass,
      String splitPath, String splitPathWithNoSchema) {
    pushProjectionsAndFilters(jobConf, inputFormatClass, splitPath,
      splitPathWithNoSchema, false);
  }

  /**
   * Finds the table scans whose plan paths match the split's path and pushes
   * their needed columns and filters into the job conf.
   *
   * @param nonNative true for storage-handler tables, which require an exact
   *                  path match (see HIVE-1903) instead of a prefix match
   */
  protected void pushProjectionsAndFilters(JobConf jobConf, Class inputFormatClass,
      String splitPath, String splitPathWithNoSchema, boolean nonNative) {
    if (this.mrwork == null) {
      init(job);
    }
    if(this.mrwork.getPathToAliases() == null) {
      return;
    }
    ArrayList<String> aliases = new ArrayList<String>();
    Iterator<Entry<String, ArrayList<String>>> iterator = this.mrwork
        .getPathToAliases().entrySet().iterator();
    while (iterator.hasNext()) {
      Entry<String, ArrayList<String>> entry = iterator.next();
      String key = entry.getKey();
      boolean match;
      if (nonNative) {
        // For non-native tables, we need to do an exact match to avoid
        // HIVE-1903. (The table location contains no files, and the string
        // representation of its path does not have a trailing slash.)
        match =
          splitPath.equals(key) || splitPathWithNoSchema.equals(key);
      } else {
        // But for native tables, we need to do a prefix match for
        // subdirectories. (Unlike non-native tables, prefix mixups don't seem
        // to be a potential problem here since we are always dealing with the
        // path to something deeper than the table location.)
        match =
          splitPath.startsWith(key) || splitPathWithNoSchema.startsWith(key);
      }
      if (match) {
        ArrayList<String> list = entry.getValue();
        for (String val : list) {
          aliases.add(val);
        }
      }
    }
    for (String alias : aliases) {
      Operator<? extends OperatorDesc> op = this.mrwork.getAliasToWork().get(
        alias);
      if (op instanceof TableScanOperator) {
        TableScanOperator ts = (TableScanOperator) op;
        // push down projections.
        ColumnProjectionUtils.appendReadColumns(
            jobConf, ts.getNeededColumnIDs(), ts.getNeededColumns());
        // push down filters
        pushFilters(jobConf, ts);
      }
    }
  }
}
|
package cc.mrbird.febs.job.task;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
 * Sample Spring bean whose public methods are invoked by the job/scheduling
 * module, demonstrating parameterised and parameterless task methods.
 */
@Slf4j
@Component
public class TestTask {

    /**
     * Demo task taking one parameter. The (Chinese) log message reads:
     * "I am the test method with a parameter, being executed; parameter: {}".
     */
    public void test(String params) {
        log.info("我是带参数的test方法,正在被执行,参数为:{}" , params);
    }

    /**
     * Demo task without parameters. The (Chinese) log message reads:
     * "I am the parameterless test1 method, being executed".
     */
    public void test1() {
        log.info("我是不带参数的test1方法,正在被执行");
    }
}
|
package com.wooki.core.mixins;
import org.apache.tapestry5.BindingConstants;
import org.apache.tapestry5.annotations.AfterRender;
import org.apache.tapestry5.annotations.Import;
import org.apache.tapestry5.annotations.InjectContainer;
import org.apache.tapestry5.annotations.Parameter;
import org.apache.tapestry5.corelib.base.AbstractLink;
import org.apache.tapestry5.ioc.annotations.Inject;
import org.apache.tapestry5.json.JSONObject;
import org.apache.tapestry5.services.javascript.JavaScriptSupport;
/**
 * This mixins implements a simple show hide effect: clicking the containing
 * link reveals one element, and clicking a second element hides it again,
 * optionally resetting an enclosing form.
 *
 * @author ccordenier
 */
@Import(library =
{ "context:/static/js/wooki-core.js" })
public class ShowHideOnClick
{
    /**
     * This parameter is the element to display on click.
     */
    @Parameter(required = true, allowNull = false, defaultPrefix = BindingConstants.LITERAL)
    private String toShow;

    /**
     * This is the id of the element that will return to initial state on click.
     */
    @Parameter(required = true, allowNull = false, defaultPrefix = BindingConstants.LITERAL)
    private String hideLnkId;

    /**
     * Effect duration, in the unit expected by the client-side script.
     * NOTE(review): required = true combined with value = "200" (a default) is
     * unusual — confirm whether the parameter is really mandatory.
     */
    @Parameter(required = true, allowNull = false, value = "200")
    private int duration;

    /**
     * Reset the form on hide event.
     */
    @Parameter(defaultPrefix = BindingConstants.LITERAL)
    private String resetFormClass;

    // The link component this mixin is attached to; its client id is the
    // "show" trigger.
    @InjectContainer
    private AbstractLink showLnkId;

    @Inject
    private JavaScriptSupport support;

    /**
     * Generate Javascript method call.
     * Bundles the client ids and options into a JSON payload and registers the
     * "initShowHideEffect" initializer after the component has rendered.
     */
    @AfterRender
    public void initShowHideEffect()
    {
        JSONObject data = new JSONObject();
        data.put("showLnkId", this.showLnkId.getClientId());
        data.put("toShow", this.toShow);
        data.put("hideLnkId", this.hideLnkId);
        data.put("duration", this.duration);
        // Also reset form if the link is inside a form
        if (this.resetFormClass != null)
        {
            data.put("formClass", this.resetFormClass);
        }
        support.addInitializerCall("initShowHideEffect", data);
    }
}
|
/*
*
* Copyright 2016,2017 DTCC, Fujitsu Australia Software Technology, IBM - All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.hyperledger.fabric.sdk;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import io.netty.util.internal.ConcurrentSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hyperledger.fabric.protos.discovery.Protocol;
import org.hyperledger.fabric.protos.gossip.Message;
import org.hyperledger.fabric.protos.msp.Identities;
import org.hyperledger.fabric.protos.msp.MspConfig;
import org.hyperledger.fabric.sdk.Channel.ServiceDiscoveryChaincodeCalls;
import org.hyperledger.fabric.sdk.ServiceDiscovery.SDLayout.SDGroup;
import org.hyperledger.fabric.sdk.exception.InvalidProtocolBufferRuntimeException;
import org.hyperledger.fabric.sdk.exception.ServiceDiscoveryException;
import org.hyperledger.fabric.sdk.helper.Config;
import org.hyperledger.fabric.sdk.helper.DiagnosticFileDumper;
import org.hyperledger.fabric.sdk.transaction.TransactionContext;
import static java.lang.String.format;
import static org.hyperledger.fabric.sdk.helper.Utils.toHexString;
public class ServiceDiscovery {
private static final Log logger = LogFactory.getLog(ServiceDiscovery.class);
private static final boolean DEBUG = logger.isDebugEnabled();
private static final Config config = Config.getConfig();
private static final boolean IS_TRACE_LEVEL = logger.isTraceEnabled();
// File dumper is only created when trace logging is on; null otherwise.
private static final DiagnosticFileDumper diagnosticFileDumper = IS_TRACE_LEVEL
        ? config.getDiagnosticFileDumper() : null;
private static final int SERVICE_DISCOVERY_WAITTIME = config.getServiceDiscoveryWaitTime();
private static final Random random = new Random();
// Peers this channel may send discovery requests to.
private final Collection<Peer> serviceDiscoveryPeers;
private final Channel channel;
private final TransactionContext transactionContext;
private final String channelName;
// Cache keyed by chaincode name; volatile so a refreshed map published by one
// thread is visible to readers on others.
private volatile Map<String, SDChaindcode> chaindcodeMap = new HashMap<>();
/**
 * Creates a discovery helper bound to one channel.
 *
 * @param channel               the channel whose topology is being discovered
 * @param serviceDiscoveryPeers peers that accept discovery requests
 * @param transactionContext    signing context; a same-context retry copy is
 *                              kept — presumably so discovery signing does not
 *                              disturb the caller's context (confirm against
 *                              TransactionContext).
 */
ServiceDiscovery(Channel channel, Collection<Peer> serviceDiscoveryPeers, TransactionContext transactionContext) {
    this.serviceDiscoveryPeers = serviceDiscoveryPeers;
    this.channel = channel;
    this.channelName = channel.getName();
    this.transactionContext = transactionContext.retryTransactionSameContext();
}
/**
 * Returns the discovered endorser information for a single chaincode, serving
 * from the cached map when present and otherwise running a discovery query
 * for just that chaincode.
 *
 * @param transactionContext signing context for the discovery request
 * @param name               chaincode name to look up
 * @return the discovered chaincode endorsement data (never null)
 * @throws ServiceDiscoveryException if discovery finds no endorsers
 */
SDChaindcode discoverEndorserEndpoint(TransactionContext transactionContext, final String name) throws ServiceDiscoveryException {
    Map<String, SDChaindcode> lchaindcodeMap = chaindcodeMap;
    if (lchaindcodeMap != null) { // check if we have it already.
        SDChaindcode sdChaindcode = lchaindcodeMap.get(name);
        if (null != sdChaindcode) {
            return sdChaindcode;
        }
    }
    final ServiceDiscoveryChaincodeCalls serviceDiscoveryChaincodeCalls = new ServiceDiscoveryChaincodeCalls(name);
    LinkedList<ServiceDiscoveryChaincodeCalls> cc = new LinkedList<>();
    cc.add(serviceDiscoveryChaincodeCalls);
    List<List<ServiceDiscoveryChaincodeCalls>> ccl = new LinkedList<>();
    ccl.add(cc);
    Map<String, SDChaindcode> dchaindcodeMap = discoverEndorserEndpoints(transactionContext, ccl);
    final SDChaindcode sdChaindcode = dchaindcodeMap.get(name);
    if (null == sdChaindcode) {
        // Message typo fixed: was "Failed to find and endorsers".
        throw new ServiceDiscoveryException(format("Failed to find any endorsers for chaincode %s. See logs for details", name));
    }
    return sdChaindcode;
}
/**
 * Names of all chaincodes found by the most recent (non-forced) full network
 * discovery; empty when discovery has produced nothing.
 */
Collection<String> getDiscoveredChaincodeNames() {
    SDNetwork network = fullNetworkDiscovery(false);
    return network == null
            ? Collections.emptyList()
            : new ArrayList<>(network.getChaincodesNames());
}
/**
 * Snapshot of one discovery run: TLS material per MSP, discovered endorsers
 * and orderers, and a lazily computed set of chaincode names. Reads outer
 * class state (logger, channelName, IS_TRACE_LEVEL).
 */
class SDNetwork {
    // MSP id -> root / intermediate TLS CA certs collected from responses.
    final Map<String, List<byte[]>> tlsCerts = new HashMap<>();
    final Map<String, List<byte[]>> tlsIntermCerts = new HashMap<>();
    // Timestamp of the discovery run this snapshot belongs to.
    long discoveryTime;

    /** Records a root TLS CA cert for the given MSP id. */
    void addTlsCert(String mspid, byte[] cert) {
        if (IS_TRACE_LEVEL) {
            logger.trace(format("Channel %s service discovery MSPID %s adding TLSCert %s", channelName, mspid, toHexString(cert)));
        }
        tlsCerts.computeIfAbsent(mspid, k -> new LinkedList<>()).add(cert);
    }

    /** Records an intermediate TLS CA cert for the given MSP id. */
    void addTlsIntermCert(String mspid, byte[] cert) {
        if (IS_TRACE_LEVEL) {
            logger.trace(format("Channel %s service discovery MSPID %s adding intermediate TLSCert %s", channelName, mspid, toHexString(cert)));
        }
        tlsIntermCerts.computeIfAbsent(mspid, k -> new LinkedList<>()).add(cert);
    }

    /** Endorser known at the given endpoint, or null. */
    SDEndorser getEndorserByEndpoint(String endpoint) {
        return endorsers.get(endpoint);
    }

    /** Read-only view of all discovered endorsers. */
    public Collection<SDEndorser> getEndorsers() {
        return Collections.unmodifiableCollection(endorsers.values());
    }

    // endpoint -> endorser / orderer; replaced wholesale when populated.
    Map<String, SDEndorser> endorsers = Collections.emptyMap();
    Map<String, SDOrderer> ordererEndpoints = Collections.emptyMap();

    Set<String> getOrdererEndpoints() {
        return Collections.unmodifiableSet(ordererEndpoints.keySet());
    }

    Collection<SDOrderer> getSDOrderers() {
        return ordererEndpoints.values();
    }

    Set<String> getPeerEndpoints() {
        return Collections.unmodifiableSet(endorsers.keySet());
    }

    // Lazily computed in getChaincodesNames(); null means "not computed yet".
    Set<String> chaincodeNames = null;

    /**
     * Union of chaincode names reported by all endorsers; computed once and
     * cached for this snapshot.
     */
    Set<String> getChaincodesNames() {
        if (null == chaincodeNames) {
            if (null == endorsers) {
                chaincodeNames = Collections.emptySet();
                return chaincodeNames;
            }
            Set<String> ret = new HashSet<>();
            endorsers.values().forEach(sdEndorser -> {
                if (null != sdEndorser.chaincodesList) {
                    sdEndorser.chaincodesList.forEach(chaincode -> ret.add(chaincode.getName()));
                }
            });
            chaincodeNames = ret;
        }
        return chaincodeNames;
    }

    /**
     * Root TLS CA certs for an MSP id; empty (with a debug log) when none
     * were discovered.
     */
    Collection<byte[]> getTlsCerts(final String mspid) {
        final Collection<byte[]> bytes = tlsCerts.get(mspid);
        if (null == bytes) {
            logger.debug(format("Channel %s no tls ca certs for mspid: %s", channelName, mspid));
            return Collections.emptyList();
        }
        if (bytes.isEmpty()) {
            logger.debug(format("Channel %s no tls ca certs for mspid: %s", channelName, mspid));
        }
        return Collections.unmodifiableCollection(bytes);
    }

    /**
     * Intermediate TLS CA certs for an MSP id; empty (with a debug log) when
     * none were discovered.
     */
    Collection<byte[]> getTlsIntermediateCerts(String mspid) {
        final Collection<byte[]> bytes = tlsIntermCerts.get(mspid);
        if (null == bytes) {
            logger.debug(format("Channel %s no tls intermediary ca certs for mspid: %s", channelName, mspid));
            return Collections.emptyList();
        }
        if (bytes.isEmpty()) {
            logger.debug(format("Channel %s no tls intermediary ca certs for mspid: %s", channelName, mspid));
        }
        return Collections.unmodifiableCollection(bytes);
    }
}
// Latest discovery snapshot; null until the first run completes. Volatile so
// a snapshot published by one thread is visible to readers on others.
private volatile SDNetwork sdNetwork = null;
// Certificates accumulated from discovery responses; a concurrent set —
// presumably because responses may be processed from multiple threads (TODO
// confirm against the scheduled-refresh code).
private final ConcurrentSet<ByteString> certs = new ConcurrentSet<>();
/**
 * Runs a discovery round (config query + peer-membership query) against the channel's
 * discovery peers and refreshes the cached {@code sdNetwork} snapshot.
 *
 * <p>Peers are tried in random order; the first peer that answers both queries
 * successfully produces the new snapshot. Unless {@code force} is set, a cached
 * snapshot younger than {@code SERVICE_DISCOVER_FREQ_SECONDS} is returned as-is.
 *
 * @param ltransactionContext signing context used to authenticate the discovery request.
 * @param force               when true, ignore the cached snapshot's age.
 * @return the refreshed (or still-fresh cached) network view, or {@code null} if
 *         every discovery peer failed.
 */
SDNetwork networkDiscovery(TransactionContext ltransactionContext, boolean force) {
    logger.trace(format("Network discovery force: %b", force));
    // Randomize peer order so load spreads across discovery peers over time.
    ArrayList<Peer> speers = new ArrayList<>(serviceDiscoveryPeers);
    Collections.shuffle(speers);
    SDNetwork ret = sdNetwork;
    // Serve the cached snapshot while it is still within the refresh window.
    if (!force && null != ret && ret.discoveryTime + SERVICE_DISCOVER_FREQ_SECONDS * 1000 > System.currentTimeMillis()) {
        return ret;
    }
    ret = null;
    for (final Peer serviceDiscoveryPeer : speers) {
        try {
            SDNetwork lsdNetwork = new SDNetwork();
            // Discovery requires mutual TLS: the request embeds a digest of our client cert.
            final byte[] clientTLSCertificateDigest = serviceDiscoveryPeer.getClientTLSCertificateDigest();
            logger.info(format("Channel %s doing discovery with peer: %s", channelName, serviceDiscoveryPeer.toString()));
            if (null == clientTLSCertificateDigest) {
                throw new RuntimeException(format("Channel %s, peer %s requires mutual tls for service discovery.", channelName, serviceDiscoveryPeer));
            }
            ByteString clientIdent = ltransactionContext.getIdentity().toByteString();
            ByteString tlshash = ByteString.copyFrom(clientTLSCertificateDigest);
            Protocol.AuthInfo authentication = Protocol.AuthInfo.newBuilder().setClientIdentity(clientIdent).setClientTlsCertHash(tlshash).build();
            // Two queries in a fixed order: [0] channel config, [1] peer membership.
            List<Protocol.Query> fq = new ArrayList<>(2);
            fq.add(Protocol.Query.newBuilder().setChannel(channelName).setConfigQuery(Protocol.ConfigQuery.newBuilder().build()).build());
            fq.add(Protocol.Query.newBuilder().setChannel(channelName).setPeerQuery(Protocol.PeerMembershipQuery.newBuilder().build()).build());
            Protocol.Request request = Protocol.Request.newBuilder().addAllQueries(fq).setAuthentication(authentication).build();
            ByteString payloadBytes = request.toByteString();
            ByteString signatureBytes = ltransactionContext.signByteStrings(payloadBytes);
            Protocol.SignedRequest sr = Protocol.SignedRequest.newBuilder()
                    .setPayload(payloadBytes).setSignature(signatureBytes).build();
            if (IS_TRACE_LEVEL && null != diagnosticFileDumper) { // dump protobuf we sent
                logger.trace(format("Service discovery channel %s %s service chaincode query sent %s", channelName, serviceDiscoveryPeer,
                        diagnosticFileDumper.createDiagnosticProtobufFile(sr.toByteArray())));
            }
            final Protocol.Response response = serviceDiscoveryPeer.sendDiscoveryRequestAsync(sr).get(SERVICE_DISCOVERY_WAITTIME, TimeUnit.MILLISECONDS);
            if (IS_TRACE_LEVEL && null != diagnosticFileDumper) { // dump protobuf we get
                logger.trace(format("Service discovery channel %s %s service discovery returned %s", channelName, serviceDiscoveryPeer,
                        diagnosticFileDumper.createDiagnosticProtobufFile(response.toByteArray())));
            }
            serviceDiscoveryPeer.hasConnected();
            final List<Protocol.QueryResult> resultsList = response.getResultsList();
            Protocol.QueryResult queryResult;
            Protocol.QueryResult queryResult2;
            queryResult = resultsList.get(0); // config query result (same index as request order)
            if (queryResult.getResultCase().getNumber() == Protocol.QueryResult.ERROR_FIELD_NUMBER) {
                logger.warn(format("Channel %s peer: %s error during service discovery %s", channelName, serviceDiscoveryPeer.toString(), queryResult.getError().getContent()));
                continue; // this peer failed; try the next one
            }
            queryResult2 = resultsList.get(1); // membership query result
            if (queryResult2.getResultCase().getNumber() == Protocol.QueryResult.ERROR_FIELD_NUMBER) {
                logger.warn(format("Channel %s peer %s service discovery error %s", channelName, serviceDiscoveryPeer.toString(), queryResult2.getError().getContent()));
                continue;
            }
            Protocol.ConfigResult configResult = queryResult.getConfigResult();
            Map<String, MspConfig.FabricMSPConfig> msps = configResult.getMspsMap();
            // Collect root + intermediate signing certs across all MSPs; TLS certs are
            // recorded per-MSP on the new snapshot as we go.
            Set<ByteString> cbbs = new HashSet<>(msps.size() * 4);
            for (Map.Entry<String, MspConfig.FabricMSPConfig> i : msps.entrySet()) {
                final MspConfig.FabricMSPConfig value = i.getValue();
                final String mspid = value.getName();
                cbbs.addAll(value.getRootCertsList());
                cbbs.addAll(value.getIntermediateCertsList());
                value.getTlsRootCertsList().forEach(bytes -> lsdNetwork.addTlsCert(mspid, bytes.toByteArray()));
                value.getTlsIntermediateCertsList().forEach(bytes -> lsdNetwork.addTlsIntermCert(mspid, bytes.toByteArray()));
            }
            // Only certs not seen before are pushed into the crypto store.
            List<byte[]> toaddCerts = new LinkedList<>();
            synchronized (certs) {
                cbbs.forEach(bytes -> {
                    if (certs.add(bytes)) {
                        toaddCerts.add(bytes.toByteArray());
                    }
                });
            }
            if (!toaddCerts.isEmpty()) { // add them to crypto store.
                channel.client.getCryptoSuite().loadCACertificatesAsBytes(toaddCerts);
            }
            // Build orderer map keyed by normalized endpoint; conflicting MSP ids for the
            // same endpoint are reported and the later entry ignored.
            Map<String, SDOrderer> ordererEndpoints = new HashMap<>();
            Map<String, Protocol.Endpoints> orderersMap = configResult.getOrderersMap();
            for (Map.Entry<String, Protocol.Endpoints> i : orderersMap.entrySet()) {
                final String mspid = i.getKey();
                Protocol.Endpoints value = i.getValue();
                for (Protocol.Endpoint l : value.getEndpointList()) {
                    logger.trace(format("Channel: %s peer: %s discovered orderer MSPID: %s, endpoint: %s:%s", channelName, serviceDiscoveryPeer, mspid, l.getHost(), l.getPort()));
                    String endpoint = (l.getHost() + ":" + l.getPort()).trim().toLowerCase();
                    SDOrderer discoveredAlready = ordererEndpoints.get(endpoint);
                    if (discoveredAlready != null) {
                        if (!mspid.equals(discoveredAlready.getMspid())) {
                            logger.error(format("Service discovery in channel: %s, peer: %s found Orderer endpoint: %s with two mspids: '%s', '%s'", channelName, serviceDiscoveryPeer, endpoint, mspid, discoveredAlready.getMspid()));
                            continue; // report it and ignore.
                        }
                        logger.debug(format("Service discovery in channel: %s, peer: %s found Orderer endpoint: %s mspid: %s discovered twice", channelName, serviceDiscoveryPeer, endpoint, mspid));
                        continue;
                    }
                    final SDOrderer sdOrderer = new SDOrderer(mspid, endpoint, lsdNetwork.getTlsCerts(mspid), lsdNetwork.getTlsIntermediateCerts(mspid));
                    ordererEndpoints.put(sdOrderer.getEndPoint(), sdOrderer);
                }
            }
            lsdNetwork.ordererEndpoints = ordererEndpoints;
            // Build endorser map from the membership result, with the same
            // duplicate/conflict handling as for orderers.
            Protocol.PeerMembershipResult membership = queryResult2.getMembers();
            lsdNetwork.endorsers = new HashMap<>();
            for (Map.Entry<String, Protocol.Peers> peers : membership.getPeersByOrgMap().entrySet()) {
                final String mspId = peers.getKey();
                final Protocol.Peers peer = peers.getValue();
                for (Protocol.Peer pp : peer.getPeersList()) {
                    SDEndorser ppp = new SDEndorser(pp, lsdNetwork.getTlsCerts(mspId), lsdNetwork.getTlsIntermediateCerts(mspId));
                    SDEndorser discoveredAlready = lsdNetwork.endorsers.get(ppp.getEndpoint());
                    if (null != discoveredAlready) {
                        if (!mspId.equals(discoveredAlready.getMspid())) {
                            logger.error(format("Service discovery in channel: %s, peer: %s, found endorser endpoint: %s with two mspids: '%s', '%s'", channelName, serviceDiscoveryPeer, ppp.getEndpoint(), mspId, discoveredAlready.getMspid()));
                            continue; // report it and ignore.
                        }
                        logger.debug(format("Service discovery in channel %s peer: %s found Endorser endpoint: %s mspid: %s discovered twice", channelName, serviceDiscoveryPeer, ppp.getEndpoint(), mspId));
                        continue;
                    }
                    logger.trace(format("Channel %s peer: %s discovered peer mspid group: %s, endpoint: %s, mspid: %s", channelName, serviceDiscoveryPeer, mspId, ppp.getEndpoint(), ppp.getMspid()));
                    lsdNetwork.endorsers.put(ppp.getEndpoint(), ppp);
                }
            }
            lsdNetwork.discoveryTime = System.currentTimeMillis();
            // Publish the completed snapshot; volatile write makes it visible to readers.
            sdNetwork = lsdNetwork;
            ret = lsdNetwork;
            break; // one successful peer is enough
        } catch (Exception e) {
            // Per-peer failure is non-fatal: log and fall through to the next peer.
            logger.warn(format("Channel %s peer %s service discovery error %s", channelName, serviceDiscoveryPeer, e.getMessage()));
        }
    }
    logger.debug(format("Channel %s service discovery completed: %b", channelName, ret != null));
    return ret;
}
/**
 * Immutable description of an orderer node found via service discovery:
 * its organization's MSP id, its normalized endpoint, and the TLS certificate
 * chains needed to connect to it.
 */
public static class SDOrderer {
    private final String mspid;
    private final String endPoint;
    private final Collection<byte[]> tlsCerts;
    private final Collection<byte[]> tlsIntermediateCerts;

    SDOrderer(String mspid, String endPoint, Collection<byte[]> tlsCerts, Collection<byte[]> tlsIntermediateCerts) {
        this.mspid = mspid;
        this.endPoint = endPoint;
        this.tlsCerts = tlsCerts;
        this.tlsIntermediateCerts = tlsIntermediateCerts;
    }

    /** @return the MSP id of the orderer's organization. */
    public String getMspid() {
        return mspid;
    }

    /** @return the orderer's "host:port" endpoint. */
    public String getEndPoint() {
        return endPoint;
    }

    /** @return TLS root CA certificates for the orderer's MSP. */
    public Collection<byte[]> getTlsCerts() {
        return tlsCerts;
    }

    /** @return TLS intermediate CA certificates for the orderer's MSP. */
    public Collection<byte[]> getTlsIntermediateCerts() {
        return tlsIntermediateCerts;
    }
}
/**
 * Queries discovery peers for the endorsement layouts of the given chaincodes
 * (with their collection/chaincode-call interests).
 *
 * <p>Peers are tried in random order until layouts for every requested chaincode
 * are found. Peer endpoints referenced by a layout but missing from the cached
 * network snapshot trigger a forced {@link #networkDiscovery} refresh.
 *
 * @param transactionContext signing context for the discovery requests.
 * @param chaincodeNames     per-chaincode call interests; element 0 of each inner
 *                           list names the chaincode (NOTE: shadows the unrelated
 *                           SDNetwork field of the same name).
 * @return map of chaincode name to its discovered endorsement layouts; may be
 *         missing entries if not all chaincodes could be resolved (logged).
 * @throws ServiceDiscoveryException if the last peer tried failed outright.
 */
Map<String, SDChaindcode> discoverEndorserEndpoints(TransactionContext transactionContext, List<List<ServiceDiscoveryChaincodeCalls>> chaincodeNames) throws ServiceDiscoveryException {
    if (null == chaincodeNames) {
        logger.warn("Discover of chaincode names was null.");
        return Collections.emptyMap();
    }
    if (chaincodeNames.isEmpty()) {
        logger.warn("Discover of chaincode names was empty.");
        return Collections.emptyMap();
    }
    if (DEBUG) {
        // Render the requested interests only when debug logging is on.
        StringBuilder cns = new StringBuilder(1000);
        String sep = "";
        cns.append("[");
        for (List<ServiceDiscoveryChaincodeCalls> s : chaincodeNames) {
            ServiceDiscoveryChaincodeCalls n = s.get(0);
            cns.append(sep).append(n.write(s.subList(1, s.size())));
            sep = ", ";
        }
        cns.append("]");
        logger.debug(format("Channel %s doing discovery for chaincodes: %s", channelName, cns.toString()));
    }
    ArrayList<Peer> speers = new ArrayList<>(serviceDiscoveryPeers);
    Collections.shuffle(speers); // spread load across discovery peers
    final Map<String, SDChaindcode> ret = new HashMap<>();
    SDNetwork sdNetwork = networkDiscovery(transactionContext, false);
    ServiceDiscoveryException serviceDiscoveryException = null;
    for (Peer serviceDiscoveryPeer : speers) {
        // Only the most recent peer's failure is remembered and possibly rethrown.
        serviceDiscoveryException = null;
        try {
            logger.debug(format("Channel %s doing discovery for chaincodes on peer: %s", channelName, serviceDiscoveryPeer.toString()));
            TransactionContext ltransactionContext = transactionContext.retryTransactionSameContext();
            final byte[] clientTLSCertificateDigest = serviceDiscoveryPeer.getClientTLSCertificateDigest();
            if (null == clientTLSCertificateDigest) {
                logger.warn(format("Channel %s peer %s requires mutual tls for service discovery.", channelName, serviceDiscoveryPeer.toString()));
                continue;
            }
            ByteString clientIdent = ltransactionContext.getIdentity().toByteString();
            ByteString tlshash = ByteString.copyFrom(clientTLSCertificateDigest);
            Protocol.AuthInfo authentication = Protocol.AuthInfo.newBuilder().setClientIdentity(clientIdent).setClientTlsCertHash(tlshash).build();
            // One chaincode query per still-unresolved chaincode.
            List<Protocol.Query> fq = new ArrayList<>(chaincodeNames.size());
            for (List<ServiceDiscoveryChaincodeCalls> chaincodeName : chaincodeNames) {
                if (ret.containsKey(chaincodeName.get(0).getName())) {
                    continue; // already resolved by a previous peer
                }
                LinkedList<Protocol.ChaincodeCall> chaincodeCalls = new LinkedList<>();
                chaincodeName.forEach(serviceDiscoveryChaincodeCalls -> chaincodeCalls.add(serviceDiscoveryChaincodeCalls.build()));
                List<Protocol.ChaincodeInterest> cinn = new ArrayList<>(1);
                // NOTE(review): the results of build() are discarded here and the calls were
                // already built into chaincodeCalls above -- this line looks redundant; confirm
                // build() has no needed side effect before removing.
                chaincodeName.forEach(ServiceDiscoveryChaincodeCalls::build);
                Protocol.ChaincodeInterest cci = Protocol.ChaincodeInterest.newBuilder().addAllChaincodes(chaincodeCalls).build();
                cinn.add(cci);
                Protocol.ChaincodeQuery chaincodeQuery = Protocol.ChaincodeQuery.newBuilder().addAllInterests(cinn).build();
                fq.add(Protocol.Query.newBuilder().setChannel(channelName).setCcQuery(chaincodeQuery).build());
            }
            if (fq.size() == 0) {
                //this would be odd but lets take care of it.
                break;
            }
            Protocol.Request request = Protocol.Request.newBuilder().addAllQueries(fq).setAuthentication(authentication).build();
            ByteString payloadBytes = request.toByteString();
            ByteString signatureBytes = ltransactionContext.signByteStrings(payloadBytes);
            Protocol.SignedRequest sr = Protocol.SignedRequest.newBuilder()
                    .setPayload(payloadBytes).setSignature(signatureBytes).build();
            if (IS_TRACE_LEVEL && null != diagnosticFileDumper) { // dump protobuf we sent
                logger.trace(format("Service discovery channel %s %s service chaincode query sent %s", channelName, serviceDiscoveryPeer,
                        diagnosticFileDumper.createDiagnosticProtobufFile(sr.toByteArray())));
            }
            logger.debug(format("Channel %s peer %s sending chaincode query request", channelName, serviceDiscoveryPeer.toString()));
            final Protocol.Response response = serviceDiscoveryPeer.sendDiscoveryRequestAsync(sr).get(SERVICE_DISCOVERY_WAITTIME, TimeUnit.MILLISECONDS);
            if (IS_TRACE_LEVEL && null != diagnosticFileDumper) { // dump protobuf we get
                logger.trace(format("Service discovery channel %s %s service chaincode query returned %s", channelName, serviceDiscoveryPeer,
                        diagnosticFileDumper.createDiagnosticProtobufFile(response.toByteArray())));
            }
            logger.debug(format("Channel %s peer %s completed chaincode query request", channelName, serviceDiscoveryPeer.toString()));
            serviceDiscoveryPeer.hasConnected();
            for (Protocol.QueryResult queryResult : response.getResultsList()) {
                if (queryResult.getResultCase().getNumber() == Protocol.QueryResult.ERROR_FIELD_NUMBER) {
                    // Logged and skipped; note this does NOT set serviceDiscoveryException.
                    ServiceDiscoveryException discoveryException = new ServiceDiscoveryException(format("Error %s", queryResult.getError().getContent()));
                    logger.error(discoveryException.getMessage());
                    continue;
                }
                if (queryResult.getResultCase().getNumber() != Protocol.QueryResult.CC_QUERY_RES_FIELD_NUMBER) {
                    ServiceDiscoveryException discoveryException = new ServiceDiscoveryException(format("Error expected chaincode endorsement query but got %s : ", queryResult.getResultCase().toString()));
                    logger.error(discoveryException.getMessage());
                    continue;
                }
                Protocol.ChaincodeQueryResult ccQueryRes = queryResult.getCcQueryRes();
                if (ccQueryRes.getContentList().isEmpty()) {
                    // NOTE(review): getError() on a CC_QUERY_RES result yields the proto default,
                    // so this message is likely empty/unhelpful -- confirm intended wording.
                    throw new ServiceDiscoveryException(format("Error %s", queryResult.getError().getContent()));
                }
                for (Protocol.EndorsementDescriptor es : ccQueryRes.getContentList()) {
                    final String chaincode = es.getChaincode();
                    List<SDLayout> layouts = new LinkedList<>();
                    for (Protocol.Layout layout : es.getLayoutsList()) {
                        SDLayout sdLayout = null;
                        Map<String, Integer> quantitiesByGroupMap = layout.getQuantitiesByGroupMap();
                        for (Map.Entry<String, Integer> qmap : quantitiesByGroupMap.entrySet()) {
                            final String key = qmap.getKey();
                            final int quantity = qmap.getValue();
                            if (quantity < 1) {
                                continue; // group requires nothing; skip it
                            }
                            Protocol.Peers peers = es.getEndorsersByGroupsMap().get(key);
                            if (peers == null || peers.getPeersCount() == 0) {
                                continue; // no endorsers to satisfy the group
                            }
                            List<SDEndorser> sdEndorsers = new LinkedList<>();
                            for (Protocol.Peer pp : peers.getPeersList()) {
                                SDEndorser ppp = new SDEndorser(pp, null, null);
                                final String endPoint = ppp.getEndpoint();
                                // Resolve against the cached snapshot; force a refresh if the
                                // endpoint is unknown (snapshot may be stale).
                                SDEndorser nppp = sdNetwork.getEndorserByEndpoint(endPoint);
                                if (null == nppp) {
                                    sdNetwork = networkDiscovery(transactionContext, true);
                                    if (null == sdNetwork) {
                                        throw new ServiceDiscoveryException("Failed to discover network resources.");
                                    }
                                    nppp = sdNetwork.getEndorserByEndpoint(ppp.getEndpoint());
                                    if (null == nppp) {
                                        throw new ServiceDiscoveryException(format("Failed to discover peer endpoint information %s for chaincode %s ", endPoint, chaincode));
                                    }
                                }
                                sdEndorsers.add(nppp);
                            }
                            if (sdLayout == null) {
                                sdLayout = new SDLayout(); // lazily create once a non-empty group exists
                                layouts.add(sdLayout);
                            }
                            sdLayout.addGroup(key, quantity, sdEndorsers);
                        }
                    }
                    if (layouts.isEmpty()) {
                        logger.warn(format("Channel %s chaincode %s discovered no layouts!", channelName, chaincode));
                    } else {
                        if (DEBUG) {
                            StringBuilder sb = new StringBuilder(1000);
                            sb.append("Channel ").append(channelName)
                                    .append(" found ").append(layouts.size()).append(" layouts for chaincode: ").append(es.getChaincode());
                            sb.append(", layouts: [");
                            String sep = "";
                            for (SDLayout layout : layouts) {
                                sb.append(sep).append(layout);
                                sep = ", ";
                            }
                            sb.append("]");
                            logger.debug(sb.toString());
                        }
                        ret.put(es.getChaincode(), new SDChaindcode(es.getChaincode(), layouts));
                    }
                }
            }
            if (ret.size() == chaincodeNames.size()) {
                break; // found them all.
            }
        } catch (ServiceDiscoveryException e) {
            logger.warn(format("Service discovery error on peer %s. Error: %s", serviceDiscoveryPeer.toString(), e.getMessage()));
            serviceDiscoveryException = e;
        } catch (Exception e) {
            logger.warn(format("Service discovery error on peer %s. Error: %s", serviceDiscoveryPeer.toString(), e.getMessage()));
            serviceDiscoveryException = new ServiceDiscoveryException(e);
        }
    }
    if (null != serviceDiscoveryException) {
        throw serviceDiscoveryException;
    }
    if (ret.size() != chaincodeNames.size()) {
        logger.warn((format("Channel %s failed to find all layouts for chaincodes. Expected: %d and found: %d", channelName, chaincodeNames.size(), ret.size())));
    }
    return ret;
}
/**
 * Endorsement selection that favors the layout requiring the fewest endorsements,
 * breaking ties by the highest ledger block height (most up-to-date peers).
 */
static final EndorsementSelector ENDORSEMENT_SELECTION_LEAST_REQUIRED_BLOCKHEIGHT = sdChaindcode -> {
    List<SDLayout> layouts = sdChaindcode.getLayouts();
    // Mutable working copy of a group's remaining requirement and candidate endorsers.
    class LGroup { // local book keeping.
        int stillRequred;
        final Set<SDEndorser> endorsers = new HashSet<>();
        LGroup(SDGroup group) {
            endorsers.addAll(group.getEndorsers());
            this.stillRequred = group.getStillRequired();
        }
        // Removes any of the given endorsers from this group's candidates,
        // decrementing the requirement (floored at 0) per removal.
        // return true if still required
        boolean endorsed(Set<SDEndorser> endorsed) {
            for (SDEndorser sdEndorser : endorsed) {
                if (endorsers.contains(sdEndorser)) {
                    endorsers.remove(sdEndorser);
                    stillRequred = Math.max(0, stillRequred - 1);
                }
            }
            return stillRequred > 0;
        }
    }
    SDLayout pickedLayout = null;
    // For each layout, the minimal endorser set computed for it.
    Map<SDLayout, Set<SDEndorser>> layoutEndorsers = new HashMap<>();
    // if (layouts.size() > 1) { // pick layout by least number of endorsers .. least number of peers hit and smaller block!
    for (SDLayout sdLayout : layouts) {
        Set<LGroup> remainingGroups = new HashSet<>();
        for (SDGroup sdGroup : sdLayout.getSDLGroups()) {
            remainingGroups.add(new LGroup(sdGroup));
        }
        // These are required as there is no choice: groups where every candidate is needed.
        Set<SDEndorser> required = new HashSet<>();
        for (LGroup lgroup : remainingGroups) {
            if (lgroup.stillRequred == lgroup.endorsers.size()) {
                required.addAll(lgroup.endorsers);
            }
        }
        //add those that there are no choice.
        if (required.size() > 0) {
            Set<LGroup> remove = new HashSet<>(remainingGroups.size());
            for (LGroup lGroup : remainingGroups) {
                if (!lGroup.endorsed(required)) {
                    remove.add(lGroup); // fully satisfied by the forced picks
                }
            }
            remainingGroups.removeAll(remove);
            Set<SDEndorser> sdEndorsers = layoutEndorsers.computeIfAbsent(sdLayout, k -> new HashSet<>());
            sdEndorsers.addAll(required);
        }
        if (remainingGroups.isEmpty()) { // no more groups here done for this layout.
            continue; // done with this layout there really were no choices.
        }
        // Greedy cover: repeatedly pick the endorser appearing in the most remaining
        // groups, tie-broken by greatest ledger height.
        //Now go through groups finding which endorsers can satisfy the most groups.
        do {
            Map<SDEndorser, Integer> matchCount = new HashMap<>();
            for (LGroup group : remainingGroups) {
                for (SDEndorser sdEndorser : group.endorsers) {
                    Integer count = matchCount.get(sdEndorser);
                    if (count == null) {
                        matchCount.put(sdEndorser, 1);
                    } else {
                        matchCount.put(sdEndorser, ++count); // unbox, increment local, rebox
                    }
                }
            }
            Set<SDEndorser> theMost = new HashSet<>();
            int maxMatch = 0;
            for (Map.Entry<SDEndorser, Integer> m : matchCount.entrySet()) {
                int count = m.getValue();
                SDEndorser sdEndorser = m.getKey();
                if (count > maxMatch) {
                    theMost.clear();
                    theMost.add(sdEndorser);
                    maxMatch = count;
                } else if (count == maxMatch) {
                    theMost.add(sdEndorser);
                }
            }
            Set<SDEndorser> theVeryMost = new HashSet<>(1);
            long max = 0L;
            // Tie breaker: Pick one with greatest ledger height.
            // NOTE(review): if every candidate's ledger height is <= 0 (e.g. the -1
            // default), theVeryMost stays empty and no group progresses -- this
            // do/while could then never terminate. Confirm heights are always positive here.
            for (SDEndorser sd : theMost) {
                if (sd.getLedgerHeight() > max) {
                    max = sd.getLedgerHeight();
                    theVeryMost.clear();
                    theVeryMost.add(sd);
                }
            }
            Set<LGroup> remove2 = new HashSet<>(remainingGroups.size());
            for (LGroup lGroup : remainingGroups) {
                if (!lGroup.endorsed(theVeryMost)) {
                    remove2.add(lGroup);
                }
            }
            Set<SDEndorser> sdEndorsers = layoutEndorsers.computeIfAbsent(sdLayout, k -> new HashSet<>());
            sdEndorsers.addAll(theVeryMost);
            remainingGroups.removeAll(remove2);
        } while (!remainingGroups.isEmpty());
        // Now pick the layout with least endorsers
    }
    //Pick layout which needs least endorsements.
    int min = Integer.MAX_VALUE;
    Set<SDLayout> theLeast = new HashSet<>();
    for (Map.Entry<SDLayout, Set<SDEndorser>> l : layoutEndorsers.entrySet()) {
        SDLayout sdLayoutK = l.getKey();
        Integer count = l.getValue().size();
        if (count < min) {
            theLeast.clear();
            theLeast.add(sdLayoutK);
            min = count;
        } else if (count == min) {
            theLeast.add(sdLayoutK);
        }
    }
    if (theLeast.size() == 1) {
        pickedLayout = theLeast.iterator().next();
    } else {
        long max = 0L;
        // Tie breaker: Pick one with greatest ledger height.
        // NOTE(review): if every layout's summed height is <= 0, pickedLayout
        // remains null and the returned state carries a null layout -- verify
        // callers tolerate that.
        for (SDLayout sdLayout : theLeast) {
            int height = 0; // summed int; could overflow for very tall ledgers
            for (SDEndorser sdEndorser : layoutEndorsers.get(sdLayout)) {
                height += sdEndorser.getLedgerHeight();
            }
            if (height > max) {
                max = height;
                pickedLayout = sdLayout;
            }
        }
    }
    final SDEndorserState sdEndorserState = new SDEndorserState();
    sdEndorserState.setPickedEndorsers(layoutEndorsers.get(pickedLayout));
    sdEndorserState.setPickedLayout(pickedLayout);
    return sdEndorserState;
};
public static final EndorsementSelector DEFAULT_ENDORSEMENT_SELECTION = ENDORSEMENT_SELECTION_LEAST_REQUIRED_BLOCKHEIGHT;
/**
 * Endorsement selection that picks a random layout and then fills each of its
 * groups with randomly chosen endorsers, de-duplicated by endpoint.
 */
public static final EndorsementSelector ENDORSEMENT_SELECTION_RANDOM = sdChaindcode -> {
    final List<SDLayout> layouts = sdChaindcode.getLayouts();
    // Single layout: take it directly; otherwise choose one at random.
    final SDLayout pickedLayout = layouts.size() > 1
            ? layouts.get(random.nextInt(layouts.size()))
            : layouts.get(0);
    final Map<String, SDEndorser> selected = new HashMap<>(); // keyed by endpoint
    for (SDGroup group : pickedLayout.getSDLGroups()) {
        // Shuffle the group's candidates and take only as many as it still needs.
        final List<SDEndorser> shuffled = new ArrayList<>(group.getEndorsers());
        final int needed = group.getStillRequired();
        Collections.shuffle(shuffled);
        for (SDEndorser endorser : shuffled.subList(0, needed)) {
            selected.putIfAbsent(endorser.getEndpoint(), endorser); // keep first pick per endpoint
        }
    }
    final SDEndorserState state = new SDEndorserState();
    state.setPickedEndorsers(selected.values());
    state.setPickedLayout(pickedLayout);
    return state;
};
/**
 * A chaincode name together with the endorsement layouts discovered for it.
 * Mutators prune layouts/endorsers as peers are excluded or endorse.
 */
public static class SDChaindcode {
    final String name;
    final List<SDLayout> layouts;

    /** Deep-copy constructor: clones every layout so mutations stay independent. */
    SDChaindcode(SDChaindcode sdChaindcode) {
        this.name = sdChaindcode.name;
        this.layouts = new LinkedList<>();
        for (SDLayout sdLayout : sdChaindcode.layouts) {
            this.layouts.add(new SDLayout(sdLayout));
        }
    }

    SDChaindcode(String name, List<SDLayout> layouts) {
        this.name = name;
        this.layouts = layouts;
    }

    /** @return read-only view of the current layouts. */
    public List<SDLayout> getLayouts() {
        return Collections.unmodifiableList(layouts);
    }

    /**
     * Drops the named endpoints from every layout, removing layouts that can no
     * longer be satisfied.
     *
     * @return the number of layouts left.
     */
    int ignoreList(Collection<String> names) {
        if (names != null && !names.isEmpty()) {
            layouts.removeIf(sdLayout -> !sdLayout.ignoreList(names));
        }
        return layouts.size();
    }

    /**
     * Drops the given endorsers from every layout, removing layouts that can no
     * longer be satisfied.
     *
     * @return the number of layouts left.
     */
    int ignoreListSDEndorser(Collection<SDEndorser> sdEndorsers) {
        if (sdEndorsers != null && !sdEndorsers.isEmpty()) {
            layouts.removeIf(sdLayout -> !sdLayout.ignoreListSDEndorser(sdEndorsers));
        }
        return layouts.size();
    }

    /**
     * Records the given endorsements against every layout.
     *
     * @return true if at least one layout is now fully endorsed.
     */
    boolean endorsedList(Collection<SDEndorser> sdEndorsers) {
        boolean anySatisfied = false;
        for (SDLayout sdLayout : layouts) {
            if (sdLayout.endorsedList(sdEndorsers)) {
                anySatisfied = true;
            }
        }
        return anySatisfied;
    }

    /**
     * Checks whether the given endorsements satisfy any layout.
     *
     * @return the smallest satisfying endorser set, or null if no layout is met.
     */
    Collection<SDEndorser> meetsEndorsmentPolicy(Set<SDEndorser> endpoints) {
        Collection<SDEndorser> best = null;
        for (SDLayout sdLayout : layouts) {
            final Collection<SDEndorser> needed = sdLayout.meetsEndorsmentPolicy(endpoints);
            if (needed == null) {
                continue; // this layout was not satisfied
            }
            if (best == null || needed.size() < best.size()) {
                best = needed; // prefer the smaller satisfying set
            }
        }
        return best;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder(1000);
        sb.append("SDChaindcode(name: ").append(name);
        if (layouts != null && !layouts.isEmpty()) {
            sb.append(", layouts: [");
            String sep = "";
            for (SDLayout sdLayout : layouts) {
                sb.append(sep).append(sdLayout);
                sep = " ,";
            }
            sb.append("]");
        }
        return sb.append(")").toString();
    }
}
/**
 * One endorsement layout: a conjunction of groups, each of which must collect a
 * required number of endorsements from its candidate peers.
 */
public static class SDLayout {
    final List<SDGroup> groups = new LinkedList<>();

    SDLayout() {
    }

    // Copy constructor. The bare "new SDGroup(group)" calls are NOT dead code:
    // SDGroup's instance initializer registers each new group with this
    // (enclosing) layout's groups list.
    SDLayout(SDLayout sdLayout) {
        for (SDGroup group : sdLayout.groups) {
            new SDGroup(group);
        }
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder(1000);
        sb.append("SDLayout: {");
        if (!groups.isEmpty()) {
            sb.append("groups: [");
            String sep2 = "";
            for (SDGroup group : groups) {
                sb.append(sep2).append(group.toString());
                sep2 = ", ";
            }
            sb.append("]");
        } else {
            sb.append(", groups: []");
        }
        sb.append("}");
        return sb.toString();
    }

    /**
     * Removes the named endpoints from every group.
     *
     * @return true if all groups can still gather enough endorsements.
     */
    boolean ignoreList(Collection<String> names) {
        boolean ret = true;
        HashSet<String> bnames = new HashSet<>(names);
        for (SDGroup group : groups) {
            if (!group.ignoreList(bnames)) {
                ret = false; // group can no longer be satisfied.
            }
        }
        return ret;
    }

    /**
     * Removes the given endorsers from every group.
     *
     * @return true if all groups can still gather enough endorsements.
     */
    boolean ignoreListSDEndorser(Collection<SDEndorser> names) {
        boolean ret = true;
        HashSet<SDEndorser> bnames = new HashSet<>(names);
        for (SDGroup group : groups) {
            if (!group.ignoreListSDEndorser(bnames)) {
                ret = false; // group can no longer be satisfied.
            }
        }
        return ret;
    }

    /**
     * Records the given endorsements against every group.
     *
     * @return true once every group in this layout has met its requirement.
     */
    boolean endorsedList(Collection<SDEndorser> sdEndorsers) {
        int endorsementMeet = 0;
        for (SDGroup group : groups) {
            if (group.endorsedList(sdEndorsers)) {
                ++endorsementMeet;
            }
        }
        return endorsementMeet >= groups.size();
    }

    /**
     * Checks whether the given endorsements satisfy every group of this layout.
     *
     * @return the endorsers needed, or null when any group was not met.
     */
    Collection<SDEndorser> meetsEndorsmentPolicy(Set<SDEndorser> endpoints) {
        Set<SDEndorser> ret = new HashSet<>();
        for (SDGroup group : groups) {
            Collection<SDEndorser> sdEndorsers = group.meetsEndorsmentPolicy(endpoints, null);
            if (null == sdEndorsers) {
                return null; // group was not satisfied
            }
            ret.addAll(sdEndorsers); // add all these endorsers.
        }
        return ret;
    }

    /** @return a defensive copy of this layout's groups. */
    public Collection<SDGroup> getSDLGroups() {
        return new ArrayList<>(groups);
    }

    /**
     * One group of a layout: {@code required} endorsements must be collected from
     * the candidate {@code endorsers}.
     */
    public class SDGroup {
        final int required; // the number that's needed for the group to be endorsed.
        final List<SDEndorser> endorsers = new LinkedList<>();
        private final String name; // name of the group - just for debug sake.
        private int endorsed = 0; // number that have been endorsed so far.

        {
            // Every SDGroup registers itself with its enclosing layout.
            SDLayout.this.groups.add(this);
        }

        SDGroup(String name, int required, List<SDEndorser> endorsers) {
            this.name = name;
            this.required = required;
            this.endorsers.addAll(endorsers);
        }

        SDGroup(SDGroup group) { //copy constructor
            name = group.name;
            required = group.required;
            endorsers.addAll(group.endorsers);
            endorsed = 0; // on copy reset to no endorsements
        }

        /** @return how many more endorsements this group still needs. */
        public int getStillRequired() {
            return required - endorsed;
        }

        public String getName() {
            return name;
        }

        public int getRequired() {
            return required;
        }

        /** @return a defensive copy of the remaining candidate endorsers. */
        public Collection<SDEndorser> getEndorsers() {
            return new ArrayList<>(endorsers);
        }

        // Returns true if there are still sufficient endorsers for this group.
        boolean ignoreList(Collection<String> names) {
            HashSet<String> bnames = new HashSet<>(names);
            endorsers.removeIf(endorser -> bnames.contains(endorser.getEndpoint()));
            return endorsers.size() >= required;
        }

        // Returns true if there are still sufficient endorsers for this group.
        boolean ignoreListSDEndorser(Collection<SDEndorser> sdEndorsers) {
            HashSet<SDEndorser> bnames = new HashSet<>(sdEndorsers);
            endorsers.removeIf(endorser -> bnames.contains(endorser));
            return endorsers.size() >= required;
        }

        // Returns true if the endorsements have been met.
        boolean endorsedList(Collection<SDEndorser> sdEndorsers) {
            //This is going to look odd so here goes: Service discovery can't guarantee the endpoint certs are valid
            // and so there may be multiple endpoints with different MSP ids. However if we have gotten an
            // endorsement from an endpoint that means it's been satisfied and can be removed.
            if (endorsed >= required) {
                return true;
            }
            if (!sdEndorsers.isEmpty()) {
                final Set<String> enames = new HashSet<>(sdEndorsers.size());
                sdEndorsers.forEach(sdEndorser -> enames.add(sdEndorser.getEndpoint()));
                endorsers.removeIf(endorser -> {
                    if (enames.contains(endorser.getEndpoint())) {
                        // BUGFIX: was "Math.min(required, endorsed++)" -- the assignment
                        // overwrote the post-increment, so the count never advanced and
                        // the group could never report itself endorsed.
                        endorsed = Math.min(required, endorsed + 1);
                        return true; // remove it.
                    }
                    return false; // needs to stay in the list.
                });
            }
            return endorsed >= required;
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder(512);
            sb.append("SDGroup: { name: ").append(name).append(", required: ").append(required);
            if (!endorsers.isEmpty()) {
                sb.append(", endorsers: [");
                String sep2 = "";
                for (SDEndorser sdEndorser : endorsers) {
                    sb.append(sep2).append(sdEndorser.toString());
                    sep2 = ", ";
                }
                sb.append("]");
            } else {
                sb.append(", endorsers: []");
            }
            sb.append("}");
            return sb.toString();
        }

        /**
         * Checks whether {@code allEndorsed} satisfies this group.
         *
         * @param allEndorsed endorsers that have already endorsed.
         * @param requiredYet when non-null and the group is NOT met, candidates that
         *                    have not yet endorsed are added to it.
         * @return the endorsers used to satisfy the group, or null if not met.
         */
        Collection<SDEndorser> meetsEndorsmentPolicy(Set<SDEndorser> allEndorsed, Collection<SDEndorser> requiredYet) {
            Set<SDEndorser> ret = new HashSet<>(this.endorsers.size());
            for (SDEndorser hasBeenEndorsed : allEndorsed) {
                for (SDEndorser sdEndorser : endorsers) {
                    if (hasBeenEndorsed.equals(sdEndorser)) {
                        ret.add(sdEndorser);
                        if (ret.size() >= required) {
                            return ret; // got what we needed.
                        }
                    }
                }
            }
            if (null != requiredYet) {
                for (SDEndorser sdEndorser : endorsers) {
                    if (!allEndorsed.contains(sdEndorser)) {
                        requiredYet.add(sdEndorser);
                    }
                }
            }
            return null; // group has not met endorsement.
        }
    }

    /** Adds a new group to this layout (registration happens in SDGroup's initializer). */
    void addGroup(String key, int required, List<SDEndorser> endorsers) {
        new SDGroup(key, required, endorsers);
    }
}
/**
 * Result of an endorsement-selection strategy: the chosen endorsers together
 * with the layout they were chosen from.
 */
public static class SDEndorserState {
    private Collection<SDEndorser> sdEndorsers = new ArrayList<>();
    private SDLayout pickedLayout;

    /** Records the endorsers chosen by the selector. */
    public void setPickedEndorsers(Collection<SDEndorser> sdEndorsers) {
        this.sdEndorsers = sdEndorsers;
    }

    /** Records which layout the endorsers were chosen from. */
    public void setPickedLayout(SDLayout pickedLayout) {
        this.pickedLayout = pickedLayout;
    }

    Collection<SDEndorser> getSdEndorsers() {
        return sdEndorsers;
    }

    public SDLayout getPickedLayout() {
        return pickedLayout;
    }
}
public static class SDEndorser {
private List<Message.Chaincode> chaincodesList;
// private final Protocol.Peer proto;
private String endPoint = null;
private String mspid;
private long ledgerHeight = -1L;
private final Collection<byte[]> tlsCerts;
private final Collection<byte[]> tlsIntermediateCerts;
SDEndorser() { // for testing only
tlsCerts = null;
tlsIntermediateCerts = null;
}
SDEndorser(Protocol.Peer peerRet, Collection<byte[]> tlsCerts, Collection<byte[]> tlsIntermediateCerts) {
this.tlsCerts = tlsCerts;
this.tlsIntermediateCerts = tlsIntermediateCerts;
parseEndpoint(peerRet);
parseLedgerHeight(peerRet);
parseIdentity(peerRet);
}
Collection<byte[]> getTLSCerts() {
return tlsCerts;
}
Collection<byte[]> getTLSIntermediateCerts() {
return tlsIntermediateCerts;
}
public String getEndpoint() {
return endPoint;
}
public long getLedgerHeight() {
return ledgerHeight;
}
private void parseIdentity(Protocol.Peer peerRet) {
try {
Identities.SerializedIdentity serializedIdentity = Identities.SerializedIdentity.parseFrom(peerRet.getIdentity());
mspid = serializedIdentity.getMspid();
} catch (InvalidProtocolBufferException e) {
throw new InvalidProtocolBufferRuntimeException(e);
}
}
private String parseEndpoint(Protocol.Peer peerRet) throws InvalidProtocolBufferRuntimeException {
if (null == endPoint) {
try {
Message.Envelope membershipInfo = peerRet.getMembershipInfo();
final ByteString membershipInfoPayloadBytes = membershipInfo.getPayload();
final Message.GossipMessage gossipMessageMemberInfo = Message.GossipMessage.parseFrom(membershipInfoPayloadBytes);
if (Message.GossipMessage.ContentCase.ALIVE_MSG.getNumber() != gossipMessageMemberInfo.getContentCase().getNumber()) {
throw new RuntimeException(format("Error %s", "bad"));
}
Message.AliveMessage aliveMsg = gossipMessageMemberInfo.getAliveMsg();
endPoint = aliveMsg.getMembership().getEndpoint();
if (endPoint != null) {
endPoint = endPoint.toLowerCase().trim(); //makes easier on comparing.
}
} catch (InvalidProtocolBufferException e) {
throw new InvalidProtocolBufferRuntimeException(e);
}
}
return endPoint;
}
private long parseLedgerHeight(Protocol.Peer peerRet) throws InvalidProtocolBufferRuntimeException {
if (-1L == ledgerHeight) {
try {
Message.Envelope stateInfo = peerRet.getStateInfo();
final Message.GossipMessage stateInfoGossipMessage = Message.GossipMessage.parseFrom(stateInfo.getPayload());
Message.GossipMessage.ContentCase contentCase = stateInfoGossipMessage.getContentCase();
if (contentCase.getNumber() != Message.GossipMessage.ContentCase.STATE_INFO.getNumber()) {
throw new RuntimeException("" + contentCase.getNumber());
}
Message.StateInfo stateInfo1 = stateInfoGossipMessage.getStateInfo();
ledgerHeight = stateInfo1.getProperties().getLedgerHeight();
this.chaincodesList = stateInfo1.getProperties().getChaincodesList();
} catch (InvalidProtocolBufferException e) {
throw new InvalidProtocolBufferRuntimeException(e);
}
}
return ledgerHeight;
}
/**
 * Two endorsers are considered equal when both their MSP id and their
 * endpoint match; ledger height and chaincodes are deliberately ignored.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof SDEndorser)) {
        return false;
    }
    final SDEndorser that = (SDEndorser) obj;
    return Objects.equals(this.mspid, that.getMspid())
            && Objects.equals(this.endPoint, that.getEndpoint());
}
/** Consistent with {@code equals}: hashes the same (mspid, endPoint) pair. */
@Override
public int hashCode() {
    return Objects.hash(mspid, endPoint);
}
/**
 * Names of the chaincodes this endorser reports having installed.
 *
 * @return a fresh set of chaincode names; empty when no state info has been parsed yet.
 */
Set<String> getChaincodeNames() {
    if (null == chaincodesList) {
        return Collections.emptySet();
    }
    final Set<String> names = new HashSet<>(chaincodesList.size());
    chaincodesList.forEach(cc -> names.add(cc.getName()));
    return names;
}
/** @return the MSP id of the organization this endorser belongs to. */
public String getMspid() {
    return mspid;
}
/** Compact log-friendly identifier in the form {@code SDEndorser-<mspid>-<endpoint>}. */
@Override
public String toString() {
    return String.format("SDEndorser-%s-%s", mspid, endPoint);
}
}
/**
 * Picks the {@code required} endorsers with the greatest ledger heights.
 * The input list is not modified; the result is sorted ascending by height.
 *
 * @param required how many endorsers to keep (all of them when the list is smaller).
 * @param endorsers candidate endorsers.
 * @return the tail (highest heights) of the height-sorted candidates.
 */
private static List<SDEndorser> topNbyHeight(int required, List<SDEndorser> endorsers) {
    final ArrayList<SDEndorser> byHeight = new ArrayList<>(endorsers);
    byHeight.sort((a, b) -> Long.compare(a.getLedgerHeight(), b.getLedgerHeight()));
    final int fromIndex = Math.max(byHeight.size() - required, 0);
    return byHeight.subList(fromIndex, byHeight.size());
}
// Handle for the periodic rediscovery task; null until run() schedules it.
// NOTE(review): field name is a typo for "serviceDiscovery" -- renaming would touch other methods.
private ScheduledFuture<?> seviceDiscovery = null;
// Rediscovery interval in seconds; values < 1 disable scheduling (see run()).
private static final int SERVICE_DISCOVER_FREQ_SECONDS = config.getServiceDiscoveryFreqSeconds();
/**
 * Starts the periodic full-network rediscovery task, once.
 * No-op when the channel is shut down or the configured frequency is below 1 second.
 * NOTE(review): not synchronized -- concurrent first calls could schedule two tasks; confirm callers serialize.
 */
void run() {
    if (channel.isShutdown() || SERVICE_DISCOVER_FREQ_SECONDS < 1) {
        return;
    }
    if (seviceDiscovery == null) {
        // Single daemon thread so the scheduler never keeps the JVM alive.
        seviceDiscovery = Executors.newSingleThreadScheduledExecutor(r -> {
            Thread t = Executors.defaultThreadFactory().newThread(r);
            t.setDaemon(true);
            return t;
        }).scheduleAtFixedRate(() -> {
            logger.debug(format("Channel %s starting service rediscovery after %d seconds.", channelName, SERVICE_DISCOVER_FREQ_SECONDS));
            fullNetworkDiscovery(true);
        }, SERVICE_DISCOVER_FREQ_SECONDS, SERVICE_DISCOVER_FREQ_SECONDS, TimeUnit.SECONDS);
    }
}
/**
 * Runs a full service-discovery pass: refreshes the network view and, when it
 * changed, re-discovers endorser endpoints for every known chaincode and
 * notifies the channel.
 *
 * @param force when true, bypass any freshness checks in networkDiscovery.
 * @return the (possibly refreshed) network view, or null on shutdown or error.
 */
SDNetwork fullNetworkDiscovery(boolean force) {
    if (channel.isShutdown()) {
        return null;
    }
    logger.trace(format("Full network discovery force: %b", force));
    try {
        SDNetwork osdNetwork = sdNetwork;
        SDNetwork lsdNetwork = networkDiscovery(transactionContext.retryTransactionSameContext(), force);
        if (channel.isShutdown() || null == lsdNetwork) {
            return null;
        }
        // Intentional identity (not equals) comparison: networkDiscovery returns the
        // same instance when nothing changed, a new instance otherwise.
        if (osdNetwork != lsdNetwork) { // means it changed.
            final Set<String> chaincodesNames = lsdNetwork.getChaincodesNames();
            // One single-element call list per chaincode name.
            List<List<ServiceDiscoveryChaincodeCalls>> lcc = new LinkedList<>();
            chaincodesNames.forEach(s -> {
                List<ServiceDiscoveryChaincodeCalls> lc = new LinkedList<>();
                lc.add(new ServiceDiscoveryChaincodeCalls(s));
                lcc.add(lc);
            });
            chaindcodeMap = discoverEndorserEndpoints(transactionContext.retryTransactionSameContext(), lcc);
            if (channel.isShutdown()) {
                return null;
            }
            channel.sdUpdate(lsdNetwork);
        }
        return lsdNetwork;
    } catch (Exception e) {
        // Best effort: rediscovery failures are logged, not propagated, so the
        // scheduled task keeps running.
        logger.warn("Service discovery got error:" + e.getMessage(), e);
    } finally {
        logger.trace("Full network rediscovery completed.");
    }
    return null;
}
/**
 * Cancels the periodic rediscovery task, if one is running. Never throws:
 * shutdown is best-effort and any failure is merely logged.
 */
void shutdown() {
    logger.trace("Service discovery shutdown.");
    try {
        // Snapshot-then-clear so a concurrent reader never sees a cancelled future.
        final ScheduledFuture<?> pending = seviceDiscovery;
        seviceDiscovery = null;
        if (pending != null) {
            pending.cancel(true);
        }
    } catch (Exception e) {
        // best effort.
        logger.error(e);
    }
}
/**
 * Safety net: cancels the rediscovery task if the owner never called shutdown().
 * NOTE(review): finalize() is deprecated in modern JDKs; owners should call
 * shutdown() explicitly rather than rely on this.
 */
@Override
protected void finalize() throws Throwable {
    shutdown();
    super.finalize();
}
/** Strategy for choosing which discovered endorsers should endorse for a chaincode. */
public interface EndorsementSelector {
    /** Selects the endorsers (and layout) to use from the chaincode's discovery data. */
    SDEndorserState endorserSelector(SDChaindcode sdChaindcode);
    // Ready-made strategies re-exported from ServiceDiscovery for convenience.
    EndorsementSelector ENDORSEMENT_SELECTION_RANDOM = ServiceDiscovery.ENDORSEMENT_SELECTION_RANDOM;
    EndorsementSelector ENDORSEMENT_SELECTION_LEAST_REQUIRED_BLOCKHEIGHT = ServiceDiscovery.ENDORSEMENT_SELECTION_LEAST_REQUIRED_BLOCKHEIGHT;
}
}
|
/*
* Copyright 2006-2016 Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.style;
import com.intellij.codeInspection.CleanupLocalInspectionTool;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspection;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.fixes.AddThisQualifierFix;
import org.jetbrains.annotations.NotNull;
/**
 * Inspection that flags instance method calls made without an explicit
 * {@code this.} qualifier, offering a quick-fix that adds one.
 */
public class UnqualifiedMethodAccessInspection extends BaseInspection implements CleanupLocalInspectionTool {

  @Override
  @NotNull
  public String getDisplayName() {
    // Name shown in the inspections settings UI.
    return InspectionGadgetsBundle.message("unqualified.method.access.display.name");
  }

  @Override
  @NotNull
  protected String buildErrorString(Object... infos) {
    return InspectionGadgetsBundle.message("unqualified.method.access.problem.descriptor");
  }

  @Override
  public InspectionGadgetsFix buildFix(Object... infos) {
    // Quick-fix that prepends "this." to the unqualified call.
    return new AddThisQualifierFix();
  }

  @Override
  public BaseInspectionVisitor buildVisitor() {
    return new UnqualifiedMethodAccessVisitor();
  }

  private static class UnqualifiedMethodAccessVisitor extends BaseInspectionVisitor {

    @Override
    public void visitReferenceExpression(@NotNull PsiReferenceExpression expression) {
      super.visitReferenceExpression(expression);
      // Already qualified -> nothing to report.
      if (expression.getQualifierExpression() != null) {
        return;
      }
      // A parameter list distinguishes method calls from plain references.
      if (expression.getParameterList() == null) {
        return;
      }
      final PsiElement resolved = expression.resolve();
      if (!(resolved instanceof PsiMethod)) {
        return;
      }
      final PsiMethod method = (PsiMethod)resolved;
      // "this." cannot qualify constructors or static methods.
      if (method.isConstructor() || method.hasModifierProperty(PsiModifier.STATIC)) {
        return;
      }
      final PsiClass containingClass = method.getContainingClass();
      if (containingClass == null) {
        return;
      }
      if (PsiUtil.isLocalOrAnonymousClass(containingClass)) {
        final PsiClass callSiteClass = PsiTreeUtil.getParentOfType(expression, PsiClass.class);
        // A qualified-this expression is impossible for anonymous or local
        // classes, so only flag calls made from the declaring class itself.
        if (callSiteClass == null || !callSiteClass.equals(containingClass)) {
          return;
        }
      }
      registerError(expression);
    }
  }
}
|
/*
* #%L
* *********************************************************************************************************************
*
* blueHour
* http://bluehour.tidalwave.it - git clone git@bitbucket.org:tidalwave/bluehour-src.git
* %%
* Copyright (C) 2013 - 2021 Tidalwave s.a.s. (http://tidalwave.it)
* %%
* *********************************************************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* *********************************************************************************************************************
*
*
* *********************************************************************************************************************
* #L%
*/
package it.tidalwave.accounting.model.spi.util;
import javax.annotation.Nonnull;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import it.tidalwave.accounting.model.types.Money;
/***********************************************************************************************************************
*
* @author Fabrizio Giudici
*
**********************************************************************************************************************/
public class MoneyFormat
  {
    // Maps ISO currency codes to their display symbols. Wrapped in an
    // unmodifiable view so the shared static map cannot be mutated after
    // class initialization (the original exposed a mutable HashMap).
    private static final Map<String, String> CURRENCY_SYMBOL_MAP;

    static
      {
        final Map<String, String> symbols = new HashMap<>();
        symbols.put("EUR", "€");
        symbols.put("USD", "$");
        CURRENCY_SYMBOL_MAP = Collections.unmodifiableMap(symbols);
      }

    /*******************************************************************************************************************
     *
     * Formats a monetary amount as "<amount> <symbol>", using the currency's
     * symbol when known and falling back to the raw currency code otherwise.
     *
     * @param  amount  the money to format
     * @return         the formatted string
     *
     ******************************************************************************************************************/
    @Nonnull
    public String format (@Nonnull final Money amount)
      {
        final String currency = amount.getCurrency();
        return String.format("%s %s", Money.getFormat().format(amount.getAmount()),
                                      CURRENCY_SYMBOL_MAP.getOrDefault(currency, currency));
      }
  }
|
/*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2008-2009, The KiWi Project (http://www.kiwi-project.eu)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the KiWi Project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* Contributor(s):
*
*
*/
package tagit2.action.explorer;
import java.io.Serializable;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import kiwi.api.config.ConfigurationService;
import kiwi.api.entity.KiWiEntityManager;
import kiwi.api.geo.Location;
import kiwi.api.triplestore.TripleStore;
import kiwi.context.CurrentContentItemFactory;
import kiwi.model.Constants;
import kiwi.model.content.ContentItem;
import kiwi.model.facades.PointOfInterestFacade;
import org.apache.solr.client.solrj.response.FacetField.Count;
import org.jboss.seam.ScopeType;
import org.jboss.seam.annotations.In;
import org.jboss.seam.annotations.Logger;
import org.jboss.seam.annotations.Name;
import org.jboss.seam.annotations.Scope;
import org.jboss.seam.annotations.Synchronized;
import org.jboss.seam.annotations.Transactional;
import org.jboss.seam.log.Log;
import tagit2.action.query.LayeringAction;
import tagit2.action.query.SearchAction;
import tagit2.api.query.IconService;
import tagit2.api.query.QueryService;
import tagit2.api.tagcloud.TagitTagCloudService;
import tagit2.util.exchange.MapMarker;
import tagit2.util.exchange.MapSettings;
import tagit2.util.geo.Area;
import tagit2.util.geo.Ratio;
import tagit2.util.query.Cluster;
import tagit2.util.query.Point;
import tagit2.util.query.TagitSearchResult;
import tagit2.util.tagcloud.TagCloudItem;
@Name("tagit2.explorerAction")
@Scope(ScopeType.PAGE)
//@Transactional
@Synchronized(timeout=1000000000)
public class ExplorerAction implements Serializable {
	/**
	 *
	 */
	private static final long serialVersionUID = 1L;

	/** Interaction mode of the map explorer page. */
	public enum Mode {NONE, CREATE_LOCATION, CREATE_ROUTE}

	//Statics
	// Fallback map center/zoom, used when no configuration value is set.
	private static final double HOMEBASE_LAT = 47.8;
	private static final double HOMEBASE_LNG = 13.03;
	private static final int HOMEBASE_ZOOM = 13;
	// Aspect ratio handed to the clustering query.
	private static final Ratio RATIO = new Ratio(7,5);
	// Clusters larger than this are treated as "zoom in" targets (see getClusterArea).
	private static final int MAX_CLUSTER_SIZE = 20;
	public static final String KEY = Constants.GOOGLE_KEY;
	private Mode mode = Mode.NONE;

	//Injections
	@Logger
	Log log;
	@In
	private TripleStore tripleStore;
	@In(create=true)
	private ContentItem currentContentItem;
	@In
	private KiWiEntityManager kiwiEntityManager;
	@In(create=true)
	private CurrentContentItemFactory currentContentItemFactory;
	@In(create=true)
	private ConfigurationService configurationService;
	@In(value="tagit2.layeringAction",create=true)
	private LayeringAction layeringAction;
	@In(value="tagit2.searchAction",create=true)
	private SearchAction searchAction;
	@In(value="tagit2.queryService")
	private QueryService queryService;
	@In(value="tagit2.tagCloudService",create=true)
	private TagitTagCloudService tagCloudService;
	@In(value="tagit2.clusterAction",create=true)
	private ClusterAction clusterAction;
	@In(value="tagit2.iconService",create=true)
	private IconService iconService;

	//Other Properties
	/**
	 * center of map. This value is used to center the map on a configured point (if current
	 * ContentItem has no Geoposition) or the Geoposition of the CurrentContentItem
	 */
	private Location center;
	private int zoom;
	private Area bounds;
	private double swLat,swLng,neLat,neLng;
	private long currentId;
	private long currentClusterId;

	// Results of the last query. Fix: use the typed Collections.emptyList()
	// instead of the raw Collections.EMPTY_LIST constant (unchecked assignment).
	private List<Count> tagFacets = Collections.emptyList();
	private List<Point> allMarkers = Collections.emptyList();
	private List<Cluster> clusters = Collections.emptyList();

	//Getters and Setters
	public Location getCenter() {
		return center;
	}
	public void setZoom(int zoom) {
		this.zoom = zoom;
	}
	public int getZoom() {
		return zoom;
	}
	public void setSwLat(double swLat) {
		this.swLat = swLat;
	}
	public void setSwLng(double swLng) {
		this.swLng = swLng;
	}
	public void setNeLat(double neLat) {
		this.neLat = neLat;
	}
	public void setNeLng(double neLng) {
		this.neLng = neLng;
	}
	public void setCurrentId(long currentId) {
		this.currentId = currentId;
	}
	public String getGmapkey() {
		// Configured Google Maps key, falling back to the compiled-in default.
		return configurationService.getConfiguration("tagit.googlekey", KEY);
	}
	//other public methods
	public void setCurrentClusterId(long currentClusterId) {
		this.currentClusterId = currentClusterId;
	}
	/**
	 * set currentContentItem by id (the id must have been set via setCurrentId first)
	 */
	public void setPoint() {
		currentContentItemFactory.setCurrentItemId(currentId);
		currentContentItemFactory.refresh();
		log.info("set point to id #0", currentId);
	}
	/**
	 * unselect a point via contentItemFactory and reset the interaction mode
	 */
	public void unselectPoint() {
		//reset mode
		mode = Mode.NONE;
		currentContentItemFactory.setCurrentItemId(null);
		currentContentItemFactory.setCurrentItemKiWiId(null);
		currentContentItemFactory.setCurrentItemTitle(null);
		currentContentItemFactory.setCurrentItemUri(null);
		currentContentItemFactory.refresh();
	}
	/**
	 * selects the cluster matching currentClusterId in clusterAction,
	 * or resets the selection when currentClusterId is -1
	 */
	public void setCluster() {
		if( currentClusterId == -1 ) {
			clusterAction.resetCluster();
		} else {
			for( Cluster c : clusters ) {
				if( c.getId() == currentClusterId ) {
					log.info("Cluster: #0", c.getId());
					clusterAction.setCluster(c);
				}
			}
		}
	}
	/**
	 * selects the cluster matching currentClusterId and returns its area when the
	 * cluster is too large to expand (size above MAX_CLUSTER_SIZE), null otherwise
	 * @return the cluster's bounding area, or null
	 */
	public Area getClusterArea() {
		for( Cluster c : clusters ) {
			if( c.getId() == currentClusterId ) {
				log.info("Cluster: #0", c.getId());
				clusterAction.setCluster(c);
				if( c.getSize() > MAX_CLUSTER_SIZE ) {
					return c.getArea();
				} else {
					return null;
				}
			}
		}
		return null;
	}
	/**
	 * if currentContentItem has a Geolocation, center is set to this location
	 * @return map settings with the id of currentContentItem if it has a Geolocation, -1 otherwise
	 */
	public MapSettings checkCurrent() {
		zoom = configurationService.getIntConfiguration("tagit.zoom",HOMEBASE_ZOOM);
		if (currentContentItem != null && currentContentItem.getResource().hasType(tripleStore.createUriResource(Constants.NS_GEO + "Point"))) {
			//set center from the item's point-of-interest facade
			PointOfInterestFacade currentPoi = kiwiEntityManager.createFacade(currentContentItem,
					PointOfInterestFacade.class);
			center = new Location(currentPoi.getLatitude(),currentPoi.getLongitude());
			//return settings
			return new MapSettings(center,zoom,currentPoi.getId());
		} else {
			//get lat/lng by configService, falling back to the home-base constants
			double lat = configurationService.getDoubleConfiguration("tagit.center.lat",HOMEBASE_LAT);
			double lng = configurationService.getDoubleConfiguration("tagit.center.lng",HOMEBASE_LNG);
			center = new Location(lat,lng);
			return new MapSettings(center,zoom,-1);
		}
	}
	/**
	 * bounds are set by properties, that are bound on jsFunction. After that markers that are initialized by
	 * a clustering algorithm are returned.
	 * @return clustered markers for the current bounds, or an empty list when no layer is selected
	 */
	public List<MapMarker> setBoundsAndZoom() {
		this.bounds = new Area(swLat,swLng,neLat,neLng);
		//is no layer selected?
		if( layeringAction.getLayerQuery() == null ) {
			// Fix: typed empty lists instead of raw Collections.EMPTY_LIST.
			tagFacets = Collections.emptyList();
			return Collections.emptyList();
		} else {
			//return getSinglePois();
			return getClusteredPois();
		}
	}
	/**
	 * returns the minimal area that is needed to display the whole search on a map
	 * @return the bounding area of the current search
	 */
	public Area getBounds() {
		return queryService.getBounds(searchAction.getKeyword(), layeringAction.getLayerQuery());
	}
	/**
	 * returns a List of tagCloudItems based on the tagFacets
	 * @return the tag cloud for the last query
	 */
	public List<TagCloudItem> getTagCloud() {
		return tagCloudService.getTagCloud(tagFacets);
	}
	public List<MapMarker> getAllMarkers() {
		//TODO there should be a type of paging
		return Collections.emptyList();
	}
	/**
	 * this is a test function, which uses the query method that returns unclustered results
	 * @return one map marker per unclustered result point
	 */
	private List<MapMarker> getSinglePois() {
		//to test it, get unclustered points
		TagitSearchResult result = queryService.getMarkers(bounds, searchAction.getKeyword(), layeringAction.getLayerQuery());
		List<MapMarker> l = new LinkedList<MapMarker>();
		for( Point p : result.getPoints() ) {
			l.add( p.getMapMarker() );
		}
		log.info("#0 markers found", l.size());
		//set tagFacets
		allMarkers = result.getAllPoints();
		tagFacets = result.getTagFacets();
		log.info("#0 factes found", tagFacets.size());
		return l;
	}
	/**
	 * This method returns a list of points and clusters get by the clustering of queryService.
	 * It stores the facets, clusters and a list of all pois (maybe usable for a list view of results)
	 * @return markers for both single points and clusters
	 */
	private List<MapMarker> getClusteredPois() {
		TagitSearchResult result = queryService.getClusteredMarkers(bounds, RATIO ,searchAction.getKeyword(), layeringAction.getLayerQuery());
		//init result lists
		List<MapMarker> l = new LinkedList<MapMarker>();
		//store clusters and facets
		this.clusters = result.getClusters();
		this.allMarkers = result.getAllPoints();
		this.tagFacets = result.getTagFacets();
		//set icons to visible single markers
		iconService.setIconsOf( result.getPoints() );
		for( Point p : result.getPoints() ) {
			l.add( p.getMapMarker() );
		}
		for( Cluster c : this.clusters ) {
			l.add( c.getMapMarker() );
		}
		return l;
	}
	public void setMode(Mode mode) {
		this.mode = mode;
	}
	public Mode getMode() {
		return mode;
	}
	public void startCreateLocation() {
		mode = Mode.CREATE_LOCATION;
	}
	public void startCreateRoute() {
		mode = Mode.CREATE_ROUTE;
	}
}
|
/*
* Copyright 2015 Adaptris Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.adaptris.core.services;
import com.adaptris.annotation.AdapterComponent;
import com.adaptris.annotation.ComponentProfile;
import com.adaptris.core.AdaptrisMessage;
import com.adaptris.core.CoreException;
import com.adaptris.core.ServiceException;
import com.adaptris.core.ServiceImp;
import com.adaptris.core.services.exception.ThrowExceptionService;
import com.adaptris.core.util.LoggingHelper;
import com.thoughtworks.xstream.annotations.XStreamAlias;
/**
* Always fail.
*
* @config always-fail-service
*
*
* @author lchan
* @deprecated since 3.0.0 consider using {@link ThrowExceptionService} instead which wils give you a better exception message.
*/
@Deprecated
@XStreamAlias("always-fail-service")
@AdapterComponent
@ComponentProfile(summary = "Deprecated: use ThrowExceptionService instead", tag = "service")
public class AlwaysFailService extends ServiceImp {

  // Ensures the deprecation warning is logged at most once per JVM.
  private static transient boolean warningLogged;

  public AlwaysFailService() {
    super();
    LoggingHelper.logDeprecation(warningLogged, () -> warningLogged = true,
        this.getClass().getSimpleName(), ThrowExceptionService.class.getName());
  }

  /** Deliberately fails every invocation; the exception message is this class's name. */
  @Override
  public void doService(AdaptrisMessage msg) throws ServiceException {
    throw new ServiceException(this.getClass().getName());
  }

  /** No initialisation required. */
  @Override
  protected void initService() throws CoreException {
  }

  /** No resources to release. */
  @Override
  protected void closeService() {
  }

  /** Nothing to prepare. */
  @Override
  public void prepare() throws CoreException {
  }
}
|
/*
* Copyright (c) 2019-2021 "Neo4j,"
* Neo4j Sweden AB [https://neo4j.com]
*
* This file is part of Neo4j.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neo4j.cypherdsl.core;
import static org.apiguardian.api.API.Status.EXPERIMENTAL;
import org.apiguardian.api.API;
import org.neo4j.cypherdsl.core.ast.Visitable;
import org.neo4j.cypherdsl.core.ast.Visitor;
import org.neo4j.cypherdsl.core.internal.Distinct;
/**
* See <a href="https://s3.amazonaws.com/artifacts.opencypher.org/M15/railroad/With.html">With</a>.
*
* @author Michael J. Simons
* @soundtrack Ferris MC - Ferris MC's Audiobiographie
* @since 1.0
*/
@API(status = EXPERIMENTAL, since = "1.0")
public final class With implements Visitable, Clause {

	// DISTINCT modifier; null when the WITH clause is not distinct.
	private final Distinct distinct;
	// The projected items with their ordering / pagination.
	private final ReturnBody body;
	// Optional trailing WHERE; null when absent.
	private final Where where;

	With(Return returnClause, Where where) {
		// Reuse the pieces of an already-built RETURN clause.
		this.distinct = returnClause.getDistinct();
		this.body = returnClause.getBody();
		this.where = where;
	}

	With(boolean distinct, ExpressionList returnItems, Order order, Skip skip, Limit limit, Where where) {
		if (distinct) {
			this.distinct = Distinct.INSTANCE;
		} else {
			this.distinct = null;
		}
		this.body = new ReturnBody(returnItems, order, skip, limit);
		this.where = where;
	}

	@Override
	public void accept(Visitor visitor) {
		visitor.enter(this);
		// Emission order matters: DISTINCT, then the return body, then WHERE.
		Visitable.visitIfNotNull(this.distinct, visitor);
		this.body.accept(visitor);
		Visitable.visitIfNotNull(this.where, visitor);
		visitor.leave(this);
	}
}
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.polyglotted.esjwt;
import io.polyglotted.esjwt.realm.JwtAuthFailureHandler;
import io.polyglotted.esjwt.realm.JwtRealm;
import io.polyglotted.esjwt.realm.JwtRealmFactory;
import org.apache.http.HttpHeaders;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.extensions.XPackExtension;
import org.elasticsearch.xpack.core.security.authc.AuthenticationFailureHandler;
import org.elasticsearch.xpack.core.security.authc.Realm;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
/**
* The extension class that serves as the integration point between Elasticsearch, X-Pack, and any JWT provider
*/
/**
 * The extension class that serves as the integration point between Elasticsearch, X-Pack, and any JWT provider.
 */
public class JwtRealmExtension extends XPackExtension {

    @Override
    public String name() {
        return JwtRealm.TYPE;
    }

    @Override
    public String description() {
        return "JWT Realm Extension";
    }

    /**
     * Headers that must be copied from the incoming request so the realm can read them;
     * here only the Authorization header carrying the JWT.
     */
    @Override
    public Collection<String> getRestHeaders() {
        return Collections.singleton(HttpHeaders.AUTHORIZATION);
    }

    /**
     * Registers this extension's custom realm with X-Pack. The map key is the realm-type
     * string used in the elasticsearch settings (it must not clash with X-Pack's built-in
     * types); the value is the {@link Realm.Factory} that builds realm instances of that
     * type from the settings.
     */
    @Override
    public Map<String, Realm.Factory> getRealms(ResourceWatcherService resourceWatcherService) {
        return new MapBuilder<String, Realm.Factory>()
                .put(JwtRealm.TYPE, new JwtRealmFactory())
                .immutableMap();
    }

    /**
     * Returns the custom implementation {@link JwtAuthFailureHandler}.
     */
    @Override
    public AuthenticationFailureHandler getAuthenticationFailureHandler() {
        return new JwtAuthFailureHandler();
    }
}
|
package com.shanai.sysstatus.service;
import com.shanai.sysstatus.dto.MonitorInfoBean;
import lombok.SneakyThrows;
import oshi.SystemInfo;
import oshi.hardware.HWDiskStore;
import oshi.hardware.HardwareAbstractionLayer;
import java.io.File;
import java.lang.management.ManagementFactory;
import java.lang.management.OperatingSystemMXBean;
import java.util.Date;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
/**
* @author wct
*/
public class MonitorServiceImpl extends Timer implements IMonitorService {

    /**
     * Builds a snapshot of the current system status: JVM heap, OS CPU load and
     * physical memory, per-root disk space, and per-device disk I/O counters.
     *
     * @return the populated monitoring DTO
     */
    @SneakyThrows
    private MonitorInfoBean getMonitorInfoBean() {
        MonitorInfoBean monitorInfoBean = new MonitorInfoBean();
        //============================ memory / CPU ====================/
        final int mb = 1024 * 1024;
        final int gb = 1024 * 1024 * 1024;
        // JVM heap figures, scaled to megabytes.
        long totalMemory = Runtime.getRuntime().totalMemory() / mb;
        long freeMemory = Runtime.getRuntime().freeMemory() / mb;
        long maxMemory = Runtime.getRuntime().maxMemory() / mb;
        // OS platform name.
        String osName = System.getProperty("os.name");
        // com.sun.management extension exposes CPU load and physical memory sizes.
        // Fix: the original also fetched a java.lang.management.OperatingSystemMXBean
        // and getProcessCpuLoad() into locals that were never used; both removed.
        com.sun.management.OperatingSystemMXBean osBean = ManagementFactory
                .getPlatformMXBean(com.sun.management.OperatingSystemMXBean.class);
        double systemCpuLoad = osBean.getSystemCpuLoad();
        long osTotalMemory = osBean.getTotalPhysicalMemorySize() / mb;
        long osFreePhysicalMemorySize = osBean.getFreePhysicalMemorySize() / mb;
        double systemLoadAverage = osBean.getSystemLoadAverage();
        //=================================================================/
        //================== disk =====================/
        // Per-filesystem-root sizes, scaled to gigabytes.
        File[] roots = File.listRoots();
        for (File root : roots) {
            String path = root.getPath();
            long freeSpace = root.getFreeSpace() / gb;
            // Space available to this JVM (the original comment mislabelled this as "used").
            long usableSpace = root.getUsableSpace() / gb;
            long totalSpace = root.getTotalSpace() / gb;
            monitorInfoBean.getSpaces().add(new MonitorInfoBean.Space(path, freeSpace, usableSpace, totalSpace));
        }
        monitorInfoBean.setTotalMemory(totalMemory);
        monitorInfoBean.setFreeMemory(freeMemory);
        monitorInfoBean.setMaxMemory(maxMemory);
        monitorInfoBean.setOsName(osName);
        monitorInfoBean.setTotalMemorySize(osTotalMemory);
        monitorInfoBean.setFreePhysicalMemorySize(osFreePhysicalMemorySize);
        monitorInfoBean.setUsedMemory(osTotalMemory - osFreePhysicalMemorySize);
        monitorInfoBean.setSystemLoadAverage(systemLoadAverage);
        monitorInfoBean.setCpuRatio(systemCpuLoad);
        // Per-device disk I/O counters via OSHI.
        SystemInfo systemInfo = new SystemInfo();
        HardwareAbstractionLayer hardware = systemInfo.getHardware();
        List<HWDiskStore> diskStores = hardware.getDiskStores();
        for (HWDiskStore diskStore : diskStores) {
            long reads = diskStore.getReads();
            long writes = diskStore.getWrites();
            long transferTime = diskStore.getTransferTime();
            String name = diskStore.getName();
            monitorInfoBean.getSpaceInfos().add(new MonitorInfoBean.SpaceInfo(name, reads, writes, transferTime));
        }
        return monitorInfoBean;
    }

    /**
     * Periodically delivers fresh monitoring snapshots to the listener.
     * The task cancels itself once the listener returns false.
     *
     * @param listener consumer of the snapshots; its return value controls continuation
     * @param interval period between snapshots, in milliseconds
     */
    @Override
    public void listen(IMonitorServiceListener listener, Long interval) {
        schedule(new TimerTask() {
            @Override
            public void run() {
                boolean refresh = listener.refresh(getMonitorInfoBean());
                if (!refresh) {
                    this.cancel();
                }
            }
        }, new Date(), interval);
    }
}
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vcs.changes;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vcs.AbstractVcs;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.FileStatus;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.BeforeAfter;
import com.intellij.util.ThreeState;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
public class ChangeListsIndexes {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.vcs.changes.ChangeListsIndexes");
// FilePath -> (status, vcs, base revision) for every locally changed path.
private final Map<FilePath, Data> myMap;
// The same paths as myMap's keys, kept in sorted order by the hierarchical comparator.
private final TreeSet<FilePath> myAffectedPaths;
// All Change objects currently tracked across change lists.
private final Set<Change> myChanges;
/** Creates empty indexes. */
public ChangeListsIndexes() {
    myMap = new HashMap<>();
    // presumably orders paths hierarchically and case-sensitively (comparator name) -- TODO confirm
    myAffectedPaths = new TreeSet<>(HierarchicalFilePathComparator.SYSTEM_CASE_SENSITIVE);
    myChanges = new HashSet<>();
}
/** Copy constructor: shallow-copies all three indexes from {@code idx}. */
public ChangeListsIndexes(@NotNull ChangeListsIndexes idx) {
    myMap = new HashMap<>(idx.myMap);
    myAffectedPaths = new TreeSet<>(idx.myAffectedPaths);
    myChanges = new HashSet<>(idx.myChanges);
}
/** Replaces this object's entire state with a copy of {@code idx}'s state. */
public void copyFrom(@NotNull ChangeListsIndexes idx) {
    // Clear-then-fill each index in turn; the three collections are independent.
    myMap.clear();
    myMap.putAll(idx.myMap);
    myAffectedPaths.clear();
    myAffectedPaths.addAll(idx.myAffectedPaths);
    myChanges.clear();
    myChanges.addAll(idx.myChanges);
}
/** Records {@code file} as changed with the given status, owning VCS and base revision. */
private void add(@NotNull FilePath file, @NotNull FileStatus status, @Nullable AbstractVcs key, @NotNull VcsRevisionNumber number) {
    myMap.put(file, new Data(status, key, number));
    myAffectedPaths.add(file);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Set status " + status + " for " + file);
    }
}
/** Removes {@code file} from both path indexes (no-op if absent). */
private void remove(final FilePath file) {
    myMap.remove(file);
    myAffectedPaths.remove(file);
}
/** Status of a virtual file, or null when the file is not locally changed. */
@Nullable
public FileStatus getStatus(@NotNull VirtualFile file) {
    return getStatus(VcsUtil.getFilePath(file));
}
/** Status of a path, or null when the path is not locally changed. */
@Nullable
public FileStatus getStatus(@NotNull FilePath file) {
    final Data data = myMap.get(file);
    if (data == null) {
        return null;
    }
    return data.status;
}
public void changeAdded(@NotNull Change change, AbstractVcs key) {
myChanges.add(change);
ContentRevision afterRevision = change.getAfterRevision();
ContentRevision beforeRevision = change.getBeforeRevision();
if (beforeRevision != null && afterRevision != null) {
add(afterRevision.getFile(), change.getFileStatus(), key, beforeRevision.getRevisionNumber());
if (!Comparing.equal(beforeRevision.getFile(), afterRevision.getFile())) {
add(beforeRevision.getFile(), FileStatus.DELETED, key, beforeRevision.getRevisionNumber());
}
}
else if (afterRevision != null) {
add(afterRevision.getFile(), change.getFileStatus(), key, VcsRevisionNumber.NULL);
}
else if (beforeRevision != null) {
add(beforeRevision.getFile(), change.getFileStatus(), key, beforeRevision.getRevisionNumber());
}
}
public void changeRemoved(@NotNull Change change) {
myChanges.remove(change);
ContentRevision afterRevision = change.getAfterRevision();
ContentRevision beforeRevision = change.getBeforeRevision();
if (afterRevision != null) {
remove(afterRevision.getFile());
}
if (beforeRevision != null) {
remove(beforeRevision.getFile());
}
}
@NotNull
public Set<Change> getChanges() {
return myChanges;
}
@Nullable
public AbstractVcs getVcsFor(@NotNull Change change) {
AbstractVcs vcs = getVcsForRevision(change.getAfterRevision());
if (vcs != null) return vcs;
return getVcsForRevision(change.getBeforeRevision());
}
@Nullable
private AbstractVcs getVcsForRevision(@Nullable ContentRevision revision) {
if (revision != null) {
Data data = myMap.get(revision.getFile());
return data != null ? data.vcs : null;
}
return null;
}
/**
* this method is called after each local changes refresh and collects all:
* - paths that are new in local changes
* - paths that are no more changed locally
* - paths that were and are changed, but base revision has changed (ex. external update)
* (for RemoteRevisionsCache and annotation listener)
*/
public void getDelta(ChangeListsIndexes newIndexes,
Set<BaseRevision> toRemove,
Set<BaseRevision> toAdd,
Set<BeforeAfter<BaseRevision>> toModify) {
Map<FilePath, Data> oldMap = myMap;
Map<FilePath, Data> newMap = newIndexes.myMap;
for (Map.Entry<FilePath, Data> entry : oldMap.entrySet()) {
FilePath s = entry.getKey();
Data oldData = entry.getValue();
Data newData = newMap.get(s);
if (newData != null) {
if (!oldData.sameRevisions(newData)) {
toModify.add(new BeforeAfter<>(createBaseRevision(s, oldData), createBaseRevision(s, newData)));
}
}
else {
toRemove.add(createBaseRevision(s, oldData));
}
}
for (Map.Entry<FilePath, Data> entry : newMap.entrySet()) {
FilePath s = entry.getKey();
Data newData = entry.getValue();
if (!oldMap.containsKey(s)) {
toAdd.add(createBaseRevision(s, newData));
}
}
}
@NotNull
public ThreeState haveChangesUnder(@NotNull FilePath dir) {
FilePath changeCandidate = myAffectedPaths.ceiling(dir);
if (changeCandidate == null) return ThreeState.NO;
return FileUtil.isAncestorThreeState(dir.getPath(), changeCandidate.getPath(), false);
}
private static BaseRevision createBaseRevision(@NotNull FilePath path, @NotNull Data data) {
return new BaseRevision(data.vcs, data.revision, path);
}
public void clear() {
myMap.clear();
myAffectedPaths.clear();
myChanges.clear();
}
@NotNull
public Set<FilePath> getAffectedPaths() {
return Collections.unmodifiableSet(myMap.keySet());
}
private static class Data {
@NotNull public final FileStatus status;
public final AbstractVcs vcs;
@NotNull public final VcsRevisionNumber revision;
public Data(@NotNull FileStatus status, AbstractVcs vcs, @NotNull VcsRevisionNumber revision) {
this.status = status;
this.vcs = vcs;
this.revision = revision;
}
public boolean sameRevisions(@NotNull Data data) {
return Comparing.equal(vcs, data.vcs) && Comparing.equal(revision, data.revision);
}
}
}
|
/*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.oauth2.client.authentication;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.oltu.oauth2.common.OAuth;
import org.wso2.carbon.identity.oauth.common.exception.InvalidOAuthClientException;
import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception;
import org.wso2.carbon.identity.oauth2.bean.OAuthClientAuthnContext;
import org.wso2.carbon.identity.oauth2.util.OAuth2Util;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
/**
* This class is dedicated for authenticating 'Public Clients'. Public clients do not need a client secret to be
* authenticated. This type of authentication is regularly utilised by native OAuth2 clients.
*/
/**
 * This class is dedicated for authenticating 'Public Clients'. Public clients do not need a client secret to be
 * authenticated. This type of authentication is regularly utilised by native OAuth2 clients.
 */
public class PublicClientAuthenticator extends AbstractOAuthClientAuthenticator {

    private static final Log log = LogFactory.getLog(PublicClientAuthenticator.class);

    /**
     * Returns the execution order of this authenticator.
     *
     * @return Execution place within the order.
     */
    @Override
    public int getPriority() {

        return 200;
    }

    /**
     * Authenticates the client. A public client that passed {@link #canAuthenticate} is always
     * considered authenticated, since no client secret is required.
     *
     * @param request                 HttpServletRequest which is the incoming request.
     * @param bodyParams              Body parameter map of the request.
     * @param oAuthClientAuthnContext OAuth client authentication context.
     * @return Whether the authentication is successful or not.
     */
    @Override
    public boolean authenticateClient(HttpServletRequest request, Map<String, List> bodyParams, OAuthClientAuthnContext
            oAuthClientAuthnContext) {

        return true;
    }

    /**
     * Returns whether the incoming request can be authenticated or not using the given inputs.
     * The request qualifies when a client ID is present in the body parameters and the
     * corresponding application has enabled authentication without a client secret.
     *
     * @param request    HttpServletRequest which is the incoming request.
     * @param bodyParams Body parameters present in the request.
     * @param context    OAuth2 client authentication context.
     * @return True if can be authenticated, False otherwise.
     */
    @Override
    public boolean canAuthenticate(HttpServletRequest request, Map<String, List> bodyParams, OAuthClientAuthnContext
            context) {

        String clientId = getClientId(request, bodyParams, context);
        try {
            if (isClientIdExistsAsParams(bodyParams)) {
                if (canBypassClientCredentials(context.getClientId())) {
                    if (clientId != null) {
                        context.setClientId(clientId);
                    }
                    return true;
                } else if (log.isDebugEnabled()) {
                    log.debug("The Application (Service Provider) with client ID : " + clientId
                            + " has not enabled the option \"Allow authentication without the client secret\" "
                            + "and no valid Authorization Header exists in the request.");
                }
            } else if (log.isDebugEnabled()) {
                log.debug("Application with the given client ID " + clientId + " is not found");
            }
        } catch (InvalidOAuthClientException e) {
            log.error("Error in retrieving an Application (Service Provider) with client ID : " + clientId, e);
        } catch (IdentityOAuth2Exception e) {
            log.error("Error in Application (Service Provider) with client ID : " + clientId, e);
        }
        // Fix: previously this was logged on every failing path, even when a client ID was present.
        if (StringUtils.isBlank(clientId) && log.isDebugEnabled()) {
            log.debug("The Client ID is not present in the request.");
        }
        return false;
    }

    /**
     * Get the name of the OAuth2 client authenticator.
     *
     * @return The name of the OAuth2 client authenticator.
     */
    @Override
    public String getName() {

        return "PublicClientAuthenticator";
    }

    /**
     * Retrieves the client ID which is extracted from incoming request.
     *
     * @param request                 HttpServletRequest.
     * @param bodyParams              Body parameter map of the incoming request.
     * @param oAuthClientAuthnContext OAuthClientAuthentication context.
     * @return Client ID of the OAuth2 client.
     */
    @Override
    public String getClientId(HttpServletRequest request, Map<String, List> bodyParams, OAuthClientAuthnContext
            oAuthClientAuthnContext) {

        // Populate the context from the body parameters only when it has no client ID yet.
        if (StringUtils.isBlank(oAuthClientAuthnContext.getClientId())) {
            setClientCredentialsFromParam(bodyParams, oAuthClientAuthnContext);
        }
        return oAuthClientAuthnContext.getClientId();
    }

    /**
     * Checks if the client can bypass credentials.
     *
     * @param clientId Client ID
     * @return True is the client can bypass credentials, False otherwise.
     * @throws IdentityOAuth2Exception     OAuth2 exception.
     * @throws InvalidOAuthClientException Invalid OAuth2 client exception.
     */
    private boolean canBypassClientCredentials(String clientId) throws IdentityOAuth2Exception,
            InvalidOAuthClientException {

        return OAuth2Util.getAppInformationByClientId(clientId).isBypassClientCredentials();
    }

    /**
     * Checks for the client ID in body parameters.
     *
     * @param contentParam Request body parameters.
     * @return True if client ID exists as a body parameter, false otherwise.
     */
    private boolean isClientIdExistsAsParams(Map<String, List> contentParam) {

        Map<String, String> stringContent = getBodyParameters(contentParam);
        return (StringUtils.isNotEmpty(stringContent.get(OAuth.OAUTH_CLIENT_ID)));
    }

    /**
     * Sets client id from body parameters to the OAuth client authentication context.
     *
     * @param params  Body parameters of the incoming request.
     * @param context OAuth client authentication context.
     */
    private void setClientCredentialsFromParam(Map<String, List> params, OAuthClientAuthnContext context) {

        Map<String, String> stringContent = getBodyParameters(params);
        context.setClientId(stringContent.get(OAuth.OAUTH_CLIENT_ID));
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.example.quickstart;
import org.apache.rocketmq.client.exception.MQClientException;
import org.apache.rocketmq.client.producer.DefaultMQProducer;
import org.apache.rocketmq.client.producer.SendResult;
import org.apache.rocketmq.common.message.Message;
import org.apache.rocketmq.remoting.common.RemotingHelper;
/**
* This class demonstrates how to send messages to brokers using provided {@link DefaultMQProducer}.
*/
/**
 * Minimal quick-start example showing how to publish messages with {@link DefaultMQProducer}.
 */
public class Producer {
    public static void main(String[] args) throws MQClientException, InterruptedException {
        // A producer group collects instances sending on behalf of the same application.
        final DefaultMQProducer mqProducer = new DefaultMQProducer("please_rename_unique_group_name");

        // Name server addresses may alternatively be supplied through the NAMESRV_ADDR
        // environment variable, or programmatically, e.g.:
        //   mqProducer.setNamesrvAddr("name-server1-ip:9876;name-server2-ip:9876");

        // The instance must be started before any message can be sent.
        mqProducer.start();

        int counter = 0;
        while (counter < 1000) {
            try {
                // Topic and tag route the message; the byte array carries the payload.
                final Message message = new Message("TopicTest",
                    "TagA",
                    ("Hello RocketMQ " + counter).getBytes(RemotingHelper.DEFAULT_CHARSET));

                // Synchronously deliver the message to one of the brokers.
                final SendResult result = mqProducer.send(message);
                System.out.printf("%s%n", result);
            } catch (Exception e) {
                // Best effort: report the failure and back off briefly before the next attempt.
                e.printStackTrace();
                Thread.sleep(1000);
            }
            counter++;
        }

        // Release all resources once the producer is no longer needed.
        mqProducer.shutdown();
    }
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.mycompany.proyecto5;
/**
*
* @author Córdova
*/
/**
 * Aerial vehicle: a {@link Vehiculos} specialisation that additionally tracks altitude.
 *
 * @author Córdova
 */
public class V_Aereo extends Vehiculos {

    // Current altitude; the vehicle starts at ground level.
    private double altitud = 0.0;

    /** Turns the aircraft on. */
    @Override
    public void encender() {
        System.out.println("Encendiendo avión");
    }

    /** @return the current altitude. */
    public double getAltitud() {
        return this.altitud;
    }

    /** @param altitud the new altitude to record. */
    public void setAltitud(double altitud) {
        this.altitud = altitud;
    }
}
|
package com.yammer.metrics.spring;
import com.yammer.metrics.annotation.Timed;
/**
 * Test fixture: implements a marker interface and exposes a single {@code @Timed} method.
 * NOTE(review): presumably used to verify that metrics-spring AOP proxying (via the
 * interface) still intercepts the annotated method — confirm against the test suite.
 */
public class ProxyTargetClass implements UselessInterface {
    // Annotated so the Timed interceptor is expected to wrap this call; body intentionally empty.
    @Timed
    public void timed() {}
}
|
package references.references;
/**
 * Bare data holder exposing a character array.
 * NOTE(review): appears to be a fixture modelling how a string is stored as a character
 * reference (package {@code references.references}) — confirm intended use before extending.
 */
public class StringReference {
    // Character data backing the referenced string; public by design for direct access.
    public char[] string;
}
|
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
* 思蓝灰度Zone测试接口one
*
* @author auto create
* @since 1.0, 2018-07-12 16:34:09
*/
/**
 * Model for the Silan gray-release zone test interface (one).
 *
 * @author auto create
 * @since 1.0, 2018-07-12 16:34:09
 */
public class AlipayOpenAppSilanApigrayoneQueryModel extends AlipayObject {

    private static final long serialVersionUID = 8236381547229236434L;

    /** Request parameter, serialised as {@code param_1}. */
    @ApiField("param_1")
    private String param1;

    /** @return the value of {@code param_1}. */
    public String getParam1() {
        return param1;
    }

    /** @param param1 the value of {@code param_1}. */
    public void setParam1(String param1) {
        this.param1 = param1;
    }
}
|
package cern.enice.jira.amh.baseruleset.rulesets;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import cern.enice.jira.amh.api.JiraCommunicator;
import cern.enice.jira.amh.api.LogProvider;
import cern.enice.jira.amh.api.MailService;
import cern.enice.jira.amh.api.RuleSet;
import cern.enice.jira.amh.baseruleset.Configuration;
import cern.enice.jira.amh.baseruleset.RuleSetUtils;
import cern.enice.jira.amh.dto.EMail;
import cern.enice.jira.amh.dto.IssueDescriptor;
import cern.enice.jira.amh.utils.EmailHandlingException;
/**
 * Rule set that extracts a JIRA issue key from an e-mail subject and decides whether the
 * mail creates a new issue or updates an existing one.
 */
public class IssueKeyRuleSet implements RuleSet {

    /** Regular expression matching a JIRA issue key, e.g. {@code ABC-123}. */
    public static final String REGEX_ISSUEKEY = "\\b[A-Za-z]{2,}-[0-9]+\\b";

    // Fix: compiled once instead of on every process() call.
    private static final Pattern ISSUE_KEY_PATTERN = Pattern.compile(REGEX_ISSUEKEY);

    // Service dependencies
    private volatile LogProvider logger;
    private volatile JiraCommunicator jiraCommunicator;
    @SuppressWarnings("unused")
    private volatile MailService mailService;
    private volatile Configuration configuration;
    private volatile RuleSetUtils ruleSetUtils;

    /**
     * OSGi lifecycle callback method which is called when BaseRuleSet service is started
     */
    public void start() {
        logger.log(LogProvider.INFO, "IssueKeyRuleSet is started.");
    }

    /**
     * OSGi lifecycle callback method which is called when BaseRuleSet service is stopped
     */
    public void stop() {
        logger.log(LogProvider.INFO, "IssueKeyRuleSet is stopped.");
    }

    /**
     * Checks if issue is to be created or updated: sets issue key and
     * queries its original state in the latter case.
     * Throws RuleSetProcessingException if create or update operation is not allowed.
     */
    @Override
    public void process(EMail email, Map<String, String> tokens, IssueDescriptor issueDescriptor)
            throws EmailHandlingException {
        String subjectWithoutTokens = ruleSetUtils.getSubjectWithoutTokens(email.getSubject());
        if (subjectWithoutTokens == null || subjectWithoutTokens.trim().isEmpty())
            return;
        // Look for the first issue-key-shaped token whose project prefix actually exists.
        Matcher matcher = ISSUE_KEY_PATTERN.matcher(subjectWithoutTokens);
        if (matcher.find()) {
            String issueKey = matcher.group().trim().toUpperCase();
            String[] issueKeyParts = issueKey.split("-");
            if (jiraCommunicator.isValidProject(issueKeyParts[0])) {
                issueDescriptor.setKey(issueKey);
            }
        }
        // No key -> create; key present -> update. Both must be explicitly allowed.
        String allowedOperations = configuration.getAllowedOperations();
        String issueKey = issueDescriptor.getKey();
        if (issueKey == null && !allowedOperations.contains(Configuration.CREATE))
            throw new EmailHandlingException("Creating issue is not allowed.");
        if (issueKey != null && !allowedOperations.contains(Configuration.UPDATE))
            throw new EmailHandlingException("Updating issue is not allowed.");
        IssueDescriptor originalIssueStateDescriptor = issueDescriptor.getOriginalState();
        if (issueKey == null) {
            issueDescriptor.setOriginalState(new IssueDescriptor());
        } else if (originalIssueStateDescriptor == null || originalIssueStateDescriptor.getKey() == null) {
            // Fix: guard against a null original state, which previously caused an NPE here.
            originalIssueStateDescriptor = jiraCommunicator.getIssue(issueKey);
            issueDescriptor.setOriginalState(originalIssueStateDescriptor);
        }
    }
}
|
/**
* Copyright (c) 2017-2018, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* * Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
* USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.kafka.test;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Verifies that {@link KafkaBroker} delegates each of its operations to the wrapped
 * {@link KafkaTestServer} instance.
 */
class KafkaBrokerTest {

    /** start() should be passed straight through to the underlying server. */
    @Test
    void testStart() throws Exception {
        final KafkaTestServer serverMock = createMockServer(1, "PLAINTEXT://localhost:1234");
        new KafkaBroker(serverMock).start();
        verify(serverMock, times(1)).start();
    }

    /** stop() should be passed straight through to the underlying server. */
    @Test
    void testStop() throws Exception {
        final KafkaTestServer serverMock = createMockServer(1, "PLAINTEXT://localhost:1234");
        new KafkaBroker(serverMock).stop();
        verify(serverMock, times(1)).stop();
    }

    /** getBrokerId() should return the wrapped server's broker id. */
    @Test
    void testGetBrokerId() {
        final int expectedBrokerId = 1;
        final KafkaTestServer serverMock = createMockServer(expectedBrokerId, "PLAINTEXT://localhost:12345");
        final KafkaBroker broker = new KafkaBroker(serverMock);
        Assertions.assertEquals(expectedBrokerId, broker.getBrokerId());
        verify(serverMock, times(1)).getBrokerId();
    }

    /** getConnectString() should return the wrapped server's kafka connect string. */
    @Test
    void testGetConnectString() {
        final String expectedConnectString = "PLAINTEXT://localhost:12345";
        final KafkaTestServer serverMock = createMockServer(1, expectedConnectString);
        final KafkaBroker broker = new KafkaBroker(serverMock);
        Assertions.assertEquals(expectedConnectString, broker.getConnectString());
        verify(serverMock, times(1)).getKafkaConnectString();
    }

    /**
     * Builds a mock KafkaTestServer reporting the given broker id and connect string.
     *
     * @param brokerId the brokerId to setup mock to use.
     * @return mock KafkaTestServer instance.
     */
    private KafkaTestServer createMockServer(final int brokerId, final String connectString) {
        final KafkaTestServer serverMock = mock(KafkaTestServer.class);
        when(serverMock.getBrokerId()).thenReturn(brokerId);
        when(serverMock.getKafkaConnectString()).thenReturn(connectString);
        return serverMock;
    }
}
|
package org.xdef.impl;
import java.lang.reflect.Method;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;
import java.util.StringTokenizer;
import javax.xml.XMLConstants;
import org.w3c.dom.Element;
import org.xdef.XDEmailAddr;
import org.xdef.XDException;
import org.xdef.XDInetAddr;
import org.xdef.XDOutput;
import org.xdef.XDParseResult;
import org.xdef.XDParser;
import org.xdef.XDReport;
import org.xdef.XDService;
import org.xdef.XDValue;
import org.xdef.XDValueID;
import org.xdef.XDXmlOutStream;
import org.xdef.impl.code.CodeDisplay;
import org.xdef.impl.code.CodeExtMethod;
import org.xdef.impl.code.CodeTable;
import org.xdef.impl.code.DefBNFGrammar;
import org.xdef.impl.code.DefBigInteger;
import org.xdef.impl.code.DefBoolean;
import org.xdef.impl.code.DefBytes;
import org.xdef.impl.code.DefContainer;
import org.xdef.impl.code.DefDate;
import org.xdef.impl.code.DefDecimal;
import org.xdef.impl.code.DefDouble;
import org.xdef.impl.code.DefDuration;
import org.xdef.impl.code.DefElement;
import org.xdef.impl.code.DefException;
import org.xdef.impl.code.DefGPSPosition;
import org.xdef.impl.code.DefInStream;
import org.xdef.impl.code.DefLocale;
import org.xdef.impl.code.DefLong;
import org.xdef.impl.code.DefNamedValue;
import org.xdef.impl.code.DefNull;
import org.xdef.impl.code.DefObject;
import org.xdef.impl.code.DefOutStream;
import org.xdef.impl.code.DefParseResult;
import org.xdef.impl.code.DefPrice;
import org.xdef.impl.code.DefReport;
import org.xdef.impl.code.DefSQLService;
import org.xdef.impl.code.DefString;
import org.xdef.impl.code.DefXmlWriter;
import org.xdef.impl.compile.CompileBase;
import org.xdef.impl.ext.XExtUtils;
import org.xdef.msg.XDEF;
import org.xdef.proc.XXElement;
import org.xdef.proc.XXException;
import org.xdef.proc.XXNode;
import org.xdef.sys.GPSPosition;
import org.xdef.sys.Price;
import org.xdef.sys.Report;
import org.xdef.sys.SBuffer;
import org.xdef.sys.SDatetime;
import org.xdef.sys.SDuration;
import org.xdef.sys.SError;
import org.xdef.sys.SRuntimeException;
import org.xdef.sys.SUtils;
import org.xdef.sys.StringParser;
import org.xdef.xml.KXmlUtils;
/** Provides invoking of external method from script code.
* @author Vaclav Trojan
*/
final class XCodeProcessorExt implements CodeTable, XDValueID {
/**
 * Executes an interpreter operation that takes one operand and yields a value.
 *
 * @param item the code item to execute; its code selects the operation.
 * @param p the single operand.
 * @return the operation's result, or null when the code is not handled here.
 */
static final XDValue perform1v(final XDValue item, final XDValue p) {
    switch (item.getCode()) {
        case GET_TYPEID: // get type of a value (as integer type id)
            return new DefLong(p.getItemId());
        case GET_TYPENAME: // get name of type of a value
            return new DefString(CompileBase.getTypeName(p.getItemId()));
        case CHECK_TYPE: // check type conversion
            if (p != null && !p.isNull() && p.getItemId() != item.getParam()) {
                switch (p.getItemId()) {
                    case XD_BOOLEAN:
                        return new DefBoolean(p.booleanValue());
                    case XD_LONG:
                        return new DefLong(p.intValue());
                    case XD_DOUBLE:
                        return new DefDouble(p.floatValue());
                    case XD_DECIMAL:
                        return new DefDecimal(p.decimalValue());
                    case XD_BIGINTEGER:
                        return new DefBigInteger(p.integerValue());
                    case XD_DATETIME:
                        return new DefDate(p.datetimeValue());
                    case XD_DURATION:
                        return new DefDuration(p.durationValue());
                    case XD_STRING:
                        return new DefString(p.stringValue());
                    case XD_ELEMENT:
                        if (item.getParam() == XD_CONTAINER) {
                            return new DefContainer(p);
                        }
                        // FIX: a break was missing here; an element that cannot be
                        // converted used to fall through into the XD_EMAIL branch
                        // and fail with a ClassCastException instead of reporting
                        // the XDEF536 conversion error below.
                        break;
                    case XD_EMAIL:
                        return (XDEmailAddr) p;
                    case XD_INETADDR:
                        return (XDInetAddr) p;
                }
                // Incorrect type conversion from AnyValue
                throw new SRuntimeException(XDEF.XDEF536);
            }
            return p;
        case BYTES_CLEAR: // clear byte array
            ((DefBytes) p).clear();
            return p;
        case BYTES_SIZE: // size of byte array
            return new DefLong(((DefBytes) p).size());
        case BYTES_TO_BASE64:
            return new DefString(((DefBytes) p).getBase64());
        case BYTES_TO_HEX:
            return new DefString(((DefBytes) p).getHex());
        case PARSE_DURATION: // Duration
            try {
                return new DefDuration(p.toString());
            } catch (Exception ex) {
                // Best effort: an unparsable duration yields a null duration value.
                return DefNull.genNullValue(XD_DURATION);
            }
        // Duration accessors: -1 (or an empty/default value) when the operand is null.
        case DURATION_GETYEARS:
            return p == null || p.isNull()
                ? new DefLong(-1)
                : new DefLong(p.durationValue().getYears());
        case DURATION_GETMONTHS:
            return p == null || p.isNull()
                ? new DefLong(-1)
                : new DefLong(p.durationValue().getMonths());
        case DURATION_GETDAYS:
            return p == null || p.isNull()
                ? new DefLong(-1)
                : new DefLong(p.durationValue().getDays());
        case DURATION_GETHOURS:
            return p == null || p.isNull()
                ? new DefLong(-1)
                : new DefLong(p.durationValue().getHours());
        case DURATION_GETMINUTES:
            return p == null || p.isNull()
                ? new DefLong(-1)
                : new DefLong(p.durationValue().getMinutes());
        case DURATION_GETSECONDS:
            return p == null || p.isNull()
                ? new DefLong(-1)
                : new DefLong(p.durationValue().getSeconds());
        case DURATION_GETRECURRENCE:
            return p == null || p.isNull()
                ? new DefLong(-1)
                : new DefLong(p.durationValue().getRecurrence());
        case DURATION_GETFRACTION:
            return p == null || p.isNull()
                ? new DefDouble(-1)
                : new DefDouble(p.durationValue().getFraction());
        case DURATION_GETSTART:
            return p == null || p.isNull()
                ? new DefDate()
                : new DefDate(p.durationValue().getStart());
        case DURATION_GETEND:
            return p == null || p.isNull()
                ? new DefDate()
                : new DefDate(p.durationValue().getEnd());
        case DURATION_GETNEXTTIME:
            return p == null || p.isNull()
                ? new DefDate()
                : new DefDate(p.durationValue().getNextTime());
        // Element
        case ELEMENT_CHILDNODES: {
            Element el;
            return p == null || p.isNull() || (el = p.getElement()) == null
                ? new DefContainer() : new DefContainer(el.getChildNodes());
        }
        case ELEMENT_NAME: {
            Element el;
            return p == null || p.isNull() || (el = p.getElement()) == null
                ? new DefString(null) : new DefString(el.getTagName());
        }
        case ELEMENT_NSURI: {
            Element el;
            return p == null || p.isNull() || (el = p.getElement()) == null
                ? new DefString(null) : new DefString(el.getNamespaceURI());
        }
        // ParseResult
        case GET_PARSED_STRING:
            return new DefString(((DefParseResult) p).getSourceBuffer());
        // datetime
        case GET_DAY: // get day from date
            return new DefLong(p.datetimeValue().getDay());
        case GET_WEEKDAY: // get week day
            return new DefLong(p.datetimeValue().getDayOfWeek());
        case GET_MONTH: // get month from date
            return new DefLong(p.datetimeValue().getMonth());
        case GET_YEAR: // get year from date
            return new DefLong(p.datetimeValue().getYear());
        case GET_HOUR: // get hour from date
            return new DefLong(p.datetimeValue().getHour());
        case GET_MINUTE: // get minute from date
            return new DefLong(p.datetimeValue().getMinute());
        case GET_SECOND: // get second
            return new DefLong(p.datetimeValue().getSecond());
        case GET_MILLIS: // get millisecond
            return new DefLong(p.datetimeValue().getMillisecond());
        case GET_NANOS:
            return new DefLong(p.datetimeValue().getNanos());
        case GET_FRACTIONSECOND: // get fraction of second
            return new DefDouble(p.datetimeValue().getFraction());
        case GET_EASTERMONDAY:
            // Operand may be either a datetime or a plain year number.
            return new DefDate(p.getItemId() == XD_DATETIME
                ? p.datetimeValue().getEasterMonday()
                : SDatetime.getEasterMonday(p.intValue()));
        case GET_LASTDAYOFMONTH:
            return new DefLong(SDatetime.getLastDayOfMonth(p.datetimeValue()));
        case GET_DAYTIMEMILLIS: // get daytime
            return new DefLong(p.datetimeValue().getDaytimeInMillis());
        case GET_ZONEOFFSET: // zone shift to GMT
            return new DefLong(p.datetimeValue().getTimeZoneOffset());
        case GET_ZONEID: // get time zone name
            return new DefString(p.datetimeValue().getTZ().getID());
        case IS_LEAPYEAR: // true if the year (or datetime's year) is a leap year
            return new DefBoolean(SDatetime.isLeapYear(
                p.getItemId() == XD_LONG
                    ? p.intValue() : p.datetimeValue().getYear()));
        // String
        case LOWERCASE: { // set to lower case
            String s = p.stringValue();
            return s != null ? new DefString(s.toLowerCase()) : p;
        }
        case UPPERCASE: { // set to upper case
            String s = p.stringValue();
            return s != null ? new DefString(s.toUpperCase()) : p;
        }
        case TRIM_S: {
            String s = p.stringValue();
            return s != null ? new DefString(s.trim()) : p;
        }
        case GET_STRING_LENGTH: { // s.length()
            String s = p.stringValue();
            return new DefLong(s != null ? s.length() : 0);
        }
        case WHITESPACES_S: { // collapse each run of whitespace into a single space
            StringBuilder s = new StringBuilder(p.toString().trim());
            for (int i = s.length() - 1; i >= 0; i--) {
                char c;
                int j = i;
                while (j >= 0 && ((c = s.charAt(j)) == ' '
                    || c == '\n' || c == '\t' || c == '\r')) {
                    j--;
                }
                if (j < i) {
                    s.replace(j + 1, i + 1, " ");
                    i = j;
                }
            }
            return new DefString(s.toString());
        }
        // Report
        case GET_REPORT: {
            Report rep;
            if (p.isNull()) {
                rep = null;
            } else if (p.getItemId() == XD_EXCEPTION) {
                rep = ((XDException) p).reportValue();
            } else {
                // Otherwise the operand is an output stream; take its last error report.
                XDOutput out = (XDOutput) p;
                rep = out.getLastErrorReport();
            }
            return new DefReport(rep);
        }
    }
    // Code not handled by this method.
    return null;
}
/**
 * Executes an interpreter operation that takes two operands and yields no value.
 *
 * @param cmd the code item to execute; its code selects the operation.
 * @param p1 the first operand (the target object).
 * @param p2 the second operand (the value to apply).
 */
static final void perform2(final XDValue cmd,
    final XDValue p1,
    final XDValue p2) {
    final int code = cmd.getCode();
    if (code == ELEMENT_ADDELEMENT) {
        // Append p2's element (imported into p1's document) as a child of p1's element.
        final Element target = p1.getElement();
        target.appendChild(
            target.getOwnerDocument().importNode(p2.getElement(), true));
    } else if (code == ELEMENT_ADDTEXT) {
        // Append p2's string as a text-node child of p1's element (non-empty only).
        final Element target = p1.getElement();
        final String text = p2.stringValue();
        if (text != null && !text.isEmpty()) {
            target.appendChild(target.getOwnerDocument().createTextNode(text));
        }
    } else if (code == BYTES_ADDBYTE) {
        // Append a single byte to the byte array.
        ((DefBytes) p1).add(p2.intValue());
    } else if (code == PUT_REPORT) {
        // Write a non-null report to the output.
        if (!p2.isNull()) {
            ((XDOutput) p1).putReport(((XDReport) p2).reportValue());
        }
    } else if (code == SET_XMLWRITER_INDENTING) {
        // Toggle indenting on the XML writer.
        ((XDXmlOutStream) p1).setIndenting(p2.booleanValue());
    } else if (code == WRITE_TEXTNODE) {
        // Write a text node through the XML writer.
        ((XDXmlOutStream) p1).writeText(p2.stringValue());
    }
}
static final XDValue perform2v(final XDValue cmd,
final XDValue p1,
final XDValue p2) {
switch (cmd.getCode()) {
//formating number to string
case INTEGER_FORMAT:
case FLOAT_FORMAT: {
String s = p2.toString();
int ndx;
DecimalFormat ds;
if (s.length() > 2 && s.startsWith("{L(") &&
(ndx = s.indexOf(")}")) > 0) {
StringTokenizer st =
new StringTokenizer(s.substring(3,ndx), " \n\t\r,");
s = s.substring(ndx + 2);
ds = new DecimalFormat(s);
String s1 = st.nextToken().toLowerCase();
String s2 =
(st.hasMoreTokens() ? st.nextToken():"").toUpperCase();
DecimalFormatSymbols dfs = new DecimalFormatSymbols(
st.hasMoreTokens() ?
new Locale(s1,s2,st.nextToken()) :
new Locale(s1,s2));
ds.setDecimalFormatSymbols(dfs);
} else {
ds = new DecimalFormat(s);
if (cmd.getCode() == FLOAT_FORMAT) {
DecimalFormatSymbols df = ds.getDecimalFormatSymbols();
df.setDecimalSeparator('.');
ds.setDecimalFormatSymbols(df);
}
}
return new DefString(cmd.getCode() == INTEGER_FORMAT ?
ds.format(p1.longValue()):ds.format(p1.doubleValue()));
}
//Bytes
case BYTES_GETAT: //Get byte at position
return new DefLong(((DefBytes) p1).getAt(p2.intValue()));
//datetime
case DATE_FORMAT: //format date
return new DefString(
p1.datetimeValue().formatDate(p2.toString()));
case ADD_DAY: //Add days to date.
return new DefDate(p1.datetimeValue().add(
0, 0, p2.intValue(), 0, 0, 0, 0.0));
case ADD_MONTH: //Add months to date.
return new DefDate(p1.datetimeValue().add(
0, p2.intValue(), 0, 0, 0, 0, 0.0));
case ADD_YEAR: //Add years to date.
return new DefDate(p1.datetimeValue().add(
p2.intValue(), 0, 0, 0, 0, 0, 0.0));
case ADD_HOUR: //Add hours to date.
return new DefDate(p1.datetimeValue().add(
0, 0, 0, p2.intValue(), 0, 0, 0.0));
case ADD_MINUTE: //Add minutes to date.
return new DefDate(p1.datetimeValue().add(
0, 0, 0, 0, p2.intValue(), 0, 0.0));
case ADD_SECOND: //Add seconds to date.
return new DefDate(p1.datetimeValue().add(
0, 0, 0, 0, 0, p2.intValue(), 0.0));
case ADD_MILLIS: {//Add millisecs to date.
long amount = p2.longValue();
return new DefDate(p1.datetimeValue().add(0, 0, 0, 0, 0,
(int) amount/1000, (amount%1000)/1000.0));
}
case ADD_NANOS: {//Add nanosecs to date.
long amount = p2.longValue();
return new DefDate(p1.datetimeValue().add(0, 0, 0, 0, 0,
(int) (amount / 1000000000L),
(amount % 1000000000L)/1000000000.0));
}
case SET_DAY: {//Set day from date.
SDatetime t = p1.datetimeValue();
t.setDay(p2.intValue());
return new DefDate(t);
}
case SET_MONTH: {//Set month from date.
SDatetime t = p1.datetimeValue();
t.setMonth(p2.intValue());
return new DefDate(t);
}
case SET_YEAR: {//Set year from date.
SDatetime t = p1.datetimeValue();
t.setYear(p2.intValue());
return new DefDate(t);
}
case SET_HOUR: {//Set hour from date.
SDatetime t = p1.datetimeValue();
t.setHour(p2.intValue());
return new DefDate(t);
}
case SET_MINUTE: {//Set minute from date.
SDatetime t = p1.datetimeValue();
t.setMinute(p2.intValue());
return new DefDate(t);
}
case SET_SECOND: {//Set second from date.
SDatetime t = p1.datetimeValue();
t.setSecond(p2.intValue());
return new DefDate(t);
}
case SET_MILLIS: {//Set millisecond.
SDatetime t = p1.datetimeValue();
t.setMillisecond(p2.intValue());
return new DefDate(t);
}
case SET_NANOS: {//Set nanosecond.
SDatetime t = p1.datetimeValue();
t.setNanos(p2.intValue());
return new DefDate(t);
}
case SET_FRACTIONSECOND: {//Set fraction of second.
SDatetime t = p1.datetimeValue();
t.setFraction(p2.doubleValue());
return new DefDate(t);
}
case SET_DAYTIMEMILLIS: {
SDatetime t = p1.datetimeValue();
t.setDaytimeInMillis(p2.intValue());
return new DefDate(t);
}
case SET_ZONEOFFSET: {//shift to GMT
SDatetime t = p1.datetimeValue();
t.setTimeZoneOffset(p2.intValue());
return new DefDate(t);
}
case SET_ZONEID: { //Set time zone name
SDatetime t = p1.datetimeValue();
t.setTimeZoneID(p2.stringValue());
return new DefDate(t);
}
//String
case GET_STRING_TAIL: {//tail(s,i);
int i = p2.intValue();
String s = p1.stringValue();
if (s != null) {
int j = s.length() - i;
if (j > 0) {
s = s.substring(j);
}
}
return new DefString(s);
}
case GET_SUBSTRING: {//s.substring(i);
int i = p2.intValue();
String s = p1.stringValue();
return (s != null && s.length() > i) ?
new DefString(s.substring(i)) : new DefString("");
}
case CUT_STRING: {//cut(s,i);
int i = p2.intValue();
String s = p1.stringValue();
return (s != null && s.length() > i) ?
new DefString(s.substring(0, i)) : new DefString(s);
}
//Report
case REPORT_TOSTRING:
return new DefString(
((XDReport) p2).toString(p1.stringValue()));
case REPORT_GETPARAM:
return new DefString(
((XDReport) p1).getParameter(p2.stringValue()));
//constructors
case NEW_NAMEDVALUE:
return new DefNamedValue(p1.stringValue(),p2);
}
return null;
}
    /** Execute a command with a variable number of stack operands
     * (byte/datetime/element/string utilities and object constructors).
     * @param cp processor whose helpers (string parser, reporter) are used.
     * @param item command to be executed.
     * @param sp1 stack pointer on entry.
     * @param stack stack.
     * @return new value of stack pointer.
     * @throws Exception if the executed command throws.
     */
    static final int perform(final XCodeProcessor cp,
        final XDValue item,
        final int sp1,
        final XDValue[] stack) throws Exception {
        int sp = sp1;
        switch (item.getCode()) {
            //Bytes
            case BYTES_INSERT: {//Insert byte before
                int b = stack[sp--].intValue();
                int pos = stack[sp--].intValue();
                ((DefBytes) stack[sp--]).insertBefore(pos, b);
                return sp;
            }
            case BYTES_REMOVE: {//remove byte(s)
                // With two parameters remove a single byte, otherwise the
                // count is taken from the stack.
                int size = item.getParam() == 2 ? 1 : stack[sp--].intValue();
                int pos = stack[sp--].intValue();
                ((DefBytes) stack[sp--]).remove(pos, size);
                return sp;
            }
            case BYTES_SETAT: {//set byte at position
                int b = stack[sp--].intValue();
                int pos = stack[sp--].intValue();
                ((DefBytes) stack[sp--]).setAt(pos, b);
                return sp;
            }
            case PARSE_DATE: { //parse Datetime
                // The mask is optional (2nd parameter); without it the value
                // must be an ISO 8601 datetime.
                String mask = null;
                if (item.getParam() == 2) {
                    mask = stack[sp].stringValue();
                    sp--;
                }
                String s = stack[sp].stringValue();
                s = s == null ? "" : s.trim();
                StringParser p = cp.getStringParser();
                p.setSourceBuffer(s);
                boolean parsed =
                    mask == null ? p.isISO8601Datetime() : p.isDatetime(mask);
                // On failure push a typed null instead of a date.
                stack[sp] = parsed && p.eos() && p.testParsedDatetime()
                    ? new DefDate(p.getParsedSDatetime())
                    : DefNull.genNullValue(XD_DATETIME);
                return sp;
            }
            // Element
            case ELEMENT_TOSTRING: { //Get text value of the element
                // Optional 2nd parameter switches indented output on.
                boolean indent =
                    item.getParam() == 2 ? stack[sp--].booleanValue() : false;
                Element el = stack[sp].getElement();
                stack[sp] = el != null
                    ? new DefString(KXmlUtils.nodeToString(el, indent))
                    : DefNull.genNullValue(XD_ELEMENT);
                return sp;
            }
            case ELEMENT_TOCONTAINER: { // Element to container
                DefElement e = new DefElement(stack[sp].getElement());
                stack[sp] = e.toContainer();
                return sp;
            }
            case ELEMENT_GETATTR: { // Get attribute of the element
                String name = stack[sp--].toString();
                // Optional namespace URI as the 3rd parameter.
                String uri = item.getParam() == 3
                    ? stack[sp--].stringValue() : null;
                Element el = stack[sp].getElement();
                if (uri == null) {
                    stack[sp] = new DefString(el.getAttribute(name));
                } else {
                    // With a namespace lookup the prefix is stripped from
                    // the qualified name.
                    int i = name.indexOf(':');
                    if (i > 0) {
                        name = name.substring(i + 1);
                    }
                    stack[sp] = new DefString(el.getAttributeNS(uri, name));
                }
                return sp;
            }
            case ELEMENT_HASATTR: { // has attribute of the element
                String name = stack[sp--].toString();
                String uri =
                    item.getParam() == 3 ? stack[sp--].stringValue() : null;
                Element el = stack[sp].getElement();
                if (uri == null) {
                    stack[sp] = new DefBoolean(el.hasAttribute(name));
                } else {
                    int i = name.indexOf(':');
                    if (i > 0) {
                        name = name.substring(i + 1);
                    }
                    stack[sp] = new DefBoolean(el.hasAttributeNS(uri, name));
                }
                return sp;
            }
            case ELEMENT_SETATTR: { // Set attribute to element
                String value = stack[sp--].stringValue();
                String name = stack[sp--].toString();
                String uri =
                    item.getParam() == 4 ? stack[sp--].stringValue() : null;
                Element el = stack[sp--].getElement();
                if (uri != null) {
                    // A null value removes the attribute instead of setting it.
                    if (value == null) {
                        int i = name.indexOf(':');
                        if (i > 0) {
                            name = name.substring(i + 1);
                        }
                        el.removeAttributeNS(uri, name);
                    } else {
                        el.setAttributeNS(uri, name, value);
                    }
                } else {
                    if (value == null) {
                        el.removeAttribute(name);
                    } else {
                        el.setAttribute(name, value);
                    }
                }
                return sp;
            }
            //String
            case TRANSLATE_S: // translate(s,t)
            case REPLACEFIRST_S: // replaceFirst(s,t)
            case REPLACE_S: { // replace(s,t)
                String q = stack[sp--].stringValue();
                String p = stack[sp--].stringValue();
                String s = stack[sp].stringValue();
                stack[sp] = new DefString(item.getCode() == TRANSLATE_S ?
                    SUtils.translate(s,p,q) :
                    item.getCode() == REPLACEFIRST_S ?
                        SUtils.modifyFirst(s, p, q) :
                        SUtils.modifyString(s, p, q));
                return sp;
            }
            case GET_SUBSTRING: {//s.substring(i[,j]);
                int j = stack[sp--].intValue();
                if (item.getParam() == 2) {//s.substring(i)
                    String s = stack[sp].stringValue();
                    stack[sp] = new DefString(s != null && s.length() > j
                        ? s.substring(j) : "");
                } else {
                    int i = stack[sp--].intValue();
                    String s = stack[sp].stringValue();
                    stack[sp] = new DefString(s != null && s.length() > i
                        ? s.substring(i, j) : "");
                }
                return sp;
            }
            case GET_INDEXOFSTRING:
            case GET_LASTINDEXOFSTRING: {
                int ndx;
                if (item.getParam() == 2) {//s.indexOf(s)
                    String s = stack[sp--].stringValue();
                    String t = stack[sp].stringValue();
                    ndx = item.getCode() == GET_INDEXOFSTRING
                        ? t.indexOf(s) : t.lastIndexOf(s);
                } else {//s.indexOf(s, pos)
                    int i = stack[sp--].intValue();
                    String s = stack[sp--].stringValue();
                    String t = stack[sp].stringValue();
                    ndx = item.getCode() == GET_INDEXOFSTRING
                        ? t.indexOf(s, i) : t.lastIndexOf(s, i);
                }
                stack[sp] = new DefLong(ndx);
                return sp;
            }
            //Report
            case REPORT_SETPARAM: {
                String value = stack[sp--].stringValue();
                String name = stack[sp--].stringValue();
                XDReport x = (XDReport) stack[sp];
                stack[sp] = x.setParameter(name, value);
                return sp;
            }
            case REPORT_SETTYPE: {
                // Report type is the first character; defaults to 'T' (text).
                String s = stack[sp--].toString();
                stack[sp] = ((XDReport) stack[sp]).setType(
                    (byte) (s == null || s.isEmpty() ? 'T' : s.charAt(0)));
                return sp;
            }
            ////////////////////////////////////////////////////////////////////////////////
            //Constructors
            ////////////////////////////////////////////////////////////////////////////////
            case NEW_CONTAINER: {
                int i;
                if ((i = item.getParam()) == 0) {
                    stack[++sp] = DefNull.genNullValue(XD_CONTAINER);
                } else {
                    // Collapse the top i stack items into one container.
                    sp -= i - 1;
                    stack[sp] = new DefContainer(stack, sp, sp + i - 1);
                }
                return sp;
            }
            case NEW_ELEMENT: {
                String name = stack[sp].toString();
                String uri = item.getParam()==1 ? null : stack[--sp].toString();
                Element el =
                    KXmlUtils.newDocument(uri, name, null).getDocumentElement();
                if (uri != null) {
                    // Declare the namespace explicitly ("xmlns" or
                    // "xmlns:prefix" depending on the qualified name).
                    String nsAttr = "xmlns";
                    int i = name.indexOf(':');
                    if (i > 0) {
                        nsAttr += ':' + name.substring(0, i);
                    }
                    el.setAttributeNS(
                        XMLConstants.XMLNS_ATTRIBUTE_NS_URI, nsAttr, uri);
                }
                stack[sp] = new DefElement(el);
                return sp;
            }
            case NEW_BYTES:
                if (item.getParam() == 0) {
                    stack[++sp] = new DefBytes(new byte[0]);
                } else {
                    stack[sp] =
                        new DefBytes(new byte[stack[sp].intValue()]);
                }
                return sp;
            case NEW_INSTREAM:
                switch (item.getParam()) {
                    case 3: {
                        boolean xmlFormat = stack[sp--].booleanValue();
                        String s = stack[sp--].toString();
                        stack[sp] = new DefInStream(
                            stack[sp].toString(), s, xmlFormat);
                        break;
                    }
                    case 2: {
                        // The 2nd argument is either the xmlFormat flag or
                        // the character encoding name.
                        XDValue v = stack[sp--];
                        if (v.getItemId() == XD_BOOLEAN) {
                            stack[sp] = new DefInStream(stack[sp].toString(),
                                v.booleanValue());
                        } else {
                            stack[sp] = new DefInStream(stack[sp].toString(),
                                v.toString(), false);
                        }
                        break;
                    }
                    default:
                        stack[sp] =
                            new DefInStream(stack[sp].toString(), false);
                        break;
                }
                return sp;
            case NEW_OUTSTREAM: {
                switch (item.getParam()) {
                    case 3: {
                        boolean xmlFormat = stack[sp--].booleanValue();
                        String s = stack[sp--].toString();
                        stack[sp] = new DefOutStream(
                            stack[sp].toString(),s, xmlFormat);
                        break;
                    }
                    case 2: {
                        String s = stack[sp--].toString();
                        stack[sp] = new DefOutStream(stack[sp].toString(), s);
                        break;
                    }
                    default:
                        stack[sp] = new DefOutStream(stack[sp].toString());
                        break;
                }
                return sp;
            }
            case NEW_BNFGRAMAR: {
                int extndx;
                DefBNFGrammar y;
                String s;
                if (item.getParam() == 1) {
                    extndx = -1;
                    y = null;
                    s = stack[sp].toString();
                } else {
                    // Two parameters: extend an existing grammar.
                    extndx = 0;
                    y = (DefBNFGrammar) stack[sp--];
                    s = stack[sp].toString();
                }
                try {
                    //we MUST recompile this with actual data!!!
                    DefBNFGrammar x = new DefBNFGrammar(y,
                        extndx, new SBuffer(s), null);
                    x.setCode(LD_CONST_I); //However, we do it just first time!
                    stack[sp] = x;
                } catch (SRuntimeException ex) {
                    cp.getTemporaryReporter().putReport(ex.getReport());
                }
                return sp;
            }
            case NEW_SERVICE: {
                String passw = stack[sp--].stringValue();
                String user = stack[sp--].stringValue();
                String url = stack[sp--].stringValue();
                String service = stack[sp].stringValue();
                XDService c;
                if ("JDBC".equalsIgnoreCase(service)) {
                    c = new DefSQLService(url, user, passw);
                    // NOTE(review): the property key "autocomit" is passed
                    // verbatim to the service; confirm this spelling is what
                    // DefSQLService expects before changing it.
                    c.setProperty("autocomit", "yes");
                    stack[sp] = c;
                } else {
                    throw new SRuntimeException("Unknown service: " + service);
                }
                return sp;
            }
            case NEW_XMLWRITER: {
                // writehdr defaults to true, encoding to null (platform).
                boolean writehdr =
                    item.getParam() == 3 ? stack[sp--].booleanValue() : true;
                String encoding =
                    item.getParam() >= 2 ? stack[sp--].stringValue() : null;
                String name = stack[sp].stringValue();
                stack[sp] = new DefXmlWriter(name, encoding, writehdr);
                return sp;
            }
            case NEW_REPORT: {
                int numPar = item.getParam();
                if (numPar == 1) {
                    stack[sp] = new DefReport(stack[sp].toString());
                } else {
                    String modification =
                        numPar == 3 ? stack[sp--].stringValue() : null;
                    String text = stack[sp--].stringValue();
                    String id = stack[sp].stringValue();
                    stack[sp] =
                        new DefReport(Report.text(id, text, modification));
                }
                return sp;
            }
            case NEW_LOCALE: {
                // Locale(language[,COUNTRY[,variant]]); language is lower
                // cased and country upper cased as java.util.Locale expects.
                int numPar = item.getParam();
                if (numPar == 1) {
                    stack[sp] =
                        new DefLocale(stack[sp].toString().toLowerCase());
                } else if (numPar == 2) {
                    stack[sp - 1] =
                        new DefLocale(stack[sp-1].toString().toLowerCase(),
                            stack[sp].toString().toUpperCase());
                    sp--;
                } else {
                    stack[sp - 2] =
                        new DefLocale(stack[sp-2].toString().toLowerCase(),
                            stack[sp-1].toString().toUpperCase(),
                            stack[sp].toString());
                    sp -= 2;
                }
                return sp;
            }
        }
        return sp;
    }
/** Execute command.
* @param cp XCodeProcessor object.
* @param cmd command to be executed.
* @param xNode actually processed node.
* @param sp stack pointer.
* @param stack stack.
* @param pc program counter.
* @return new value of stack pointer.
* @throws Exception
*/
static final int performX(final XCodeProcessor cp,
final XDValue cmd,
final ChkNode chkNode,
final int sp1,
final XDValue[] stack,
final int pc) throws Exception {
int sp = sp1;
switch (cmd.getCode()) {
// Parsing
case PARSE_INT: {
String s = stack[sp].stringValue();
s = s == null ? "" : s.trim();
StringParser p = cp.getStringParser();
p.setSourceBuffer(s);
stack[sp] = p.isSignedInteger() && p.eos()
? new DefLong(p.getParsedLong())
: DefNull.genNullValue(XD_LONG);
return sp;
}
case PARSE_FLOAT: {
String s = stack[sp].stringValue();
s = s == null ? "" : s.trim();
StringParser p = cp.getStringParser();
p.setSourceBuffer(s);
stack[sp] = p.isSignedFloat() && p.eos()
? new DefDouble(p.getParsedDouble())
: DefNull.genNullValue(XD_DOUBLE);
return sp;
}
case GET_PARSED_BOOLEAN:
case GET_PARSED_BYTES:
case GET_PARSED_DECIMAL:
case GET_PARSED_LONG:
case GET_PARSED_DOUBLE:
case GET_PARSED_DATETIME:
case GET_PARSED_DURATION: {
XDParseResult pr = (cmd.getParam() == 1)
? (XDParseResult) stack[sp--] : chkNode._parseResult;
XDValue val = pr.getParsedValue();
if (val == null) {
val = new DefNull();
}
stack[++sp] = val;
return sp;
}
////////////////////////////////////////////////////////////////////////////////
case GET_NS: //getNamespaceURI()
if (cmd.getParam() == 0) {
String s = chkNode.getElement().getNamespaceURI();
if (s == null) {
s = "";
}
stack[++sp] = new DefString(s);
} else {
Element el;
String prefix;
if (cmd.getParam() == 1) {
if (stack[sp].getItemId() == XD_ELEMENT) {
el = ((DefElement) stack[sp]).getElement();
String s = el == null ? "" : el.getNamespaceURI();
if (s == null) {
s = "";
}
stack[sp] = new DefString(s);
return sp;
} else {
prefix = stack[sp].toString();
el = chkNode.getElement();
}
} else {
prefix = stack[sp].toString();
el = ((DefElement) stack[--sp]).getElement();
}
stack[sp] = new DefString(XExtUtils.getNSUri(prefix, el));
}
return sp;
case GET_QNAMEURI: {//getQnameURI(s[,e])
String prefix = stack[sp].isNull()? "" : stack[sp].toString();
Element el = cmd.getParam() == 1 ? chkNode.getElement()
: ((DefElement) stack[--sp]).getElement();
int ndx;
prefix = (ndx = prefix.indexOf(':')) <= 0 ?
"" : prefix.substring(0, ndx);
stack[sp] = new DefString(XExtUtils.getNSUri(prefix, el));
return sp;
}
case WRITE_ELEMENT_START: { // Write element start.
Element el = cmd.getParam() == 2 ?
stack[sp--].getElement() : chkNode.getElement();
((XDXmlOutStream) stack[sp--]).writeElementStart(el);
return sp;
}
case WRITE_ELEMENT_END: {// Write element end tag.
if (cmd.getParam() == 2) {
sp--;
}
((XDXmlOutStream) stack[sp--]).writeElementEnd();
return sp;
}
case WRITE_ELEMENT: {// Write element.
Element el = cmd.getParam() == 2 ?
stack[sp--].getElement() : chkNode.getElement();
((XDXmlOutStream) stack[sp--]).writeNode(el);
return sp;
}
case GET_LASTERROR: {
Report rep = cp.getTemporaryReporter().getLastErrorReport();
if (rep == null) {
rep = chkNode.getReportWriter().getLastErrorReport();
}
stack[++sp] = new DefReport(rep);
return sp;
}
case IS_CREATEMODE: {
stack[++sp] =
new DefBoolean(chkNode.getXDDocument().isCreateMode());
return sp;
}
////////////////////////////////////////////////////////////////////////////////
//Constructors
////////////////////////////////////////////////////////////////////////////////
case NEW_EXCEPTION: {
Report rep;
switch (cmd.getParam()) {
case 1:
rep = Report.error(null, stack[sp].toString());
break;
case 2:
sp--;
rep = Report.error(
stack[sp].toString(), stack[sp].toString());
break;
default:
//if (item.getParam() == 3)
sp -= 2;
rep = Report.error(stack[sp].toString(),
stack[sp + 1].toString(),
stack[sp + 2].toString());
break;
}
stack[sp] = new DefException(rep,
chkNode != null ? chkNode.getXPos() : null, pc);
return sp;
}
case NEW_GPSPOSITION: {
double latitude;
double longitude;
double altitude = Double.MIN_VALUE;
String name = null;
if (cmd.getParam() == 4) {
latitude = stack[sp-3].doubleValue();
longitude = stack[sp-2].doubleValue();
altitude = stack[sp-1].doubleValue();
name = stack[sp].isNull() ? null : stack[sp].stringValue();
sp -= 3;
} else if (cmd.getParam() == 3) {
if (!stack[sp].isNull() || stack[sp].getItemId()==XD_DOUBLE
|| stack[sp].getItemId()==XD_DECIMAL
|| stack[sp].getItemId()==XD_LONG){
altitude = stack[sp].doubleValue();
} else {
name = stack[sp].stringValue();
}
latitude = stack[sp-2].doubleValue();
longitude = stack[sp-1].doubleValue();
sp -= 2;
} else {
latitude = stack[sp-1].doubleValue();
longitude = stack[sp].doubleValue();
sp--;
}
try {
stack[sp] = new DefGPSPosition(
new GPSPosition(latitude, longitude, altitude, name));
} catch (Exception ex) {
//Incorrect GPS position &{0}
cp.putError(chkNode, XDEF.XDEF222,
latitude+","+longitude+","+altitude+","+name);
stack[sp] = DefNull.genNullValue(XD_GPSPOSITION);
}
return sp;
}
case NEW_CURRAMOOUNT: {
try {
stack[sp-1] = new DefPrice(new Price(
stack[sp-1].doubleValue(), stack[sp].stringValue()));
} catch (Exception ex) {
//"Invalid currency code: "{0}"
cp.putError(chkNode, XDEF.XDEF575,
stack[sp-1].toString() + " " + stack[sp].stringValue());
stack[sp-1] = DefNull.genNullValue(XD_PRICE);
}
return --sp;
}
////////////////////////////////////////////////////////////////////////////////
// External methods
////////////////////////////////////////////////////////////////////////////////
case EXTMETHOD:
case EXTMETHOD_CHKEL:
case EXTMETHOD_ARRAY:
case EXTMETHOD_CHKEL_ARRAY:
case EXTMETHOD_XXNODE:
case EXTMETHOD_XDARRAY:
case EXTMETHOD_CHKEL_XDARRAY:
case EXTMETHOD_XXNODE_XDARRAY:
case EXTMETHOD_XXELEM: {
int paramCount = cmd.getParam();
short code = cmd.getCode();
Object[] pars;
XDValue[] parlist;
CodeExtMethod dm = (CodeExtMethod) cmd;
Method m = dm.getExtMethod();
if (code == EXTMETHOD ||
code == EXTMETHOD_CHKEL ||
code == EXTMETHOD_XXNODE ||
code == EXTMETHOD_ARRAY ||
code == EXTMETHOD_XDARRAY ||
code == EXTMETHOD_CHKEL_XDARRAY ||
code == EXTMETHOD_XXNODE_XDARRAY ||
code == EXTMETHOD_CHKEL_ARRAY ||
code == EXTMETHOD_XXELEM) {
if (paramCount == 0) {
switch (code) {
case EXTMETHOD_CHKEL_XDARRAY:
pars = new Object[] {chkNode, new XDValue[0]};
break;
case EXTMETHOD_XXNODE_XDARRAY:
pars = new Object[] {
(XXNode) chkNode, new XDValue[0]};
break;
case EXTMETHOD_XDARRAY:
pars = new Object[] {new XDValue[0]};
break;
case EXTMETHOD_CHKEL:
pars = new Object[] {chkNode};
break;
case EXTMETHOD_XXNODE:
pars = new Object[] {(XXNode) chkNode};
break;
case EXTMETHOD_XXELEM:
pars = new Object[] {(XXElement) chkNode};
break;
default:
pars = new Object[0];
}
} else {
int k;
if (code == EXTMETHOD_CHKEL ||
code == EXTMETHOD_XXNODE ||
code == EXTMETHOD_XXNODE_XDARRAY ||
code == EXTMETHOD_CHKEL_XDARRAY ||
code == EXTMETHOD_CHKEL_ARRAY) {
pars = new Object[paramCount + 1];
pars[0] = chkNode;
if (code == EXTMETHOD_XXNODE ||
code == EXTMETHOD_XXNODE_XDARRAY) {
pars[0] = (XXNode) chkNode;
}
k = 1;
} else {
pars = new Object[paramCount];
k = 0;
}
if (code == EXTMETHOD ||
code == EXTMETHOD_CHKEL ||
code == EXTMETHOD_XXELEM ||
code == EXTMETHOD_XXNODE) {
Class<?>[] p = m.getParameterTypes();
for (int i = sp - paramCount + 1, j = 0;
i <= sp; i++, j++) {
if (p[j + k].equals(XDValue.class)) {
pars[j + k] = stack[i];
continue;
}
switch (stack[i].getItemId()) {
case XD_DECIMAL:
pars[j + k] = stack[i].decimalValue();
break;
case XD_BIGINTEGER:
pars[j + k] = stack[i].integerValue();
break;
case XD_LONG: {
Class<?> x;
if ((x = p[j+k]).equals(Long.TYPE) ||
x.equals(Long.class)) {
pars[j+k] = stack[i].longValue();
} else if (x.equals(Integer.TYPE)
|| x.equals(Integer.class)){
pars[j+k] = stack[i].intValue();
} else if (x.equals(Short.TYPE)
|| x.equals(Short.class)){
pars[j+k] = stack[i].shortValue();
} else if (x.equals(Byte.TYPE) ||
x.equals(Byte.class)){
pars[j+k] = stack[i].byteValue();
}
break;
}
case XD_DOUBLE:
Class<?> x;
if ((x=p[j+k]).equals(Double.TYPE)
|| x.equals(Double.class)) {
pars[j+k] = stack[i].doubleValue();
} else if (x.equals(Float.TYPE)
|| x.equals(Float.class)) {
pars[j+k] = stack[i].floatValue();
}
break;
case XD_BOOLEAN:
pars[j + k] = stack[i].booleanValue() ?
Boolean.TRUE : Boolean.FALSE;
break;
case XD_STRING:
pars[j + k]= stack[i].stringValue();
break;
case XD_DATETIME:
pars[j + k] = stack[i].datetimeValue();
break;
case XD_DURATION:
pars[j + k] = stack[i].durationValue();
break;
case XD_ELEMENT:
pars[j + k] = stack[i].getElement();
break;
case XD_CONTAINER:
pars[j + k] = stack[i];
break;
case XD_BYTES:
pars[j + k] = stack[i].getBytes();
break;
case XD_XPATH:
pars[j + k] = stack[i].stringValue();
break;
case XD_INETADDR:
pars[j + k] = stack[i].getObject();
break;
case XD_REGEX:
case XD_REGEXRESULT:
case XD_INPUT:
case XD_OUTPUT:
case XD_RESULTSET:
case XD_STATEMENT:
case XD_SERVICE:
case XX_ELEMENT:
case XD_PARSERESULT:
case XD_ANY:
case XD_OBJECT:
case XX_ATTR:
case XX_TEXT:
case XD_PARSER:
case XD_GPSPOSITION:
case XD_PRICE:
case XD_EMAIL:
pars[j + k] = stack[i];
break;
default:
//Internal error: &{0}
throw new SError(XDEF.XDEF202,
"Undefined type on PC=" +
(pc - 1) + "; " +
cmd.getClass().getName() +
"; code= " + code);
}
}
} else {
parlist = new XDValue[paramCount];
System.arraycopy(stack,
sp - paramCount + 1, parlist, 0, paramCount);
switch (code) {
case EXTMETHOD_CHKEL_XDARRAY:
case EXTMETHOD_CHKEL_ARRAY:
pars = new Object[] {chkNode, parlist};
break;
case EXTMETHOD_XXNODE_XDARRAY:
pars=new Object[]{(XXNode)chkNode, parlist};
break;
default:
//EXTERNAL_METHOD_ARRAY_CODE
pars = new Object[] {parlist};
break;
}
}
sp -= paramCount;
}
Object o = m.invoke(null, pars);
short type = dm.getItemId();
if (o == null) {
stack[++sp] = DefNull.genNullValue(type);
} else if (o instanceof XDValue) {
if (type != XD_VOID) {
XDValue x = (XDValue) o;
if (type == x.getItemId()) {
stack[++sp] = x;
} else {
switch (dm.getItemId()) {
case XD_VOID:
break;
case XD_DECIMAL:
stack[++sp] = new DefDecimal(
(BigDecimal) o);
break;
case XD_BIGINTEGER:
stack[++sp] = new DefBigInteger(
(BigInteger) o);
break;
case XD_LONG:
stack[++sp] =
new DefLong(x.longValue());
break;
case XD_DOUBLE:
stack[++sp] =
new DefDouble(x.doubleValue());
break;
case XD_BOOLEAN:
stack[++sp] =
new DefBoolean(x.booleanValue());
break;
case XD_STRING:
stack[++sp] =
new DefString(x.stringValue());
break;
default:
stack[++sp] = x;
break;
}
}
}
} else {
switch (dm.getItemId()) {
case XD_VOID:
break;
case XD_DECIMAL:
stack[++sp] = new DefDecimal((BigDecimal) o);
break;
case XD_BIGINTEGER:
stack[++sp] = new DefBigInteger((BigInteger) o);
break;
case XD_LONG:
stack[++sp] =
new DefLong(((Number) o).longValue());
break;
case XD_DOUBLE:
stack[++sp] =
new DefDouble(((Number) o).doubleValue());
break;
case XD_BOOLEAN:
stack[++sp] = new DefBoolean(((Boolean) o));
break;
case XD_STRING:
stack[++sp] = new DefString((String) o);
break;
case XD_DATETIME: {
stack[++sp] = new DefDate((SDatetime) o);
break;
}
case XD_DURATION: {
stack[++sp] = new DefDuration((SDuration) o);
break;
}
case XD_ELEMENT:
stack[++sp] = new DefElement((Element) o);
break;
case XD_CONTAINER:
stack[++sp] = (DefContainer) o;
break;
case XD_BYTES:
stack[++sp] = new DefBytes((byte[]) o);
break;
case XD_OBJECT:
stack[++sp] = (DefObject) o;
break;
case XD_PARSER:
stack[++sp] = (XDParser) o;
break;
case XD_PARSERESULT:
stack[++sp] = (XDParseResult) o;
break;
case XD_ANY:
stack[++sp] = (XDValue) o;
break;
default:
//Internal error: &{0}
throw new SError(XDEF.XDEF202,
"Undefined result type on PC = "+
(pc - 1) + "; " +
cmd.getClass().getName() + "; code= " +
code + "; type= " + dm.getItemId());
}
}
return sp;
}
if (paramCount != 0) {
parlist = new XDValue[paramCount];
sp -= paramCount;
System.arraycopy(stack, sp + 1, parlist, 0, paramCount);
}
return sp;
}
default:
//Internal error: &{0}
throw new XXException(XDEF.XDEF202,
"Undefined code on PC = " + (pc - 1) + "; " +
cmd.toString() + "; code=" +
CodeDisplay.getCodeName(cmd.getCode()));
}
}
}
|
/*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.comprehend.model;
import javax.annotation.Generated;
/**
 * <p>
 * The KMS customer managed key (CMK) entered cannot be validated. Verify the key and re-enter it.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class KmsKeyValidationException extends com.amazonaws.services.comprehend.model.AmazonComprehendException {

    /** Serialization version marker. */
    private static final long serialVersionUID = 1L;

    /**
     * Builds a new {@code KmsKeyValidationException} carrying the given error description.
     *
     * @param message
     *        Describes the error encountered.
     */
    public KmsKeyValidationException(String message) {
        super(message);
    }
}
|
/**
*
*/
package uk.bl.wa.nlp.wordvec;
/*-
* #%L
* warc-nlp
* %%
* Copyright (C) 2013 - 2018 The webarchive-discovery project contributors
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import java.io.Reader;
import java.io.StringReader;
import java.util.Iterator;
import java.util.List;
import org.deeplearning4j.text.sentenceiterator.SentenceIterator;
import org.deeplearning4j.text.sentenceiterator.SentencePreProcessor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import edu.stanford.nlp.ling.HasWord;
import edu.stanford.nlp.ling.Sentence;
import edu.stanford.nlp.process.DocumentPreprocessor;
/**
 * A deeplearning4j {@link SentenceIterator} backed by the Stanford CoreNLP
 * {@link DocumentPreprocessor} sentence splitter.
 *
 * @author Andrew Jackson <Andrew.Jackson@bl.uk>
 *
 */
public class StanfordSentenceIterator implements SentenceIterator {

    // Logger made 'final': it is assigned exactly once.
    private static final Logger log = LoggerFactory
            .getLogger(StanfordSentenceIterator.class);

    /** Stanford sentence splitter over the underlying reader. */
    private final DocumentPreprocessor dp;

    /** Current iteration state; re-created by {@link #reset()}. */
    private Iterator<List<HasWord>> dpi;

    /**
     * Iterate over the sentences of the given text.
     *
     * @param paragraph the text to split into sentences.
     */
    public StanfordSentenceIterator(String paragraph) {
        Reader reader = new StringReader(paragraph);
        dp = new DocumentPreprocessor(reader);
        dpi = dp.iterator();
    }

    /**
     * Iterate over the sentences read from the given reader.
     *
     * @param reader source of the text to split into sentences.
     */
    public StanfordSentenceIterator(Reader reader) {
        dp = new DocumentPreprocessor(reader);
        dpi = dp.iterator();
    }

    /* (non-Javadoc)
     * @see org.deeplearning4j.text.sentenceiterator.SentenceIterator#nextSentence()
     */
    @Override
    public String nextSentence() {
        List<HasWord> item = dpi.next();
        String sentence = Sentence.listToString(item);
        // Parameterized logging: the message string is only built when the
        // INFO level is actually enabled.
        log.info("Got item {}", sentence);
        return sentence;
    }

    /* (non-Javadoc)
     * @see org.deeplearning4j.text.sentenceiterator.SentenceIterator#hasNext()
     */
    @Override
    public boolean hasNext() {
        try {
            return dpi.hasNext();
        } catch (Exception e) {
            // Deliberate best-effort: a failing tokenizer ends iteration
            // instead of propagating the exception.
            log.error("Exception when looking for the next sentence!", e);
            return false;
        }
    }

    /* (non-Javadoc)
     * @see org.deeplearning4j.text.sentenceiterator.SentenceIterator#reset()
     */
    @Override
    public void reset() {
        dpi = dp.iterator();
    }

    /* (non-Javadoc)
     * @see org.deeplearning4j.text.sentenceiterator.SentenceIterator#finish()
     */
    @Override
    public void finish() {
        // Nothing to release; the caller owns the underlying Reader.
    }

    /* (non-Javadoc)
     * @see org.deeplearning4j.text.sentenceiterator.SentenceIterator#getPreProcessor()
     */
    @Override
    public SentencePreProcessor getPreProcessor() {
        return null;
    }

    /* (non-Javadoc)
     * @see org.deeplearning4j.text.sentenceiterator.SentenceIterator#setPreProcessor(org.deeplearning4j.text.sentenceiterator.SentencePreProcessor)
     */
    @Override
    public void setPreProcessor(SentencePreProcessor preProcessor) {
        // UnsupportedOperationException is the idiomatic type here; it is a
        // RuntimeException subtype, so existing catch clauses still match.
        throw new UnsupportedOperationException("Unsupported");
    }
}
|
package org.bukkit.event.block;
import org.bukkit.block.Block;
import org.bukkit.entity.Player;
import org.bukkit.event.Cancellable;
import org.bukkit.event.HandlerList;
import org.bukkit.inventory.ItemStack;
import org.jetbrains.annotations.NotNull;
/**
 * Called when a block is damaged by a player.
 * <p>
 * If a Block Damage event is cancelled, the block will not be damaged.
 */
public class BlockDamageEvent extends BlockEvent implements Cancellable {
    private static final HandlerList handlers = new HandlerList();
    private final Player player;
    private final ItemStack itemstack;
    private boolean instaBreak;
    private boolean cancel;

    public BlockDamageEvent(@NotNull final Player player, @NotNull final Block block, @NotNull final ItemStack itemInHand, final boolean instaBreak) {
        super(block);
        this.player = player;
        this.itemstack = itemInHand;
        this.instaBreak = instaBreak;
        this.cancel = false;
    }

    /**
     * Returns the player who is damaging the block.
     *
     * @return The player damaging the block involved in this event
     */
    @NotNull
    public Player getPlayer() {
        return player;
    }

    /**
     * Checks whether the damaged block is flagged to break instantly.
     *
     * @return true if the block should instantly break when damaged by the
     *     player
     */
    public boolean getInstaBreak() {
        return instaBreak;
    }

    /**
     * Flags whether the damaged block should break instantly.
     *
     * @param bool true if you want the block to instantly break when damaged
     *     by the player
     */
    public void setInstaBreak(boolean bool) {
        this.instaBreak = bool;
    }

    /**
     * Returns the item held by the player while damaging the block.
     *
     * @return The ItemStack for the item currently in the player's hand
     */
    @NotNull
    public ItemStack getItemInHand() {
        return itemstack;
    }

    @Override
    public boolean isCancelled() {
        return cancel;
    }

    @Override
    public void setCancelled(boolean cancel) {
        this.cancel = cancel;
    }

    @NotNull
    @Override
    public HandlerList getHandlers() {
        return handlers;
    }

    @NotNull
    public static HandlerList getHandlerList() {
        return handlers;
    }
}
|
/*
* Copyright 2015-2017 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.core.models.sample;
import org.opencb.commons.datastore.core.ObjectMap;
import org.opencb.opencga.core.models.AbstractAclEntry;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.stream.Collectors;
/**
 * ACL entry holding the set of permissions a member has on a sample.
 *
 * Created by pfurio on 11/05/16.
 */
public class SampleAclEntry extends AbstractAclEntry<SampleAclEntry.SamplePermissions> {

    /** Permissions that can be granted on a sample. */
    public enum SamplePermissions {
        VIEW,
        UPDATE,
        DELETE,
        WRITE_ANNOTATIONS,
        VIEW_ANNOTATIONS,
        DELETE_ANNOTATIONS
    }

    /** Creates an entry with an empty member name and no permissions. */
    public SampleAclEntry() {
        this("", Collections.emptyList());
    }

    /**
     * Creates an entry with an explicit permission set.
     *
     * @param member member (user or group) the entry applies to.
     * @param permissions permissions granted to the member.
     */
    public SampleAclEntry(String member, EnumSet<SamplePermissions> permissions) {
        super(member, permissions);
    }

    /**
     * Creates an entry from a map of permission-name to boolean flags; only
     * permissions whose flag is present and true are granted.
     *
     * @param member member (user or group) the entry applies to.
     * @param permissions map keyed by {@link SamplePermissions} names.
     */
    public SampleAclEntry(String member, ObjectMap permissions) {
        super(member, EnumSet.noneOf(SamplePermissions.class));
        EnumSet<SamplePermissions> aux = EnumSet.allOf(SamplePermissions.class);
        for (SamplePermissions permission : aux) {
            if (permissions.containsKey(permission.name()) && permissions.getBoolean(permission.name())) {
                this.permissions.add(permission);
            }
        }
    }

    /**
     * Creates an entry from a list of permission names.
     *
     * @param member member (user or group) the entry applies to.
     * @param permissions names matching {@link SamplePermissions} constants;
     *        an unknown name makes {@code valueOf} throw.
     */
    public SampleAclEntry(String member, List<String> permissions) {
        super(member, EnumSet.noneOf(SamplePermissions.class));
        // isEmpty() is the idiomatic emptiness check (was: size() > 0).
        if (!permissions.isEmpty()) {
            this.permissions.addAll(permissions.stream().map(SamplePermissions::valueOf).collect(Collectors.toList()));
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dromara.hmily.repository.redis.jedis;
import java.util.Set;
/**
* JedisClient.
*
* @author xiaoyu(Myth)
*/
/**
 * Minimal redis client abstraction used by the hmily repository layer.
 * Method names map one-to-one onto the corresponding redis/Jedis commands,
 * allowing the same repository code to run against a single node, a sentinel
 * pool, or a cluster.
 *
 * @author xiaoyu(Myth)
 */
public interface JedisClient {

    /**
     * Stores a string value under the given key (redis SET).
     *
     * @param key   the key
     * @param value the value to store
     * @return the status reply from redis
     */
    String set(String key, String value);

    /**
     * Stores a binary value under the given key (redis SET).
     *
     * @param key   the key
     * @param value the value to store
     * @return the status reply from redis
     */
    String set(String key, byte[] value);

    /**
     * Deletes the given keys (redis DEL).
     *
     * @param keys the keys to delete
     * @return the number of keys that were removed
     */
    Long del(String... keys);

    /**
     * Fetches the string value stored at a key (redis GET).
     *
     * @param key the key
     * @return the stored value, or null if the key does not exist
     */
    String get(String key);

    /**
     * Fetches the binary value stored at a key (redis GET).
     *
     * @param key the key
     * @return the stored value, or null if the key does not exist
     */
    byte[] get(byte[] key);

    /**
     * Finds all keys matching a glob-style pattern (redis KEYS).
     *
     * @param pattern the glob-style pattern
     * @return the set of matching keys
     */
    Set<byte[]> keys(byte[] pattern);

    /**
     * Finds all keys matching a glob-style pattern (redis KEYS).
     *
     * @param key the glob-style pattern
     * @return the set of matching keys
     */
    Set<String> keys(String key);

    /**
     * Sets a field in the hash stored at a key (redis HSET).
     *
     * @param key   the hash key
     * @param item  the field name
     * @param value the field value
     * @return 1 if the field was newly created, 0 if an existing field was updated
     */
    Long hset(String key, String item, String value);

    /**
     * Reads a field from the hash stored at a key (redis HGET).
     *
     * @param key  the hash key
     * @param item the field name
     * @return the field value, or null if the key or field does not exist
     */
    String hget(String key, String item);

    /**
     * Removes a field from the hash stored at a key (redis HDEL).
     *
     * @param key  the hash key
     * @param item the field name
     * @return the number of fields that were removed
     */
    Long hdel(String key, String item);

    /**
     * Atomically increments the integer value stored at a key by one
     * (redis INCR).
     *
     * @param key the key
     * @return the value after the increment
     */
    Long incr(String key);

    /**
     * Atomically decrements the integer value stored at a key by one
     * (redis DECR).
     *
     * @param key the key
     * @return the value after the decrement
     */
    Long decr(String key);

    /**
     * Sets a time-to-live on a key (redis EXPIRE).
     *
     * @param key    the key
     * @param second the timeout, in seconds
     * @return 1 if the timeout was set, 0 otherwise
     */
    Long expire(String key, int second);

    /**
     * Returns the members of a sorted set within an index range
     * (redis ZRANGE).
     *
     * @param key   the sorted-set key
     * @param start the start index (0-based; may be negative to count from the end)
     * @param end   the end index (inclusive; may be negative)
     * @return the members in the requested range
     */
    Set<String> zrange(String key, long start, long end);
}
|
/*
* $Id$
*
* This is open-source software written by Sutron Corporation, under
* contract to the federal government. You are free to copy and use this
* source code for your own purposes, except that no part of the information
* contained in this file may be claimed to be proprietary.
*
* Except for specific contractual terms between Sutron and the federal
* government, this source code is provided completely without warranty.
* For more information contact: info@ilexeng.com
*/
package lrgs.noaaportrecv;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import ilex.util.ByteUtil;
import ilex.util.EnvExpander;
import ilex.util.Logger;
import lrgs.common.DcpMsg;
import lrgs.iridiumsbd.IridiumSbdInterface;
import lrgs.lrgsmain.LrgsConfig;
/**
Handles the parsing of messages from the NOAAPORT socket.
*/
/**
 * Parses the NOAAPORT product stream read from a socket and converts the
 * embedded GOES DCP messages into DOMSAT-format {@link DcpMsg} objects,
 * which are handed to {@link NoaaportRecv} for archiving.
 *
 * The parser is a byte-at-a-time state machine:
 * HUNT (look for SOH 0x01) -&gt; [SEQNUM] -&gt; PROPHEADER (WMO header up to
 * 0x1E) -&gt; DCPMSG (body up to ETX 0x03).
 */
public class NoaaportProtocol
{
    protected NoaaportRecv noaaportRecv;
    protected InputStream input;

    /** Parser states; SEQNUM is only entered when the peer sends sequence numbers. */
    enum States { HUNT, SEQNUM, PROPHEADER, DCPMSG };
    States currentState;

    /** Buffer for the WMO/product header (bytes between SOH and 0x1E). */
    private byte header_buf[];
    private int hb_len = 0;

    /** Buffer for the DCP message body (bytes between 0x1E and ETX). */
    protected byte message_buf[];
    protected int mb_len = 0;

    /** Max header bytes accepted before giving up and re-hunting for SOH. */
    private static final int HEADER_MAX = 100;

    /** Max message bytes accepted before giving up and re-hunting for SOH.
     *  Left non-final because it is protected and may be written elsewhere. */
    protected static int MESSAGE_MAX = 20000;

    protected NoaaportConnection parent;
    protected String clientName;

    /** True when the peer prefixes each product with a numeric sequence number. */
    private boolean seqNumPresent = false;
    private int seqNum;

    /** Optional raw capture of every byte read, for offline debugging. */
    private BufferedOutputStream captureStream = null;
    private String captureFileName = null;

    /**
     * Constructs the protocol parser for one connection.
     *
     * @param input stream of raw NOAAPORT bytes from the peer
     * @param noaaportRecv receiver that archives parsed messages
     * @param parent the owning connection (used for disconnect and to detect
     *        whether sequence numbers are present)
     * @param clientName name of the peer, used in log messages
     * @throws IOException declared for subclasses; this constructor itself
     *         swallows capture-file open failures
     */
    public NoaaportProtocol(InputStream input, NoaaportRecv noaaportRecv,
        NoaaportConnection parent, String clientName)
        throws IOException
    {
        this.input = input;
        this.noaaportRecv = noaaportRecv;
        this.parent = parent;
        this.clientName = clientName;

        // Sequence #s are present for Unisys and PDI with passthrough. Not Marta.
        // Also for Unisys, we are the client. Thus use that to detect:
        seqNumPresent = parent instanceof NoaaportClient;

        header_buf = new byte[HEADER_MAX];
        message_buf = new byte[MESSAGE_MAX];
        hb_len = mb_len = 0;
        currentState = States.HUNT;

        // If a capture file is configured, open a UTC-time-stamped copy of it
        // so every byte read from this connection can be replayed later.
        captureFileName = EnvExpander.expand(
            LrgsConfig.instance().noaaportCaptureFile);
        if (captureFileName != null && captureFileName.trim().length() > 0)
        {
            SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd-HHmmss");
            sdf.setTimeZone(TimeZone.getTimeZone("UTC"));
            captureFileName = captureFileName + "-" + sdf.format(new Date());
            info(" New client, captureFile = '" + captureFileName + "'");
            File f = new File(EnvExpander.expand(captureFileName));
            try
            {
                captureStream = new BufferedOutputStream(
                    new FileOutputStream(f));
            }
            catch(IOException ex)
            {
                // Capture is a debugging aid only; continue without it.
                warning("Cannot open capture file '" + f.getPath() + "': "
                    + ex);
                captureStream = null;
            }
        }
    }

    /**
     * Reads one byte from the socket, echoing it to the capture stream.
     *
     * @return the byte in 0..255, or -1 if the peer hung up (a disconnect is
     *         initiated before returning in that case)
     * @throws IOException on socket read or capture write error
     */
    protected int readByte()
        throws IOException
    {
        int c = input.read();
        if (c == -1)
        {
            warning("NOAAPORT receiver hung up.");
            disconnect();
            return -1;
        }
        c = c & 0xff;
        if (captureStream != null)
            captureStream.write(c);
        return c;
    }

    /**
     * Repeatedly called from base-class until connection is broken.
     * Dispatches to the handler for the current parser state; any I/O error
     * causes a disconnect.
     */
    protected void read()
    {
        noaaportRecv.setStatus("Receiving");
        try
        {
            // Attempt to read some data from the client.
            switch(currentState)
            {
            case HUNT:
                huntState();
                break;
            case SEQNUM:
                seqNumState();
                break;
            case PROPHEADER:
                productHeaderState();
                break;
            case DCPMSG:
                dcpmsgState();
                break;
            default:
                warning("Unknown state " + currentState
                    + " -- disconnecting.");
                disconnect();
                break;
            }
        }
        catch(IOException ex)
        {
            info("" + NoaaportRecv.EVT_RECV_FAILED
                + " Error on connection to " + clientName + ": " + ex);
            disconnect();
        }
    }

    /**
     * Look for CTRL-A (SOH). On finding it, reset the buffers and advance to
     * SEQNUM or PROPHEADER depending on whether this peer sends sequence numbers.
     * @throws IOException
     */
    private void huntState()
        throws IOException
    {
        int c = -1;
        while(parent.isConnected() && (c = readByte()) != -1)
        {
            if (c == 0x01)
            {
                currentState =
                    seqNumPresent? States.SEQNUM : States.PROPHEADER;
                hb_len = mb_len = 0;
                seqNum = -1;
                return;
            }
        }
    }

    /**
     * Skip white space \r\r\n then parse decimal digits into the sequence
     * number. Whitespace after at least one digit terminates the field and
     * advances to PROPHEADER. Other characters are ignored.
     * @throws IOException
     */
    private void seqNumState()
        throws IOException
    {
        int i = readByte();
        if (i == -1)
            return;
        char c = (char)i;
        if (Character.isWhitespace(c))
        {
            // whitespace after seqnum -- switch to PROPHEADER
            if (seqNum != -1)
                currentState = States.PROPHEADER;
            return;
        }
        else if (Character.isDigit(c))
        {
            if (seqNum == -1)
                // BUGFIX: convert the first digit from ASCII to its numeric
                // value. The previous code stored the raw byte (e.g. '1' ->
                // 49), corrupting every parsed sequence number.
                seqNum = i - 48;
            else
                seqNum = (seqNum*10) + (i - 48);
        }
    }

    /**
     * Header is after SOH (01) up until 1E (EOH).
     * Collect header data in header_buf.
     * Verify that it has office ID KWAL and type 'S'; anything else is
     * skipped and the parser returns to HUNT.
     * @throws IOException
     */
    private void productHeaderState()
        throws IOException
    {
        int c = readByte();
        if (c == -1)
            return;

        // Skip initial whitespace
        if (Character.isWhitespace((char)c) && hb_len == 0)
            return;
        else if (c == 0x1e) // 0x1E means end of header
        {
            if (hb_len == 0)
            {
                warning(":" + NoaaportRecv.EVT_HEADER_PARSE
                    + " No data in WMO header, len=" + hb_len);
                currentState = States.HUNT;
                return;
            }
            // Type 'S' marks a DCP-message product; everything else is skipped.
            if ((char)header_buf[0] != 'S')
            {
                info(" Skipping non-DCP-message with WMO header '" +
                    (new String(header_buf, 0, hb_len < 6 ? hb_len : 6)) + "'");
                currentState = States.HUNT;
                return;
            }
            if (hb_len <= 6)
            {
                debug(":" + NoaaportRecv.EVT_HEADER_PARSE
                    + " No data after WMO header, len=" + hb_len);
                currentState = States.HUNT;
                return;
            }
            if (hb_len < 11)
            {
                debug(":" + NoaaportRecv.EVT_HEADER_PARSE
                    + " No office-ID header, len=" + hb_len);
                currentState = States.HUNT;
                return;
            }
            // Office ID must be 'KWAL'
            String officeId = new String(header_buf, 7 , 4);
            if (!officeId.equals("KWAL"))
            {
                info(" Skipping non-DCP-message with office '" + officeId + "'");
                currentState = States.HUNT;
                return;
            }
            mb_len = 0;
            currentState = States.DCPMSG;
        }
        else if (c == 0x01)
        {
            // A new SOH inside the header means the previous product was
            // truncated; restart header collection.
            debug(":" + NoaaportRecv.EVT_HEADER_PARSE
                + " Unexpected SOH. No 0x1e seen. Discarding "
                + hb_len + " bytes.");
            currentState = States.PROPHEADER;
            hb_len = mb_len = 0;
            return;
        }
        else
        {
            header_buf[hb_len++] = (byte)c;
            if (hb_len == HEADER_MAX)
            {
                debug(":" + NoaaportRecv.EVT_HEADER_PARSE
                    + " Header too long before 0x1E");
                currentState = States.HUNT;
            }
        }
    }

    /**
     * Accumulates message bytes until ETX (0x03), then processes the message.
     * An unexpected SOH restarts header collection; an over-long message
     * returns to HUNT.
     * @throws IOException
     */
    private void dcpmsgState()
        throws IOException
    {
        int c = readByte();
        if (c == -1)
            return;

        if (c == 0x03)
        {
            // 29 bytes is the minimum for the fixed header + trailer fields.
            if (mb_len < 29)
                debug(":" + NoaaportRecv.EVT_MESSAGE_PARSE
                    + " Message too short, len=" + mb_len
                    + ", hdr='" + (new String(header_buf, 0, hb_len))
                    + "', msg='" + (new String(message_buf, 0, mb_len)));
            else
                processMessage();
            currentState = States.HUNT;
        }
        else if (c == 0x01)
        {
            debug(":" + NoaaportRecv.EVT_MESSAGE_PARSE
                + " Unexpected SOH. No ETX seen. Discarding "
                + mb_len + " bytes.");
            currentState = States.PROPHEADER;
            hb_len = mb_len = 0;
            return;
        }
        else
        {
            message_buf[mb_len++] = (byte)c;
            if (mb_len == MESSAGE_MAX)
            {
                debug(":" + NoaaportRecv.EVT_MESSAGE_PARSE
                    + " Message too long before 0x03");
                currentState = States.HUNT;
            }
        }
    }

    /**
     * Called when we now have a complete message sitting in the buffer.
     * Validates the fixed fields, converts the NOAAPORT layout to DOMSAT
     * layout, and archives the resulting DcpMsg. On any field validation
     * failure the message is skipped and the parser returns to HUNT.
     */
    protected void processMessage()
    {
        debug(" Processing buffered message of length " + mb_len
            + " '" + new String(message_buf) + "' byte[0]=" + message_buf[0]
            + ", byte[1]=" + message_buf[1]);

        // Remove white space from the end of the message buffer
        while(mb_len > 0 && Character.isWhitespace((char)message_buf[mb_len-1]))
            mb_len--;

        // CCCS should now be end of buffer, where CCC is 3-digit channel
        // and S is 'E' or 'W'

        // Convert the NOAAPORT format to DOMSAT
        // (18=header bytes before msg, 12 = trailer bytes after msg)
        int domsatLen = mb_len - 18 - 12;
        byte domsatBuf[] = new byte[domsatLen + DcpMsg.IDX_DATA];

        // Copy & validate the 8-hex-digit DCP address.
        for(int i=0; i<8; i++)
        {
            byte c = message_buf[i];
            if (!ByteUtil.isHexChar(c))
            {
                warning("" + NoaaportRecv.EVT_MESSAGE_PARSE
                    + " non-hex-digit '" + (char)c
                    + "' at position " + i
                    + " in DCP address field -- msg skipped.");
                currentState = States.HUNT;
                return;
            }
            domsatBuf[DcpMsg.IDX_DCP_ADDR + i] = c;
        }

        // Assume my clock is up-to-date. So day-of-year in the msg
        // should always be <= today. If it's > today, assume that it's
        // from the previous year.
        int day = ((int)message_buf[9] - (int)'0') * 100
            + ((int)message_buf[10] - (int)'0') * 10
            + ((int)message_buf[11] - (int)'0');
        GregorianCalendar cal = new GregorianCalendar();
        cal.setTimeZone(TimeZone.getTimeZone("UTC"));
        int year = cal.get(Calendar.YEAR);
        if (day > cal.get(Calendar.DAY_OF_YEAR))
            --year;
        domsatBuf[DcpMsg.IDX_YEAR] = (byte)((byte)'0' + ((year % 100) / 10));
        domsatBuf[DcpMsg.IDX_YEAR+1] = (byte)((int)'0' + (year % 10));

        // Copy & validate the 9-digit DDDHHMMSS date/time field.
        for(int i=0; i<9; i++)
        {
            char c = (char)message_buf[i+9];
            if (!Character.isDigit(c))
            {
                warning("" + NoaaportRecv.EVT_MESSAGE_PARSE
                    + " non-digit in Date/Time field -- msg skipped.");
                currentState = States.HUNT;
                return;
            }
            domsatBuf[DcpMsg.IDX_DAY + i] = message_buf[i+9];
        }

        // '?' failure code passes through; anything else becomes 'G' (good).
        domsatBuf[DcpMsg.IDX_FAILCODE] =
            message_buf[8] == (byte)'?' ? (byte)'?' : (byte)'G';

        // Trailer fields sit at fixed offsets from the end of the message.
        domsatBuf[DcpMsg.IDX_SIGSTRENGTH] = message_buf[mb_len-11];
        domsatBuf[DcpMsg.IDX_SIGSTRENGTH+1] = message_buf[mb_len-10];
        domsatBuf[DcpMsg.IDX_FREQOFFSET] = message_buf[mb_len-9];
        domsatBuf[DcpMsg.IDX_FREQOFFSET+1] = message_buf[mb_len-8];
        domsatBuf[DcpMsg.IDX_MODINDEX] = message_buf[mb_len-7];
        domsatBuf[DcpMsg.IDX_DATAQUALITY] = message_buf[mb_len-6];

        // 3-digit GOES channel may be blank-padded; blanks become '0'.
        for(int i=0; i<3; i++)
        {
            char c = (char)message_buf[mb_len-4+i];
            if (c == ' ')
                c = '0';
            else if (!Character.isDigit(c))
            {
                warning("" + NoaaportRecv.EVT_MESSAGE_PARSE
                    + " non-digit in channel field '"
                    + (new String(message_buf, mb_len-4, 3))
                    + "' -- msg skipped.");
                currentState = States.HUNT;
                return;
            }
            domsatBuf[DcpMsg.IDX_GOESCHANNEL+i] = (byte)c;
        }
        domsatBuf[DcpMsg.IDX_GOES_SC] = message_buf[mb_len-1];
        domsatBuf[DcpMsg.DRGS_CODE] = (byte)'N';
        domsatBuf[DcpMsg.DRGS_CODE+1] = (byte)'P';

        // 5-digit, zero-padded data length.
        domsatBuf[DcpMsg.IDX_DATALENGTH] =
            (byte)(domsatLen / 10000 + (int)'0');
        domsatBuf[DcpMsg.IDX_DATALENGTH+1] =
            (byte)((domsatLen%10000) / 1000 + (int)'0');
        domsatBuf[DcpMsg.IDX_DATALENGTH+2] =
            (byte)((domsatLen%1000) / 100 + (int)'0');
        domsatBuf[DcpMsg.IDX_DATALENGTH+3] =
            (byte)((domsatLen%100) / 10 + (int)'0');
        domsatBuf[DcpMsg.IDX_DATALENGTH+4] =
            (byte)((domsatLen%10) + (int)'0');

        // Copy the message body (starts after the 18-byte NOAAPORT header).
        for(int i=0; i<domsatLen; i++)
            domsatBuf[DcpMsg.IDX_DATA+i] = message_buf[18+i];

        // NOTE(review): 37 is presumably the DOMSAT header length
        // (DcpMsg.IDX_DATA) -- confirm against DcpMsg before changing.
        DcpMsg msg = new DcpMsg(domsatBuf, domsatLen + 37, 0);
        if (seqNumPresent && seqNum >= 0)
            msg.setSequenceNum(seqNum);
        noaaportRecv.archive(msg);
    }

    /** Logs a warning, prefixed with the receiver's module name. */
    protected void warning(String msg)
    {
        Logger.instance().warning(noaaportRecv.module + ":" + msg);
    }

    /** Logs a debug message, prefixed with the receiver's module name. */
    protected void debug(String msg)
    {
        Logger.instance().debug1(noaaportRecv.module + ":" + msg);
    }

    /** Logs an info message, prefixed with the receiver's module name. */
    protected void info(String msg)
    {
        Logger.instance().info(noaaportRecv.module + ":" + msg);
    }

    /**
     * Closes the capture stream (best effort) and tells the parent
     * connection to disconnect.
     */
    protected void disconnect( )
    {
        if (captureStream != null)
        {
            try { captureStream.close(); captureStream = null; }
            catch(Exception ex)
            {
                // Deliberately ignored: we are tearing the connection down
                // and the capture file is a debugging aid only.
            }
        }
        noaaportRecv.setStatus("Disconnected");
        info("Disconnecting from " + clientName);
        parent.disconnect();
    }
}
|
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.ballerinalang.cassandra;
/**
* Constants for Cassandra Connector.
*
* @since 0.95.0
*/
/**
 * Constants for Cassandra Connector.
 *
 * Pure constants holder; neither this class nor its nested holders can be
 * instantiated.
 *
 * @since 0.95.0
 */
public final class Constants {

    /** Placeholder used in parameterized CQL statements. */
    public static final String QUESTION_MARK = "?";
    public static final String CASSANDRA_PACKAGE_PATH = "wso2/cassandra:0.0.0";
    public static final String CLIENT = "Client";
    public static final String CASSANDRA_PARAMETER = "Parameter";
    /** Generic message used when a database operation fails. */
    public static final String CASSANDRA_EXCEPTION_OCCURED = "Exception Occurred while executing Cassandra database "
            + "operation";
    public static final String CQL_TYPE_FIELD = "cqlType";
    public static final String VALUE_FIELD = "value";
    public static final String DATABASE_ERROR_DATA_RECORD_NAME = "DatabaseErrorData";
    public static final String DATABASE_ERROR_CODE = "{wso2/cassandra}DatabaseError";

    private Constants() {
        // Utility class: prevent instantiation.
    }

    /**
     * Constants for DataTypes.
     */
    public static final class DataTypes {
        public static final String LIST = "LIST";
        public static final String INT = "INT";
        public static final String BIGINT = "BIGINT";
        public static final String VARINT = "VARINT";
        public static final String FLOAT = "FLOAT";
        public static final String DOUBLE = "DOUBLE";
        public static final String TEXT = "TEXT";
        public static final String BOOLEAN = "BOOLEAN";

        private DataTypes() {
            // Constants holder: prevent instantiation.
        }
    }

    /**
     * Constants for Endpoint Configs.
     */
    public static final class EndpointConfig {
        public static final String HOST = "host";
        public static final String PORT = "port";
        public static final String USERNAME = "username";
        public static final String PASSWORD = "password";
        public static final String OPTIONS = "options";

        private EndpointConfig() {
            // Constants holder: prevent instantiation.
        }
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.blockmanagement;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.net.DFSNetworkTopology;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor.BlockTargetPair;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfoWithStorage;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.common.StorageInfo;
import org.apache.hadoop.hdfs.server.protocol.BlockCommand;
import org.apache.hadoop.hdfs.server.protocol.BlockECReconstructionCommand;
import org.apache.hadoop.hdfs.server.protocol.BlockECReconstructionCommand.BlockECReconstructionInfo;
import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand;
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.net.DNSToSwitchMapping;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.util.Shell;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.internal.util.reflection.Whitebox;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
public class TestDatanodeManager {
public static final Log LOG = LogFactory.getLog(TestDatanodeManager.class);
//The number of times the registration / removal of nodes should happen
final int NUM_ITERATIONS = 500;
private static DatanodeManager mockDatanodeManager(
FSNamesystem fsn, Configuration conf) throws IOException {
BlockManager bm = Mockito.mock(BlockManager.class);
BlockReportLeaseManager blm = new BlockReportLeaseManager(conf);
Mockito.when(bm.getBlockReportLeaseManager()).thenReturn(blm);
DatanodeManager dm = new DatanodeManager(bm, fsn, conf);
return dm;
}
  /**
   * Create an InetSocketAddress for a host:port string by delegating to
   * HostFileManager.parseEntry with dummy file/line identifiers.
   *
   * @param host a host identifier in host:port format
   * @return a corresponding InetSocketAddress object
   */
  private static InetSocketAddress entry(String host) {
    return HostFileManager.parseEntry("dummy", "dummy", host);
  }
  /**
   * This test checks that if a node is re-registered with a new software
   * version after the heartbeat expiry interval but before the HeartbeatManager
   * has a chance to detect this and remove it, the node's version will still
   * be correctly decremented.
   */
  @Test
  public void testNumVersionsCorrectAfterReregister()
      throws IOException, InterruptedException {
    //Create the DatanodeManager which will be tested
    FSNamesystem fsn = Mockito.mock(FSNamesystem.class);
    Mockito.when(fsn.hasWriteLock()).thenReturn(true);
    Configuration conf = new Configuration();
    // Zero heartbeat interval plus a 10ms recheck interval so the first
    // registration can expire during the short sleep below.
    conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 0);
    conf.setLong(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 10);
    DatanodeManager dm = mockDatanodeManager(fsn, conf);

    String storageID = "someStorageID1";
    String ip = "someIP" + storageID;

    // Register then reregister the same node but with a different version
    for (int i = 0; i <= 1; i++) {
      dm.registerDatanode(new DatanodeRegistration(
          new DatanodeID(ip, "", storageID, 9000, 0, 0, 0),
          null, null, "version" + i));
      if (i == 0) {
        // Sleep past the (tiny) heartbeat expiry before re-registering.
        Thread.sleep(25);
      }
    }

    //Verify DatanodeManager has the correct count
    Map<String, Integer> mapToCheck = dm.getDatanodesSoftwareVersions();
    assertNull("should be no more version0 nodes", mapToCheck.get("version0"));
    assertEquals("should be one version1 node",
        mapToCheck.get("version1").intValue(), 1);
  }
  /**
   * This test sends a random sequence of node registrations and node removals
   * to the DatanodeManager (of nodes with different IDs and versions), and
   * checks that the DatanodeManager keeps a correct count of different software
   * versions at all times.
   */
  @Test
  public void testNumVersionsReportedCorrect() throws IOException {
    //Create the DatanodeManager which will be tested
    FSNamesystem fsn = Mockito.mock(FSNamesystem.class);
    Mockito.when(fsn.hasWriteLock()).thenReturn(true);
    DatanodeManager dm = mockDatanodeManager(fsn, new Configuration());

    // Draw a random seed, then re-seed the RNG with it and log it, so any
    // failure can be reproduced from the logged value.
    Random rng = new Random();
    int seed = rng.nextInt();
    rng = new Random(seed);
    LOG.info("Using seed " + seed + " for testing");

    //A map of the Storage IDs to the DN registration it was registered with
    HashMap <String, DatanodeRegistration> sIdToDnReg =
        new HashMap<String, DatanodeRegistration>();

    for(int i=0; i<NUM_ITERATIONS; ++i) {

      //If true, remove a node for every 3rd time (if there's one)
      if(rng.nextBoolean() && i%3 == 0 && sIdToDnReg.size()!=0) {

        //Pick a random node.
        // NOTE(review): rng.nextInt() may be negative, so randomIndex can be
        // negative; the loop below then runs zero times and the first entry
        // is removed. rng.nextInt(sIdToDnReg.size()) would give a uniform
        // pick -- harmless here, but worth confirming intent.
        int randomIndex = rng.nextInt() % sIdToDnReg.size();
        //Iterate to that random position
        Iterator<Map.Entry<String, DatanodeRegistration>> it =
            sIdToDnReg.entrySet().iterator();
        for(int j=0; j<randomIndex-1; ++j) {
          it.next();
        }
        DatanodeRegistration toRemove = it.next().getValue();
        LOG.info("Removing node " + toRemove.getDatanodeUuid() + " ip " +
            toRemove.getXferAddr() + " version : " + toRemove.getSoftwareVersion());

        //Remove that random node
        dm.removeDatanode(toRemove);
        it.remove();
      }

      // Otherwise register a node. This node may be a new / an old one
      else {
        //Pick a random storageID to register.
        String storageID = "someStorageID" + rng.nextInt(5000);

        DatanodeRegistration dr = Mockito.mock(DatanodeRegistration.class);
        Mockito.when(dr.getDatanodeUuid()).thenReturn(storageID);

        //If this storageID had already been registered before
        if(sIdToDnReg.containsKey(storageID)) {
          dr = sIdToDnReg.get(storageID);

          //Half of the times, change the IP address
          if(rng.nextBoolean()) {
            dr.setIpAddr(dr.getIpAddr() + "newIP");
          }
        } else { //This storageID has never been registered
          //Ensure IP address is unique to storageID
          String ip = "someIP" + storageID;
          Mockito.when(dr.getIpAddr()).thenReturn(ip);
          Mockito.when(dr.getXferAddr()).thenReturn(ip + ":9000");
          Mockito.when(dr.getXferPort()).thenReturn(9000);
        }
        //Pick a random version to register with
        Mockito.when(dr.getSoftwareVersion()).thenReturn(
            "version" + rng.nextInt(5));

        LOG.info("Registering node storageID: " + dr.getDatanodeUuid() +
            ", version: " + dr.getSoftwareVersion() + ", IP address: "
            + dr.getXferAddr());

        //Register this random node
        dm.registerDatanode(dr);
        sIdToDnReg.put(storageID, dr);
      }

      //Verify DatanodeManager still has the right count
      Map<String, Integer> mapToCheck = dm.getDatanodesSoftwareVersions();

      //Remove counts from versions and make sure that after removing all nodes
      //mapToCheck is empty
      for(Entry<String, DatanodeRegistration> it: sIdToDnReg.entrySet()) {
        String ver = it.getValue().getSoftwareVersion();
        if(!mapToCheck.containsKey(ver)) {
          throw new AssertionError("The correct number of datanodes of a "
              + "version was not found on iteration " + i);
        }
        mapToCheck.put(ver, mapToCheck.get(ver) - 1);
        if(mapToCheck.get(ver) == 0) {
          mapToCheck.remove(ver);
        }
      }

      for(Entry <String, Integer> entry: mapToCheck.entrySet()) {
        LOG.info("Still in map: " + entry.getKey() + " has "
            + entry.getValue());
      }

      assertEquals("The map of version counts returned by DatanodeManager was"
          + " not what it was expected to be on iteration " + i, 0,
          mapToCheck.size());
    }
  }
@Test (timeout = 100000)
public void testRejectUnresolvedDatanodes() throws IOException {
//Create the DatanodeManager which will be tested
FSNamesystem fsn = Mockito.mock(FSNamesystem.class);
Mockito.when(fsn.hasWriteLock()).thenReturn(true);
Configuration conf = new Configuration();
//Set configuration property for rejecting unresolved topology mapping
conf.setBoolean(
DFSConfigKeys.DFS_REJECT_UNRESOLVED_DN_TOPOLOGY_MAPPING_KEY, true);
//set TestDatanodeManager.MyResolver to be used for topology resolving
conf.setClass(
CommonConfigurationKeysPublic.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY,
TestDatanodeManager.MyResolver.class, DNSToSwitchMapping.class);
//create DatanodeManager
DatanodeManager dm = mockDatanodeManager(fsn, conf);
//storageID to register.
String storageID = "someStorageID-123";
DatanodeRegistration dr = Mockito.mock(DatanodeRegistration.class);
Mockito.when(dr.getDatanodeUuid()).thenReturn(storageID);
try {
//Register this node
dm.registerDatanode(dr);
Assert.fail("Expected an UnresolvedTopologyException");
} catch (UnresolvedTopologyException ute) {
LOG.info("Expected - topology is not resolved and " +
"registration is rejected.");
} catch (Exception e) {
Assert.fail("Expected an UnresolvedTopologyException");
}
}
  /**
   * MyResolver class provides resolve method which always returns null
   * in order to simulate unresolved topology mapping.
   */
  public static class MyResolver implements DNSToSwitchMapping {
    /** Always fails to resolve, so registration sees an unresolved node. */
    @Override
    public List<String> resolve(List<String> names) {
      return null;
    }

    /** No cache to reload; intentionally a no-op. */
    @Override
    public void reloadCachedMappings() {
    }

    /** No cache to reload; intentionally a no-op. */
    @Override
    public void reloadCachedMappings(List<String> names) {
    }
  }
  /**
   * This test creates a LocatedBlock with 5 locations, sorts the locations
   * based on the network topology, and ensures the locations are still aligned
   * with the storage ids and storage types.
   */
  @Test
  public void testSortLocatedBlocks() throws IOException, URISyntaxException {
    // No topology script, no PROVIDED storages.
    HelperFunction(null, 0);
  }
  /**
   * Execute a functional topology script and make sure that the helper
   * function works correctly with script-based resolution.
   *
   * @throws IOException
   * @throws URISyntaxException
   */
  @Test
  public void testgoodScript() throws IOException, URISyntaxException {
    // appendScriptExtension picks the platform-appropriate extension.
    HelperFunction("/" + Shell.appendScriptExtension("topology-script"), 0);
  }
  /**
   * Run a broken topology script and verify that the helper function is able
   * to ignore the broken script and still work correctly.
   *
   * @throws IOException
   * @throws URISyntaxException
   */
  @Test
  public void testBadScript() throws IOException, URISyntaxException {
    HelperFunction("/" + Shell.appendScriptExtension("topology-broken-script"),
        0);
  }
  /**
   * Test sorting (with and without a topology script) when the datanode set
   * includes nodes with PROVIDED storage.
   *
   * @throws IOException
   * @throws URISyntaxException
   */
  @Test
  public void testWithProvidedTypes() throws IOException, URISyntaxException {
    HelperFunction(null, 1);
    HelperFunction(null, 3);
    HelperFunction("/" + Shell.appendScriptExtension("topology-script"), 1);
    HelperFunction("/" + Shell.appendScriptExtension("topology-script"), 2);
  }
  /**
   * Helper that registers datanodes, builds a LocatedBlock across them,
   * sorts it for a target reader, and asserts that locations, storage IDs
   * and storage types stay aligned and correctly ordered (local node first,
   * decommissioned nodes last, PROVIDED storages immediately before the
   * decommissioned ones). Invoked with and without a topology script.
   *
   * NOTE(review): the name violates lowerCamelCase, but it is called from
   * the other tests in this class, so renaming would touch all callers.
   *
   * @param scriptFileName - Script Name or null
   * @param providedStorages - number of provided storages to add
   *
   * @throws URISyntaxException
   * @throws IOException
   */
  public void HelperFunction(String scriptFileName, int providedStorages)
      throws URISyntaxException, IOException {
    // create the DatanodeManager which will be tested
    Configuration conf = new Configuration();
    FSNamesystem fsn = Mockito.mock(FSNamesystem.class);
    Mockito.when(fsn.hasWriteLock()).thenReturn(true);
    if (scriptFileName != null && !scriptFileName.isEmpty()) {
      // Resolve the script from test resources and make it executable so the
      // topology mapper can actually run it.
      URL shellScript = getClass().getResource(scriptFileName);
      Path resourcePath = Paths.get(shellScript.toURI());
      FileUtil.setExecutable(resourcePath.toFile(), true);
      conf.set(DFSConfigKeys.NET_TOPOLOGY_SCRIPT_FILE_NAME_KEY,
          resourcePath.toString());
    }
    DatanodeManager dm = mockDatanodeManager(fsn, conf);

    int totalDNs = 5 + providedStorages;
    // register 5 datanodes, each with different storage ID and type
    DatanodeInfo[] locs = new DatanodeInfo[totalDNs];
    String[] storageIDs = new String[totalDNs];
    List<StorageType> storageTypesList = new ArrayList<>(
        Arrays.asList(StorageType.ARCHIVE,
            StorageType.DEFAULT,
            StorageType.DISK,
            StorageType.RAM_DISK,
            StorageType.SSD));

    for (int i = 0; i < providedStorages; i++) {
      storageTypesList.add(StorageType.PROVIDED);
    }

    StorageType[] storageTypes= storageTypesList.toArray(new StorageType[0]);

    for (int i = 0; i < totalDNs; i++) {
      // register new datanode
      String uuid = "UUID-" + i;
      String ip = "IP-" + i;
      DatanodeRegistration dr = Mockito.mock(DatanodeRegistration.class);
      Mockito.when(dr.getDatanodeUuid()).thenReturn(uuid);
      Mockito.when(dr.getIpAddr()).thenReturn(ip);
      Mockito.when(dr.getXferAddr()).thenReturn(ip + ":9000");
      Mockito.when(dr.getXferPort()).thenReturn(9000);
      Mockito.when(dr.getSoftwareVersion()).thenReturn("version1");
      dm.registerDatanode(dr);

      // get location and storage information
      locs[i] = dm.getDatanode(uuid);
      storageIDs[i] = "storageID-" + i;
    }

    // set first 2 locations as decomissioned
    locs[0].setDecommissioned();
    locs[1].setDecommissioned();

    // create LocatedBlock with above locations
    ExtendedBlock b = new ExtendedBlock("somePoolID", 1234);
    LocatedBlock block = new LocatedBlock(b, locs, storageIDs, storageTypes);
    List<LocatedBlock> blocks = new ArrayList<>();
    blocks.add(block);

    // Sort from the perspective of the last (non-decommissioned) datanode.
    final String targetIp = locs[4].getIpAddr();

    // sort block locations
    dm.sortLocatedBlocks(targetIp, blocks);

    // check that storage IDs/types are aligned with datanode locs
    DatanodeInfo[] sortedLocs = block.getLocations();
    storageIDs = block.getStorageIDs();
    storageTypes = block.getStorageTypes();
    assertThat(sortedLocs.length, is(totalDNs));
    assertThat(storageIDs.length, is(totalDNs));
    assertThat(storageTypes.length, is(totalDNs));
    for (int i = 0; i < sortedLocs.length; i++) {
      assertThat(((DatanodeInfoWithStorage) sortedLocs[i]).getStorageID(),
          is(storageIDs[i]));
      assertThat(((DatanodeInfoWithStorage) sortedLocs[i]).getStorageType(),
          is(storageTypes[i]));
    }

    // Ensure the local node is first.
    assertThat(sortedLocs[0].getIpAddr(), is(targetIp));

    // Ensure the two decommissioned DNs were moved to the end.
    assertThat(sortedLocs[sortedLocs.length - 1].getAdminState(),
        is(DatanodeInfo.AdminStates.DECOMMISSIONED));
    assertThat(sortedLocs[sortedLocs.length - 2].getAdminState(),
        is(DatanodeInfo.AdminStates.DECOMMISSIONED));
    // check that the StorageType of datanoodes immediately
    // preceding the decommissioned datanodes is PROVIDED
    for (int i = 0; i < providedStorages; i++) {
      assertThat(
          ((DatanodeInfoWithStorage)
              sortedLocs[sortedLocs.length - 3 - i]).getStorageType(),
          is(StorageType.PROVIDED));
    }
  }
/**
* Test whether removing a host from the includes list without adding it to
* the excludes list will exclude it from data node reports.
*/
@Test
public void testRemoveIncludedNode() throws IOException {
FSNamesystem fsn = Mockito.mock(FSNamesystem.class);
// Set the write lock so that the DatanodeManager can start
Mockito.when(fsn.hasWriteLock()).thenReturn(true);
DatanodeManager dm = mockDatanodeManager(fsn, new Configuration());
HostFileManager hm = new HostFileManager();
HostSet noNodes = new HostSet();
HostSet oneNode = new HostSet();
HostSet twoNodes = new HostSet();
DatanodeRegistration dr1 = new DatanodeRegistration(
new DatanodeID("127.0.0.1", "127.0.0.1", "someStorageID-123",
12345, 12345, 12345, 12345),
new StorageInfo(HdfsServerConstants.NodeType.DATA_NODE),
new ExportedBlockKeys(), "test");
DatanodeRegistration dr2 = new DatanodeRegistration(
new DatanodeID("127.0.0.1", "127.0.0.1", "someStorageID-234",
23456, 23456, 23456, 23456),
new StorageInfo(HdfsServerConstants.NodeType.DATA_NODE),
new ExportedBlockKeys(), "test");
twoNodes.add(entry("127.0.0.1:12345"));
twoNodes.add(entry("127.0.0.1:23456"));
oneNode.add(entry("127.0.0.1:23456"));
hm.refresh(twoNodes, noNodes);
Whitebox.setInternalState(dm, "hostConfigManager", hm);
// Register two data nodes to simulate them coming up.
// We need to add two nodes, because if we have only one node, removing it
// will cause the includes list to be empty, which means all hosts will be
// allowed.
dm.registerDatanode(dr1);
dm.registerDatanode(dr2);
// Make sure that both nodes are reported
List<DatanodeDescriptor> both =
dm.getDatanodeListForReport(HdfsConstants.DatanodeReportType.ALL);
// Sort the list so that we know which one is which
Collections.sort(both);
Assert.assertEquals("Incorrect number of hosts reported",
2, both.size());
Assert.assertEquals("Unexpected host or host in unexpected position",
"127.0.0.1:12345", both.get(0).getInfoAddr());
Assert.assertEquals("Unexpected host or host in unexpected position",
"127.0.0.1:23456", both.get(1).getInfoAddr());
// Remove one node from includes, but do not add it to excludes.
hm.refresh(oneNode, noNodes);
// Make sure that only one node is still reported
List<DatanodeDescriptor> onlyOne =
dm.getDatanodeListForReport(HdfsConstants.DatanodeReportType.ALL);
Assert.assertEquals("Incorrect number of hosts reported",
1, onlyOne.size());
Assert.assertEquals("Unexpected host reported",
"127.0.0.1:23456", onlyOne.get(0).getInfoAddr());
// Remove all nodes from includes
hm.refresh(noNodes, noNodes);
// Check that both nodes are reported again
List<DatanodeDescriptor> bothAgain =
dm.getDatanodeListForReport(HdfsConstants.DatanodeReportType.ALL);
// Sort the list so that we know which one is which
Collections.sort(bothAgain);
Assert.assertEquals("Incorrect number of hosts reported",
2, bothAgain.size());
Assert.assertEquals("Unexpected host or host in unexpected position",
"127.0.0.1:12345", bothAgain.get(0).getInfoAddr());
Assert.assertEquals("Unexpected host or host in unexpected position",
"127.0.0.1:23456", bothAgain.get(1).getInfoAddr());
}
/**
* Verify the correctness of pending recovery process.
*
* @param numReplicationBlocks the number of replication blocks in the queue.
* @param numECBlocks number of EC blocks in the queue.
* @param maxTransfers the maxTransfer value.
* @param numReplicationTasks the number of replication tasks polled from
* the queue.
* @param numECTasks the number of EC tasks polled from the queue.
*
* @throws IOException
*/
private void verifyPendingRecoveryTasks(
int numReplicationBlocks, int numECBlocks,
int maxTransfers, int numReplicationTasks, int numECTasks)
throws IOException {
FSNamesystem fsn = Mockito.mock(FSNamesystem.class);
Mockito.when(fsn.hasWriteLock()).thenReturn(true);
Configuration conf = new Configuration();
DatanodeManager dm = Mockito.spy(mockDatanodeManager(fsn, conf));
DatanodeDescriptor nodeInfo = Mockito.mock(DatanodeDescriptor.class);
Mockito.when(nodeInfo.isRegistered()).thenReturn(true);
Mockito.when(nodeInfo.getStorageInfos())
.thenReturn(new DatanodeStorageInfo[0]);
if (numReplicationBlocks > 0) {
Mockito.when(nodeInfo.getNumberOfReplicateBlocks())
.thenReturn(numReplicationBlocks);
List<BlockTargetPair> tasks =
Collections.nCopies(
Math.min(numReplicationTasks, numReplicationBlocks),
new BlockTargetPair(null, null));
Mockito.when(nodeInfo.getReplicationCommand(numReplicationTasks))
.thenReturn(tasks);
}
if (numECBlocks > 0) {
Mockito.when(nodeInfo.getNumberOfBlocksToBeErasureCoded())
.thenReturn(numECBlocks);
List<BlockECReconstructionInfo> tasks =
Collections.nCopies(numECTasks, null);
Mockito.when(nodeInfo.getErasureCodeCommand(numECTasks))
.thenReturn(tasks);
}
DatanodeRegistration dnReg = Mockito.mock(DatanodeRegistration.class);
Mockito.when(dm.getDatanode(dnReg)).thenReturn(nodeInfo);
DatanodeCommand[] cmds = dm.handleHeartbeat(
dnReg, new StorageReport[1], "bp-123", 0, 0, 10, maxTransfers, 0, null,
SlowPeerReports.EMPTY_REPORT, SlowDiskReports.EMPTY_REPORT);
long expectedNumCmds = Arrays.stream(
new int[]{numReplicationTasks, numECTasks})
.filter(x -> x > 0)
.count();
assertEquals(expectedNumCmds, cmds.length);
int idx = 0;
if (numReplicationTasks > 0) {
assertTrue(cmds[idx] instanceof BlockCommand);
BlockCommand cmd = (BlockCommand) cmds[0];
assertEquals(numReplicationTasks, cmd.getBlocks().length);
assertEquals(numReplicationTasks, cmd.getTargets().length);
idx++;
}
if (numECTasks > 0) {
assertTrue(cmds[idx] instanceof BlockECReconstructionCommand);
BlockECReconstructionCommand cmd =
(BlockECReconstructionCommand) cmds[idx];
assertEquals(numECTasks, cmd.getECTasks().size());
}
Mockito.verify(nodeInfo).getReplicationCommand(numReplicationTasks);
Mockito.verify(nodeInfo).getErasureCodeCommand(numECTasks);
}
  /**
   * Verifies how pending replication and erasure-coding recovery work is
   * divided across the two queues for several queue-length ratios.
   */
  @Test
  public void testPendingRecoveryTasks() throws IOException {
    // Tasks are split according to the ratio between queue lengths.
    verifyPendingRecoveryTasks(20, 20, 20, 10, 10);
    verifyPendingRecoveryTasks(40, 10, 20, 16, 4);
    // Approximately load tasks if the ratio between queue length is large.
    verifyPendingRecoveryTasks(400, 1, 20, 20, 1);
  }
@Test
public void testNetworkTopologyInstantiation() throws Exception {
// case 1, dfs.use.dfs.network.topology=true, use the default
// DFSNetworkTopology impl.
Configuration conf1 = new HdfsConfiguration();
FSNamesystem fsn = Mockito.mock(FSNamesystem.class);
DatanodeManager dm1 = mockDatanodeManager(fsn, conf1);
assertEquals(DFSNetworkTopology.class, dm1.getNetworkTopology().getClass());
// case 2, dfs.use.dfs.network.topology=false, use the default
// NetworkTopology impl.
Configuration conf2 = new HdfsConfiguration();
conf2.setBoolean(DFSConfigKeys.DFS_USE_DFS_NETWORK_TOPOLOGY_KEY, false);
DatanodeManager dm2 = mockDatanodeManager(fsn, conf2);
assertEquals(NetworkTopology.class, dm2.getNetworkTopology()
.getClass());
// case 3, dfs.use.dfs.network.topology=false, and specify the
// net.topology.impl property.
Configuration conf3 = new HdfsConfiguration();
conf3.setClass(CommonConfigurationKeysPublic.NET_TOPOLOGY_IMPL_KEY,
MockDfsNetworkTopology.class, NetworkTopology.class);
conf3.setBoolean(DFSConfigKeys.DFS_USE_DFS_NETWORK_TOPOLOGY_KEY, false);
DatanodeManager dm3 = mockDatanodeManager(fsn, conf3);
assertEquals(MockDfsNetworkTopology.class, dm3.getNetworkTopology()
.getClass());
// case 4, dfs.use.dfs.network.topology=true, and specify the
// dfs.net.topology.impl property.
Configuration conf4 = new HdfsConfiguration();
conf4.setClass(DFSConfigKeys.DFS_NET_TOPOLOGY_IMPL_KEY,
MockDfsNetworkTopology.class, NetworkTopology.class);
conf4.setBoolean(DFSConfigKeys.DFS_USE_DFS_NETWORK_TOPOLOGY_KEY, true);
DatanodeManager dm4 = mockDatanodeManager(fsn, conf4);
assertEquals(MockDfsNetworkTopology.class, dm4.getNetworkTopology()
.getClass());
}
  /**
   * A NetworkTopology implementation for test.
   *
   * Used by {@code testNetworkTopologyInstantiation} to verify that a custom
   * topology class configured via net.topology.impl / dfs.net.topology.impl
   * is the one actually instantiated by the DatanodeManager.
   */
  public static class MockDfsNetworkTopology extends DFSNetworkTopology {
    public MockDfsNetworkTopology(){
      super();
    }
  }
}
|
package com.kailash.tutorial.swing.ch5;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
/**
 * Main application window: a text panel in the center and a button at the
 * bottom that appends a greeting line each time it is clicked.
 *
 * Created by kailash on 3/30/20.
 */
public class MainFrame extends JFrame {
    private final TextPanel textPanel;
    private final JButton btn;

    public MainFrame() {
        super("Hello World");
        setLayout(new BorderLayout());

        textPanel = new TextPanel();
        btn = new JButton("Click Me");
        // Every click appends one greeting line to the text panel.
        btn.addActionListener(e -> textPanel.appendText("Hello World \n"));

        add(textPanel, BorderLayout.CENTER);
        add(btn, BorderLayout.SOUTH);

        setSize(500, 500);
        setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        setVisible(true);
    }
}
|
/*
* Copyright 2020 Frederic Thevenet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright 2020 Frederic Thevenet
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.binjr.core.controllers;
import eu.binjr.common.javafx.controls.TimeRange;
import eu.binjr.common.javafx.richtext.CodeAreaHighlighter;
import eu.binjr.common.logging.Logger;
import eu.binjr.common.navigation.RingIterator;
import eu.binjr.core.data.adapters.DataAdapter;
import eu.binjr.core.data.adapters.SourceBinding;
import eu.binjr.core.data.async.AsyncTaskManager;
import eu.binjr.core.data.exceptions.DataAdapterException;
import eu.binjr.core.data.exceptions.NoAdapterFoundException;
import eu.binjr.core.data.timeseries.transform.SortTransform;
import eu.binjr.core.data.workspace.TextFilesWorksheet;
import eu.binjr.core.data.workspace.Worksheet;
import eu.binjr.core.dialogs.Dialogs;
import eu.binjr.core.preferences.UserPreferences;
import javafx.animation.PauseTransition;
import javafx.application.Platform;
import javafx.beans.binding.Bindings;
import javafx.beans.property.Property;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.value.ChangeListener;
import javafx.fxml.FXML;
import javafx.scene.chart.XYChart;
import javafx.scene.control.*;
import javafx.scene.layout.AnchorPane;
import javafx.util.Duration;
import org.controlsfx.control.MaskerPane;
import org.fxmisc.richtext.CodeArea;
import org.fxmisc.richtext.LineNumberFactory;
import org.fxmisc.richtext.model.StyleSpans;
import java.net.URL;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static java.util.stream.Collectors.groupingBy;
/**
 * Controller for the text worksheet view: renders text fetched through
 * {@link DataAdapter} instances into a read-only {@link CodeArea}, with word
 * wrap, optional XML syntax highlighting and an incremental search bar.
 */
public class TextWorksheetController extends WorksheetController {
    private static final Logger logger = Logger.create(TextWorksheetController.class);
    /** Path of the FXML resource backing this controller's view. */
    public static final String WORKSHEET_VIEW_FXML = "/eu/binjr/views/TextWorksheetView.fxml";
    private final TextFilesWorksheet worksheet;
    // Initialized to the last hour; this controller never updates it itself.
    private final Property<TimeRange> timeRangeProperty = new SimpleObjectProperty<>(TimeRange.of(ZonedDateTime.now().minusHours(1), ZonedDateTime.now()));
    // Last computed syntax highlighting; overlaid with search-hit styling
    // in doSearchHighlight. Null until a file is loaded with highlighting on.
    private StyleSpans<Collection<String>> syntaxHilightStyleSpans;
    // Cycles through the current search hits; replaced on every new search.
    private RingIterator<CodeAreaHighlighter.SearchHitRange> searchHitIterator = RingIterator.of(Collections.emptyList());

    /**
     * Initializes a new instance of the {@link TextWorksheetController} class
     * and re-attaches every series binding of the worksheet to the adapter
     * instance whose id matches the binding's stored adapter id.
     *
     * @param parent    the parent main view controller
     * @param worksheet the worksheet to display
     * @param adapters  candidate adapters to resolve bindings against
     * @throws NoAdapterFoundException if no adapter matches a binding's id
     */
    public TextWorksheetController(MainViewController parent, TextFilesWorksheet worksheet, Collection<DataAdapter<String>> adapters)
            throws NoAdapterFoundException {
        super(parent);
        this.worksheet = worksheet;
        for (var d : worksheet.getSeriesInfo()) {
            UUID id = d.getBinding().getAdapterId();
            DataAdapter<String> da = adapters
                    .stream()
                    .filter(a -> (id != null && a != null && a.getId() != null) && id.equals(a.getId()))
                    .findAny()
                    .orElseThrow(() -> new NoAdapterFoundException("Failed to find a valid adapter with id " +
                            (id != null ? id.toString() : "null")));
            d.getBinding().setAdapter(da);
        }
    }

    // --- Controls injected from TextWorksheetView.fxml ---------------------
    @FXML
    private AnchorPane root;
    @FXML
    private CodeArea textOutput;
    @FXML
    private ToggleButton wordWrapButton;
    @FXML
    private Button refreshButton;
    @FXML
    private Button searchHistoryButton;
    @FXML
    private TextField searchTextField;
    @FXML
    private Button clearSearchButton;
    @FXML
    private ToggleButton searchMatchCaseToggle;
    @FXML
    private ToggleButton searchRegExToggle;
    @FXML
    private Label searchResultsLabel;
    @FXML
    private Button prevOccurrenceButton;
    @FXML
    private Button nextOccurrenceButton;

    @Override
    public Worksheet getWorksheet() {
        return worksheet;
    }

    // Overlay shown while data is being fetched in loadFile().
    @FXML
    public MaskerPane busyIndicator;

    @Override
    public Property<TimeRange> selectedRangeProperty() {
        return timeRangeProperty;
    }

    /** Text worksheets have no chart viewports; always empty. */
    @Override
    public Optional<ChartViewPort> getAttachedViewport(TitledPane pane) {
        return Optional.empty();
    }

    /** Returns a context menu holding only a disabled worksheet-name entry. */
    @Override
    public ContextMenu getChartListContextMenu(Collection<TreeItem<SourceBinding>> treeView) {
        MenuItem item = new MenuItem(worksheet.getName());
        item.setDisable(true);
        return new ContextMenu(item);
    }

    /** No-op: text worksheets never request a reload of the parent view. */
    @Override
    public void setReloadRequiredHandler(Consumer<WorksheetController> action) {
    }

    /** Forces a refresh of the worksheet content from its sources. */
    @Override
    public void refresh() {
        invalidate(null, false, true);
    }

    // NOTE(review): saveToHistory is unused; dontPlotChart is forwarded as
    // the dontPlot argument of invalidate (which also ignores it).
    public void invalidateAll(boolean saveToHistory, boolean dontPlotChart, boolean forceRefresh) {
        invalidate(null, dontPlotChart, forceRefresh);
    }

    // Only forceRefresh has any effect: viewPort and dontPlot are ignored
    // because a text worksheet has no charts to (re)plot.
    public void invalidate(ChartViewPort viewPort, boolean dontPlot, boolean forceRefresh) {
        if (forceRefresh) {
            loadFile();
        }
    }

    /** No-op: snapshots are not supported for text worksheets. */
    @Override
    public void saveSnapshot() {
    }

    /** No-op: there is no properties pane on a text worksheet. */
    @Override
    public void toggleShowPropertiesPane() {
    }

    /** No-op: there is no properties pane on a text worksheet. */
    @Override
    public void setShowPropertiesPane(boolean value) {
    }

    /** Text worksheets have no chart viewports; always an empty list. */
    @Override
    public List<ChartViewPort> getViewPorts() {
        return new ArrayList<>();
    }

    @Override
    public void close() {
    }

    @Override
    public String getView() {
        return WORKSHEET_VIEW_FXML;
    }

    /**
     * Wires up the view: font-size binding, read-only code area with line
     * numbers, word wrap, refresh action and the search bar (navigation,
     * clear button, debounced text search, match-case and regex toggles),
     * then schedules the initial load.
     */
    @Override
    public void initialize(URL location, ResourceBundle resources) {
        getBindingManager().attachListener(worksheet.textViewFontSizeProperty(),
                (ChangeListener<Integer>) (obs, oldVal, newVal) -> textOutput.setStyle("-fx-font-size: " + newVal + "pt;"));
        textOutput.setParagraphGraphicFactory(LineNumberFactory.get(textOutput));
        textOutput.setEditable(false);
        getBindingManager().bind(textOutput.wrapTextProperty(), wordWrapButton.selectedProperty());
        refreshButton.setOnAction(getBindingManager().registerHandler(event -> refresh()));
        //Search bar initialization
        prevOccurrenceButton.setOnAction(getBindingManager().registerHandler(event -> {
            if (searchHitIterator.hasPrevious()) {
                focusOnSearchHit(searchHitIterator.previous());
            }
        }));
        nextOccurrenceButton.setOnAction(getBindingManager().registerHandler(event -> {
            if (searchHitIterator.hasNext()) {
                focusOnSearchHit(searchHitIterator.next());
            }
        }));
        clearSearchButton.setOnAction(getBindingManager().registerHandler(event -> searchTextField.clear()));
        // The clear button is only visible while the search field has text.
        bindingManager.bind(clearSearchButton.visibleProperty(),
                Bindings.createBooleanBinding(() -> !searchTextField.getText().isBlank(), searchTextField.textProperty()));
        // Delay the search until at least the following amount of time elapsed since the last character was entered
        var delay = new PauseTransition(Duration.millis(UserPreferences.getInstance().searchFieldInputDelayMs.get().intValue()));
        getBindingManager().attachListener(searchTextField.textProperty(),
                (ChangeListener<String>) (obs, oldText, newText) -> {
                    delay.setOnFinished(event -> doSearchHighlight(newText,
                            searchMatchCaseToggle.isSelected(),
                            searchRegExToggle.isSelected()));
                    delay.playFromStart();
                });
        // Re-run the search immediately when either search option changes.
        getBindingManager().attachListener(searchMatchCaseToggle.selectedProperty(),
                (ChangeListener<Boolean>) (obs, oldVal, newVal) ->
                        doSearchHighlight(searchTextField.getText(), newVal, searchRegExToggle.isSelected()));
        getBindingManager().attachListener(searchRegExToggle.selectedProperty(),
                (ChangeListener<Boolean>) (obs, oldVal, newVal) ->
                        doSearchHighlight(searchTextField.getText(), searchMatchCaseToggle.isSelected(), newVal));
        Platform.runLater(this::refresh);
        super.initialize(location, resources);
    }

    /**
     * Selects the given search hit in the code area and updates the results
     * label; a null hit clears the selection and shows "No results".
     */
    private void focusOnSearchHit(CodeAreaHighlighter.SearchHitRange hit) {
        if (hit == null) {
            textOutput.selectRange(0, 0);
            searchResultsLabel.setText("No results");
        } else {
            textOutput.selectRange(hit.getStart(), hit.getEnd());
            textOutput.requestFollowCaret();
            searchResultsLabel.setText(String.format("%d/%d",
                    searchHitIterator.peekCurrentIndex() + 1,
                    searchHitIterator.peekLastIndex() + 1));
        }
    }

    /**
     * Computes and applies search-hit highlighting for the given query,
     * overlaying it on top of the cached syntax highlighting when present,
     * then focuses the first hit (if any).
     */
    private void doSearchHighlight(String searchText, boolean matchCase, boolean regEx) {
        var searchResults = CodeAreaHighlighter.computeSearchHitsHighlighting(textOutput.getText(), searchText, matchCase, regEx);
        prevOccurrenceButton.setDisable(searchResults.getSearchHitRanges().isEmpty());
        nextOccurrenceButton.setDisable(searchResults.getSearchHitRanges().isEmpty());
        searchHitIterator = RingIterator.of(searchResults.getSearchHitRanges());
        searchResultsLabel.setText(searchResults.getSearchHitRanges().size() + " results");
        if (syntaxHilightStyleSpans != null) {
            // Merge search styling with syntax styling instead of replacing it.
            textOutput.setStyleSpans(0, syntaxHilightStyleSpans.overlay(searchResults.getStyleSpans(),
                    (strings, strings2) -> Stream.concat(strings.stream(),
                            strings2.stream()).collect(Collectors.toCollection(ArrayList<String>::new))));
        } else {
            textOutput.setStyleSpans(0, searchResults.getStyleSpans());
        }
        if (searchHitIterator.hasNext()) {
            focusOnSearchHit(searchHitIterator.next());
        } else {
            focusOnSearchHit(null);
        }
    }

    /**
     * Fetches the worksheet's data from its adapters: prunes series whose
     * adapter has been closed, then, grouping by adapter and path, retrieves
     * the data for each path's full initial time range and attaches the
     * resulting processor to each series info.
     *
     * @throws DataAdapterException if an adapter fails to provide data
     */
    public void fetchDataFromSources() throws DataAdapterException {
        // prune series from closed adapters
        worksheet.getSeriesInfo().removeIf(seriesInfo -> {
            if (seriesInfo.getBinding().getAdapter().isClosed()) {
                logger.debug(() -> seriesInfo.getDisplayName() + " will be pruned because attached adapter " +
                        seriesInfo.getBinding().getAdapter().getId() + " is closed.");
                return true;
            }
            return false;
        });
        var bindingsByAdapters = worksheet.getSeriesInfo().stream().collect(groupingBy(o -> o.getBinding().getAdapter()));
        for (var byAdapterEntry : bindingsByAdapters.entrySet()) {
            // Define the transforms to apply
            // NOTE(review): unchecked cast — adapters attached in the
            // constructor are DataAdapter<String>, so this holds in practice.
            var adapter = (DataAdapter<String>) byAdapterEntry.getKey();
            var sort = new SortTransform();
            sort.setEnabled(adapter.isSortingRequired());
            // Group all queries with the same adapter and path
            var bindingsByPath = byAdapterEntry.getValue().stream().collect(groupingBy(o -> o.getBinding().getPath()));
            for (var byPathEntry : bindingsByPath.entrySet()) {
                String path = byPathEntry.getKey();
                logger.trace("Fetch sub-task '" + path + "' started");
                // Get data from the adapter
                var range = adapter.getInitialTimeRange(path, byPathEntry.getValue());
                var data = adapter.fetchData(
                        path,
                        range.getBeginning().toInstant(),
                        range.getEnd().toInstant(),
                        byPathEntry.getValue(),
                        true);
                data.entrySet().parallelStream().forEach(entry -> {
                    var info = entry.getKey();
                    var proc = entry.getValue();
                    //bind proc to timeSeries info
                    info.setProcessor(proc);
                });
            }
        }
    }

    /**
     * Asynchronously fetches all series data, concatenates it into a single
     * string and displays it in the code area (applying XML syntax
     * highlighting if enabled on the worksheet). Errors are surfaced through
     * a notification dialog; the busy indicator is shown while loading.
     */
    private void loadFile() {
        try {
            AsyncTaskManager.getInstance().submit(() -> {
                        busyIndicator.setVisible(true);
                        fetchDataFromSources();
                        return worksheet.getSeriesInfo().stream()
                                .map(info -> info.getProcessor()
                                        .getData()
                                        .stream()
                                        .map(XYChart.Data::getYValue)
                                        .collect(Collectors.joining()))
                                .collect(Collectors.joining());
                    },
                    event -> {
                        // Success: display the text and (optionally) highlight it.
                        busyIndicator.setVisible(false);
                        String data = (String) event.getSource().getValue();
                        textOutput.clear();
                        textOutput.replaceText(0, 0, data);
                        if (worksheet.isSyntaxHighlightEnabled()) {
                            this.syntaxHilightStyleSpans = CodeAreaHighlighter.computeXmlSyntaxHighlighting(textOutput.getText());
                            textOutput.setStyleSpans(0, syntaxHilightStyleSpans);
                        }
                    }, event -> {
                        // Failure: hide the indicator and notify the user.
                        busyIndicator.setVisible(false);
                        Dialogs.notifyException("An error occurred while loading text file: " +
                                        event.getSource().getException().getMessage(),
                                event.getSource().getException(),
                                root);
                    });
        } catch (Exception e) {
            Dialogs.notifyException(e);
        }
    }
}
|
package datalog_ra.base.TupleTransformation;
import datalog_ra.base.dataStructures.Attribute;
import datalog_ra.base.dataStructures.Tuple;
import java.util.LinkedList;
import java.util.List;
/**
 * Transformation, that creates a new tuple containing attributes of source
 * tuple in order given by attributeOrder. Missing attributes are discarded.
 *
 * @author Jakub
 */
public class ProjectionTransformation implements TupleTransformation {
  // Positions of the source attributes to keep, in result order.
  private final List<Integer> attributeOrder;

  public ProjectionTransformation(List<Integer> attributeOrder) {
    this.attributeOrder = attributeOrder;
  }

  @Override
  public Tuple transform(Tuple tuple) {
    // Propagate "no tuple" unchanged.
    if (tuple == null) {
      return null;
    }
    // Collect the selected attributes in the requested order.
    LinkedList<Attribute> projected = new LinkedList<>();
    attributeOrder.forEach(position -> projected.add(tuple.get(position)));
    return new Tuple(projected);
  }
}
|
package day71;
/**
 * Multiplies two complex numbers expressed as strings of the form "a+bi".
 *
 * Created by Gaurav on 27/04/18.
 */
public class ComplexNumber {

    public static void main(String[] args) {
        new ComplexNumber().complexNumberMultiply("-1+-1i", "1+-1i");
    }

    /**
     * Returns the product of two complex numbers, each given (and returned)
     * in the "a+bi" string format; negative parts appear as e.g. "1+-1i".
     */
    public String complexNumberMultiply(String a, String b) {
        // (a1 + b1*i) * (a2 + b2*i) = (a1*a2 - b1*b2) + (a1*b2 + b1*a2)*i
        String[] first = a.split("\\+");
        String[] second = b.split("\\+");
        int a1 = Integer.parseInt(first[0]);
        int b1 = Integer.parseInt(first[1].replace("i", ""));
        int a2 = Integer.parseInt(second[0]);
        int b2 = Integer.parseInt(second[1].replace("i", ""));
        int real = a1 * a2 - b1 * b2;
        int imaginary = a1 * b2 + b1 * a2;
        return real + "+" + imaginary + "i";
    }
}
|
package com.lt.hm.wovideo.http;
import android.content.Context;
import android.util.Log;
import com.google.gson.Gson;
import com.lt.hm.wovideo.utils.StringUtils;
import com.lt.hm.wovideo.utils.TLog;
import com.zhy.http.okhttp.OkHttpUtils;
import com.zhy.http.okhttp.builder.GetBuilder;
import com.zhy.http.okhttp.builder.PostFileBuilder;
import com.zhy.http.okhttp.builder.PostFormBuilder;
import com.zhy.http.okhttp.builder.PostStringBuilder;
import com.zhy.http.okhttp.callback.Callback;
import com.zhy.http.okhttp.callback.FileCallBack;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
 * Static helpers wrapping {@code OkHttpUtils} for all HTTP traffic against
 * the wsp-web-restservice backend: form/string/file POSTs, GETs and file
 * downloads.
 *
 * @author leonardo
 * @version 1.0
 * @create_date 16/5/29
 */
public class HttpUtils {
    // Intranet test environment:
    // public final static String HOST = "http://172.16.10.15:9100";
    // private static String API_URL = "http://172.16.10.15:9100/wsp-web-restservice/%s";
    // Public test environment:
    // public final static String HOST = "http://59.108.94.40:9100";
    // private static String API_URL = "http://59.108.94.40:9100/wsp-web-restservice/%s";

    /** Base host used by {@link #appendUrl(String)} for relative URLs. */
    public final static String HOST = "http://111.206.135.50:8080";
    /** REST endpoint template; %s is replaced by the partial API path. */
    private static final String API_URL = "http://111.206.133.134:8080/wsp-web-restservice/%s";

    public static final String DELETE = "DELETE";
    public static final String GET = "GET";
    public static final String POST = "POST";
    public static final String PUT = "PUT";

    public HttpUtils() {
    }

    /**
     * Clears any cookies stored for the current user.
     * NOTE(review): implementation is commented out; currently a no-op.
     */
    public static void clearUserCookies(Context context) {
        // (new HttpClientCookieStore(context)).a();
    }

    /** Form POST without a request id (see {@link #formPost(String, HashMap, int, Callback)}). */
    public static void formPost(String url, HashMap<String, Object> maps, Callback<?> callback) {
        formPost(url, maps, -1, callback);
    }

    /**
     * Form POST against the API.
     *
     * @param url      partial API path, completed via {@link #getAbsoluteApiUrl(String)}
     * @param maps     form parameters; null/empty values are skipped
     * @param flag     request id attached to the call, or -1 for none
     * @param callback receives the response
     */
    public static void formPost(String url, HashMap<String, Object> maps, int flag, Callback<?> callback) {
        PostFormBuilder builder;
        TLog.error("flag---" + flag);
        if (flag == -1) { // no request id
            builder = OkHttpUtils.post().url(getAbsoluteApiUrl(url));
        } else {
            builder = OkHttpUtils.post().url(getAbsoluteApiUrl(url)).id(flag);
        }
        if (maps.size() > 0) {
            for (String key : maps.keySet()) {
                if (!StringUtils.isNullOrEmpty(maps.get(key))) {
                    builder.addParams(key, maps.get(key).toString());
                }
            }
        }
        TLog.log("request_params" + maps.toString());
        builder.build().execute(callback);
    }

    /** POSTs {@code obj} serialized to JSON as the request body. */
    public static void stringPost(String url, Object obj, Callback<?> callback) {
        PostStringBuilder builder = OkHttpUtils.postString().url(getAbsoluteApiUrl(url));
        builder.content(new Gson().toJson(obj));
        builder.build().execute(callback);
    }

    /**
     * Single file upload.
     *
     * @param url      partial API path
     * @param file     file to upload as the request body
     * @param callback receives the response
     */
    public static void filePost(String url, File file, FileCallBack callback) {
        PostFileBuilder builder = OkHttpUtils.postFile().url(getAbsoluteApiUrl(url));
        builder.file(file);
        builder.build().execute(callback);
    }

    /**
     * Multi-file upload.
     * NOTE(review): unlike the other helpers this uses the raw url (not
     * getAbsoluteApiUrl) and a fixed "mFile" part name with the map key as
     * the file name — confirm this is intended.
     *
     * @param url      full upload URL
     * @param files    map of file name to file
     * @param headers  extra form parameters
     * @param callBack receives the response
     */
    public static void multiFilePost(String url, Map<String, File> files, Map<String, String> headers, FileCallBack callBack) {
        PostFormBuilder builder = OkHttpUtils.post().url(url);
        for (String key : files.keySet()) {
            builder.addFile("mFile", key, files.get(key));
        }
        builder.params(headers);
        builder.build().execute(callBack);
    }

    /**
     * Single file download. Use a FileCallBack supplying the destination
     * folder and file name.
     */
    public static void downFile(String url, FileCallBack callback) {
        GetBuilder builder = OkHttpUtils.get().url(url);
        builder.build().execute(callback);
    }

    /**
     * GET request against the API.
     *
     * @param url      partial API path
     * @param maps     query parameters; null/empty values are skipped
     * @param callback receives the response
     */
    public static void formGet(String url, HashMap<String, Object> maps, Callback callback) {
        GetBuilder builder = OkHttpUtils.get().url(getAbsoluteApiUrl(url));
        if (maps.size() > 0) {
            for (String key : maps.keySet()) {
                // Skip null/empty values, consistent with formPost; previously
                // a null value here caused an NPE on toString().
                if (!StringUtils.isNullOrEmpty(maps.get(key))) {
                    builder.addParams(key, maps.get(key).toString());
                }
            }
        }
        builder.build().execute(callback);
    }

    /**
     * Completes a relative URL against {@link #HOST} (non-%s form).
     *
     * @param partUrl relative path, or an already-absolute URL (returned as-is)
     * @return the absolute URL
     */
    public static String appendUrl(String partUrl) {
        if (partUrl != null && !partUrl.startsWith("http:") && !partUrl.startsWith("https:")) {
            partUrl = HOST + partUrl;
        }
        return partUrl;
    }

    /**
     * Completes a partial API path via the {@link #API_URL} template;
     * already-absolute URLs are returned unchanged.
     *
     * @param partUrl partial API path or absolute URL
     * @return the absolute request URL
     */
    public static String getAbsoluteApiUrl(String partUrl) {
        String url = partUrl;
        if (!partUrl.startsWith("http:") && !partUrl.startsWith("https:")) {
            url = String.format(API_URL, partUrl);
        }
        Log.d("BASE_CLIENT", "request:" + url);
        return url;
    }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.byteCode.expression;
import com.facebook.presto.byteCode.Block;
import com.facebook.presto.byteCode.ByteCodeNode;
import com.facebook.presto.byteCode.MethodGenerationContext;
import com.facebook.presto.byteCode.instruction.LabelNode;
import com.google.common.collect.ImmutableList;
import java.util.List;
import static com.facebook.presto.byteCode.ParameterizedType.type;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Byte code expression computing the short-circuit logical OR of two boolean
 * sub-expressions, equivalent to the Java {@code left || right} operator.
 */
class OrByteCodeExpression
        extends ByteCodeExpression
{
    private final ByteCodeExpression left;
    private final ByteCodeExpression right;

    OrByteCodeExpression(ByteCodeExpression left, ByteCodeExpression right)
    {
        // The result of a logical OR is always a boolean.
        super(type(boolean.class));
        // Both operands must be non-null primitive booleans.
        this.left = checkNotNull(left, "left is null");
        checkArgument(left.getType().getPrimitiveType() == boolean.class, "Expected left to be type boolean but is %s", left.getType());
        this.right = checkNotNull(right, "right is null");
        checkArgument(right.getType().getPrimitiveType() == boolean.class, "Expected right to be type boolean but is %s", right.getType());
    }

    @Override
    public ByteCodeNode getByteCode(MethodGenerationContext generationContext)
    {
        LabelNode trueLabel = new LabelNode("true");
        LabelNode endLabel = new LabelNode("end");
        // Short-circuit evaluation: jump to trueLabel as soon as either
        // operand evaluates to true; only if both are false is false pushed.
        return new Block()
                .append(left)
                .ifTrueGoto(trueLabel)
                .append(right)
                .ifTrueGoto(trueLabel)
                .push(false)
                .gotoLabel(endLabel)
                .visitLabel(trueLabel)
                .push(true)
                .visitLabel(endLabel);
    }

    @Override
    public List<ByteCodeNode> getChildNodes()
    {
        return ImmutableList.<ByteCodeNode>of(left, right);
    }

    @Override
    protected String formatOneLine()
    {
        // Render using Java's || syntax for debugging/tracing output.
        return "(" + left + " || " + right + ")";
    }
}
|
package com.asofdate.hauth.dao.impl;
import com.asofdate.hauth.dao.UserDetailsDao;
import com.asofdate.hauth.entity.UserDetailsEntity;
import com.asofdate.sql.SqlDefine;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * JDBC-backed implementation of {@link UserDetailsDao}.
 *
 * Created by hzwy23 on 2017/5/18.
 */
@Repository
public class UserDetailsDaoImpl implements UserDetailsDao {
    @Autowired
    public JdbcTemplate jdbcTemplate;

    /**
     * Loads the details of a single user.
     *
     * @param userId id of the user to look up
     * @return the matching {@link UserDetailsEntity} rows (empty if none)
     */
    @Override
    public List findById(String userId) {
        // Map each result row onto a UserDetailsEntity via bean-property matching.
        RowMapper<UserDetailsEntity> rowMapper =
                new BeanPropertyRowMapper<>(UserDetailsEntity.class);
        return jdbcTemplate.query(SqlDefine.sys_rdbms_023, rowMapper, userId);
    }
}
|
/**
* Copyright 2016-2021 The Reaktivity Project
*
* The Reaktivity Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.reaktivity.nukleus.http_cache.internal.stream;
import static org.reaktivity.nukleus.budget.BudgetDebitor.NO_DEBITOR_INDEX;
import static org.reaktivity.nukleus.http_cache.internal.proxy.cache.DefaultCacheEntry.NUM_OF_HEADER_SLOTS;
import java.time.Instant;
import java.util.function.Consumer;
import org.agrona.DirectBuffer;
import org.agrona.MutableDirectBuffer;
import org.reaktivity.nukleus.budget.BudgetDebitor;
import org.reaktivity.nukleus.buffer.BufferPool;
import org.reaktivity.nukleus.function.MessageConsumer;
import org.reaktivity.nukleus.http_cache.internal.proxy.cache.DefaultCacheEntry;
import org.reaktivity.nukleus.http_cache.internal.proxy.cache.SurrogateControl;
import org.reaktivity.nukleus.http_cache.internal.types.Array32FW;
import org.reaktivity.nukleus.http_cache.internal.types.HttpHeaderFW;
import org.reaktivity.nukleus.http_cache.internal.types.OctetsFW;
import org.reaktivity.nukleus.http_cache.internal.types.stream.ResetFW;
import org.reaktivity.nukleus.http_cache.internal.types.stream.SignalFW;
import org.reaktivity.nukleus.http_cache.internal.types.stream.WindowFW;
/**
 * Streams a response that is already stored in the cache back to the client,
 * honouring the reply stream's credit-based flow control (sequence /
 * acknowledge / maximum window plus an optional shared budget debitor).
 * A BEGIN with the cached headers is sent via {@link #doResponseBegin}, body
 * data is flushed opportunistically as WINDOW credit arrives, and an END
 * (optionally preceded by a push promise) terminates the stream.
 */
final class HttpCacheProxyCachedResponse
{
    private final HttpCacheProxyFactory factory;
    private final MessageConsumer reply;
    private final DefaultCacheEntry cacheEntry;
    private final long routeId;
    private final long replyId;
    private final long authorization;
    private final boolean promiseNextPollRequest;

    // Credit-based flow-control state of the reply stream, refreshed from WINDOW frames.
    private long replySeq;
    private long replyAck;
    private int replyMax;
    private int replyPad;

    // Shared-budget debitor state; replyDebitorIndex == NO_DEBITOR_INDEX means none acquired.
    private long replyDebitorId;
    private BudgetDebitor replyDebitor;
    private long replyDebitorIndex = NO_DEBITOR_INDEX;

    // Number of cached response body bytes already written; -1 until BEGIN has been sent.
    private int responseProgress = -1;

    // Invoked when the response completes or is reset, so the owner can detach this object.
    private Consumer<HttpCacheProxyCachedResponse> resetHandler;

    HttpCacheProxyCachedResponse(
        HttpCacheProxyFactory factory,
        MessageConsumer reply,
        long routeId,
        long replyId,
        long authorization,
        long replyBudgetId,
        long replySeq,
        long replyAck,
        int replyMax,
        int replyPad,
        int requestHash,
        boolean promiseNextPollRequest,
        Consumer<HttpCacheProxyCachedResponse> resetHandler)
    {
        this.factory = factory;
        this.reply = reply;
        this.routeId = routeId;
        this.replyId = replyId;
        this.authorization = authorization;
        // Resolve the cached entry for this request hash up front.
        this.cacheEntry = factory.defaultCache.lookup(requestHash);
        this.promiseNextPollRequest = promiseNextPollRequest;
        this.resetHandler = resetHandler;
        // Seed flow-control state from the initial budget supplied by the caller.
        updateBudget(replyBudgetId, replySeq, replyAck, replyMax, replyPad);
    }

    /**
     * Dispatches throttle frames (RESET / WINDOW / SIGNAL) received on the reply stream.
     */
    void onResponseMessage(
        int msgTypeId,
        DirectBuffer buffer,
        int index,
        int length)
    {
        switch (msgTypeId)
        {
        case ResetFW.TYPE_ID:
            final ResetFW reset = factory.resetRO.wrap(buffer, index, index + length);
            onResponseReset(reset);
            break;
        case WindowFW.TYPE_ID:
            final WindowFW window = factory.windowRO.wrap(buffer, index, index + length);
            onResponseWindow(window);
            break;
        case SignalFW.TYPE_ID:
            // Signals are not expected on this stream.
            assert false;
            break;
        default:
            break;
        }
    }

    /**
     * Begins the reply stream: registers the throttle, writes the cached
     * response headers (with updated etag / staleness) and attempts a first
     * body flush.
     *
     * @param now     current time, used to compute staleness of the cached entry
     * @param traceId trace correlation id
     */
    void doResponseBegin(
        Instant now,
        long traceId)
    {
        Array32FW<HttpHeaderFW> responseHeaders = cacheEntry.getCachedResponseHeaders();
        factory.router.setThrottle(replyId, this::onResponseMessage);
        final Array32FW<HttpHeaderFW> requestHeaders = cacheEntry.getRequestHeaders();
        factory.writer.doHttpResponseWithUpdatedHeaders(reply,
                                                        routeId,
                                                        replyId,
                                                        replySeq,
                                                        replyAck,
                                                        replyMax,
                                                        responseHeaders,
                                                        requestHeaders,
                                                        cacheEntry.etag(),
                                                        cacheEntry.isStale(now),
                                                        traceId);
        responseProgress = 0;
        doResponseFlush(traceId);
        factory.counters.responses.getAsLong();
        factory.counters.responsesCached.getAsLong();
    }

    /**
     * Writes as much of the remaining cached body as the current window (and,
     * when present, the budget debitor) allows; ends the response once the
     * complete cached body has been written.
     */
    void doResponseFlush(
        long traceId)
    {
        final int remaining = cacheEntry.responseSize() - responseProgress;
        // Unacknowledged bytes currently in flight on the reply stream.
        final int replyNoAck = (int)(replySeq - replyAck);
        final int writable = Math.min(replyMax - replyNoAck - replyPad, remaining);
        if (writable > 0)
        {
            final int maximum = writable + replyPad;
            final int minimum = Math.min(maximum, 1024 + replyPad);
            int claimed = maximum;
            if (replyDebitorIndex != NO_DEBITOR_INDEX)
            {
                // Shared budget in use: claim between minimum and maximum; may return less.
                claimed = replyDebitor.claim(traceId, replyDebitorIndex, replyId, minimum, maximum, 0);
            }
            final int reserved = claimed;
            final int writableMax = reserved - replyPad;
            if (writableMax > 0)
            {
                final BufferPool cacheResponsePool = factory.defaultCache.getResponsePool();
                factory.writer.doHttpData(
                    reply,
                    routeId,
                    replyId,
                    replySeq,
                    replyAck,
                    replyMax,
                    traceId,
                    replyDebitorId,
                    reserved,
                    p -> buildResponsePayload(responseProgress, writableMax, p, cacheResponsePool));
                responseProgress += writableMax;
                // Sequence advances by the reserved amount (payload + padding).
                replySeq += reserved;
                assert replyAck <= replySeq;
            }
        }
        if (cacheEntry.isResponseCompleted() && responseProgress == cacheEntry.responseSize())
        {
            doResponseEnd(traceId);
        }
    }

    /**
     * Aborts the reply stream and releases any held debitor resources.
     */
    void doResponseAbort(
        long traceId)
    {
        factory.writer.doAbort(reply,
                               routeId,
                               replyId,
                               replySeq,
                               replyAck,
                               replyMax,
                               traceId);
        cleanupResponseIfNecessary();
    }

    /**
     * Completes the reply stream: optionally emits a push promise (when polling
     * is requested and the cached response grants a surrogate freshness
     * extension), writes END, cleans up and notifies the reset handler.
     */
    private void doResponseEnd(
        long traceId)
    {
        assert responseProgress == cacheEntry.responseSize();
        final Array32FW<HttpHeaderFW> cachedResponseHeaders = cacheEntry.getCachedResponseHeaders();
        int freshnessExtension = SurrogateControl.getSurrogateFreshnessExtension(cachedResponseHeaders);
        if (promiseNextPollRequest && freshnessExtension > 0)
        {
            factory.counters.promises.getAsLong();
            factory.writer.doHttpPushPromise(reply,
                                             routeId,
                                             replyId,
                                             replySeq,
                                             replyAck,
                                             replyMax,
                                             authorization,
                                             cacheEntry.getRequestHeaders(),
                                             cachedResponseHeaders,
                                             cacheEntry.etag());
        }
        factory.writer.doHttpEnd(reply,
                                 routeId,
                                 replyId,
                                 replySeq,
                                 replyAck,
                                 replyMax,
                                 traceId);
        cleanupResponseIfNecessary();
        resetHandler.accept(this);
    }

    // Client reset the reply stream: release resources and notify the owner.
    private void onResponseReset(
        ResetFW reset)
    {
        cleanupResponseIfNecessary();
        resetHandler.accept(this);
    }

    // New credit arrived: refresh flow-control state and try to flush more body data.
    private void onResponseWindow(
        WindowFW window)
    {
        final long traceId = window.traceId();
        updateBudget(window.budgetId(), window.sequence(), window.acknowledge(), window.maximum(), window.padding());
        doResponseFlush(traceId);
    }

    /**
     * Refreshes flow-control state and lazily acquires the budget debitor the
     * first time a non-zero budget id is seen.
     */
    private void updateBudget(
        long budgetId,
        long sequence,
        long acknowledge,
        int maximum,
        int padding)
    {
        replyDebitorId = budgetId;
        replySeq = sequence;
        replyAck = acknowledge;
        replyMax = maximum;
        replyPad = padding;
        if (replyDebitorId != 0L && replyDebitor == null)
        {
            replyDebitor = factory.supplyDebitor.apply(replyDebitorId);
            replyDebitorIndex = replyDebitor.acquire(replyDebitorId, replyId, this::doResponseFlush);
        }
    }

    /**
     * Entry point for payload assembly: locates the slot containing byte
     * {@code index} of the cached body (slots after NUM_OF_HEADER_SLOTS hold
     * body data) and delegates to the recursive slot walker.
     */
    private void buildResponsePayload(
        int index,
        int length,
        OctetsFW.Builder p,
        BufferPool bp)
    {
        final int slotCapacity = bp.slotCapacity();
        final int startSlot = Math.floorDiv(index, slotCapacity) + NUM_OF_HEADER_SLOTS;
        buildResponsePayload(index, length, p, bp, startSlot);
    }

    /**
     * Recursively copies {@code length} cached body bytes starting at absolute
     * offset {@code index} into the payload builder, one buffer-pool slot per
     * recursion step.
     */
    private void buildResponsePayload(
        int index,
        int length,
        OctetsFW.Builder builder,
        BufferPool bp,
        int slotCnt)
    {
        if (length == 0)
        {
            return;
        }
        final int slotCapacity = bp.slotCapacity();
        // Bytes available in slot slotCnt from offset index to the end of that slot.
        int chunkedWrite = (slotCnt * slotCapacity) - index;
        int slot = cacheEntry.getResponseSlots().get(slotCnt);
        if (chunkedWrite > 0)
        {
            MutableDirectBuffer buffer = bp.buffer(slot);
            int offset = slotCapacity - chunkedWrite;
            int chunkLength = Math.min(chunkedWrite, length);
            builder.put(buffer, offset, chunkLength);
            index += chunkLength;
            length -= chunkLength;
        }
        buildResponsePayload(index, length, builder, bp, ++slotCnt);
    }

    // Releases the budget debitor index, if one was acquired.
    private void cleanupResponseIfNecessary()
    {
        if (replyDebitorIndex != NO_DEBITOR_INDEX)
        {
            replyDebitor.release(replyDebitorIndex, replyId);
            replyDebitorIndex = NO_DEBITOR_INDEX;
            replyDebitor = null;
        }
    }
}
|
/*
* Copyright (c) 2012 - 2020 Arvato Systems GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.arvatosystems.t9t.zkui.converters.grid;
import java.time.Instant;
import com.arvatosystems.t9t.zkui.session.ApplicationSession;
import de.jpaw.bonaparte.core.BonaPortable;
import de.jpaw.bonaparte.pojos.meta.FieldDefinition;
import de.jpaw.dp.Named;
import de.jpaw.dp.Singleton;
/**
 * Grid item converter that renders {@code java.time.Instant} values using the
 * formatting rules of the current {@link ApplicationSession}.
 */
@Singleton
@Named("java.time.Instant")
public class JavaInstantConverter implements IItemConverter<Instant> {

    /**
     * Formats the given instant for display in a grid cell.
     *
     * @param value           the instant to format
     * @param wholeDataObject the row's data object (unused here)
     * @param fieldName       name of the field being rendered (unused here)
     * @param meta            field metadata (unused here)
     * @return the session-formatted representation of {@code value}
     */
    @Override
    public String getFormattedLabel(Instant value, BonaPortable wholeDataObject, String fieldName, FieldDefinition meta) {
        final ApplicationSession session = ApplicationSession.get();
        return session.format(value);
    }
}
|
package com.cg.demo.multi;
/**
 * Small multithreading demo: three threads each count from 1 to 10 with a
 * short pause before every number, illustrating start/join, thread naming
 * and thread priorities.
 */
public class ThreadDemo2 implements Runnable {

    /**
     * Prints the numbers 1 through 10, sleeping 250 ms before each print.
     * Stands in for a longer-running task (e.g. booking movie tickets).
     */
    public void printLoop() {
        int count = 1;
        while (count <= 10) {
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            System.out.println(count);
            count++;
        }
    }

    /** Runnable entry point: delegates to the counting loop. */
    @Override
    public void run() {
        printLoop();
    }

    public static void main(String[] args) throws InterruptedException {
        Thread first = new Thread(new ThreadDemo2());
        Thread second = new Thread(new ThreadDemo2());
        Thread third = new Thread(new ThreadDemo2());

        // Run the first thread to completion before touching the others.
        first.start();
        first.join();

        // Rename the (already finished) first thread, then show all names.
        first.setName("Thread-0");
        System.out.println(first.getName());
        System.out.println(second.getName());
        System.out.println(third.getName());

        // Assign distinct priorities and show them.
        first.setPriority(3);
        second.setPriority(6);
        third.setPriority(9);
        System.out.println(first.getPriority());
        System.out.println(second.getPriority());
        System.out.println(third.getPriority());

        // The remaining two threads run concurrently with each other.
        second.start();
        third.start();
    }
}
|
/*
* MIT License
*
* Copyright (c) 2018 Ensar Sarajčić
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.ensarsarajcic.neovim.java.corerpc.client;
import com.ensarsarajcic.neovim.java.corerpc.message.*;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code PackStream}, verifying that it wires its
 * {@code RPCListener} / {@code RPCSender} collaborators correctly on attach,
 * delegates sends (generating message ids for requests), fans incoming
 * requests/notifications out to registered callbacks, and rejects null
 * constructor arguments.
 *
 * The listener mock is stubbed in {@link #prepareListeners()} BEFORE
 * {@code @InjectMocks} constructs the stream's attachment, so the captors can
 * grab the internal callbacks that {@code PackStream} registers.
 */
@RunWith(MockitoJUnitRunner.class)
public class PackStreamTest {

    @Mock
    RPCListener rpcListener;
    @Mock
    RPCSender rpcSender;
    @Mock
    MessageIdGenerator messageIdGenerator;
    @InjectMocks
    PackStream packStream;
    @Mock
    InputStream inputStream;
    @Mock
    OutputStream outputStream;

    // Connection stub exposing the mocked streams above.
    private RPCConnection connection;
    // Captors for the internal callbacks PackStream registers on the listener.
    private ArgumentCaptor<RPCListener.RequestCallback> packStreamRequestCallback;
    private ArgumentCaptor<RPCListener.NotificationCallback> packStreamNotificationCallback;

    @Before
    public void setUp() throws Exception {
        prepareListeners();
        connection = new RPCConnection() {
            @Override
            public InputStream getIncomingStream() {
                return inputStream;
            }
            @Override
            public OutputStream getOutgoingStream() {
                return outputStream;
            }
            @Override
            public void close() throws IOException {
                // no-op: streams are mocks, nothing to release
            }
        };
    }

    @Test
    public void testAttach() {
        // When attach is called
        packStream.attach(connection);
        // Rpc listener and sender should be prepared
        verify(rpcListener).listenForRequests(any());
        verify(rpcListener).listenForNotifications(any());
        verify(rpcListener).start(inputStream);
        verify(rpcSender).attach(outputStream);
    }

    @Test
    public void testSend() throws IOException {
        // When send is called
        Message message = () -> null;
        packStream.send(message);
        // Rpc sender should be used
        verify(rpcSender).send(message);
    }

    @Test
    public void testSendRequest() throws IOException {
        // Given a proper message id generator
        given(messageIdGenerator.nextId()).willReturn(25);
        // When send is called
        var message = new RequestMessage.Builder("test");
        packStream.send(message);
        // Rpc sender should be used
        var argumentCaptor = ArgumentCaptor.forClass(RequestMessage.class);
        verify(rpcSender).send(argumentCaptor.capture());
        assertEquals("test", argumentCaptor.getValue().getMethod());
        // And id for the message should be generated
        verify(messageIdGenerator).nextId();
        // And put into the message
        assertEquals(25, argumentCaptor.getValue().getId());
    }

    @Test
    public void testSendRequestWithCallback() throws IOException {
        // Given a proper message id generator
        given(messageIdGenerator.nextId()).willReturn(25);
        // And callback
        var responseCallback = Mockito.mock(RPCListener.ResponseCallback.class);
        // When send is called
        var message = new RequestMessage.Builder("test");
        packStream.send(message, responseCallback);
        // Rpc sender should be used
        var argumentCaptor = ArgumentCaptor.forClass(RequestMessage.class);
        verify(rpcSender).send(argumentCaptor.capture());
        assertEquals("test", argumentCaptor.getValue().getMethod());
        // And id for the message should be generated
        verify(messageIdGenerator).nextId();
        // And put into the message
        assertEquals(25, argumentCaptor.getValue().getId());
        // RPC Listener should be used too
        verify(rpcListener).listenForResponse(25, responseCallback);
    }

    /**
     * Drives the captured internal request callback directly to simulate
     * incoming requests, checking add/remove semantics of client callbacks.
     */
    @Test
    public void testRequestCallback() throws IOException {
        // Given a proper rpc listener and attached pack stream
        packStream.attach(connection);
        // When request callback is added
        var firstCallback = Mockito.mock(RPCListener.RequestCallback.class);
        packStream.addRequestCallback(firstCallback);
        // It should receive requests
        var msg1 = new RequestMessage.Builder("test").build();
        packStreamRequestCallback.getValue().requestReceived(msg1);
        verify(firstCallback).requestReceived(msg1);
        // Multiple callbacks should be supported too
        var secondCallback = Mockito.mock(RPCListener.RequestCallback.class);
        packStream.addRequestCallback(secondCallback);
        // Both should receive messages
        var msg2 = new RequestMessage.Builder("test2").build();
        packStreamRequestCallback.getValue().requestReceived(msg2);
        verify(firstCallback).requestReceived(msg2);
        verify(secondCallback).requestReceived(msg2);
        // Removing should be supported
        packStream.removeRequestCallback(firstCallback);
        // Only second should receive message now
        var msg3 = new RequestMessage.Builder("test3").build();
        packStreamRequestCallback.getValue().requestReceived(msg3);
        verify(firstCallback, never()).requestReceived(msg3);
        verify(secondCallback).requestReceived(msg3);
        // Multiple removals
        packStream.removeRequestCallback(secondCallback);
        // None should receive message now
        var msg4 = new RequestMessage.Builder("test4").build();
        packStreamRequestCallback.getValue().requestReceived(msg4);
        verify(firstCallback, never()).requestReceived(msg4);
        verify(secondCallback, never()).requestReceived(msg4);
    }

    /**
     * Same add/remove semantics as {@link #testRequestCallback()}, but for
     * notification callbacks.
     */
    @Test
    public void testNotificationCallback() throws IOException {
        // Given a proper rpc listener and attached pack stream
        packStream.attach(connection);
        // When notification callback is added
        var firstCallback = Mockito.mock(RPCListener.NotificationCallback.class);
        packStream.addNotificationCallback(firstCallback);
        // It should receive requests
        var msg1 = new NotificationMessage.Builder("test").build();
        packStreamNotificationCallback.getValue().notificationReceived(msg1);
        verify(firstCallback).notificationReceived(msg1);
        // Multiple callbacks should be supported too
        var secondCallback = Mockito.mock(RPCListener.NotificationCallback.class);
        packStream.addNotificationCallback(secondCallback);
        // Both should receive messages
        var msg2 = new NotificationMessage.Builder("test2").build();
        packStreamNotificationCallback.getValue().notificationReceived(msg2);
        verify(firstCallback).notificationReceived(msg2);
        verify(secondCallback).notificationReceived(msg2);
        // Removing should be supported
        packStream.removeNotificationCallback(firstCallback);
        // Only second should receive message now
        var msg3 = new NotificationMessage.Builder("test3").build();
        packStreamNotificationCallback.getValue().notificationReceived(msg3);
        verify(firstCallback, never()).notificationReceived(msg3);
        verify(secondCallback).notificationReceived(msg3);
        // Multiple removals
        packStream.removeNotificationCallback(secondCallback);
        // None should receive message now
        var msg4 = new NotificationMessage.Builder("test4").build();
        packStreamNotificationCallback.getValue().notificationReceived(msg4);
        verify(firstCallback, never()).notificationReceived(msg4);
        verify(secondCallback, never()).notificationReceived(msg4);
    }

    /**
     * The two-arg constructor must throw (verified via try/catch), and the
     * three-arg constructor must also throw to satisfy the test's
     * {@code expected} annotation.
     */
    @Test(expected = NullPointerException.class)
    public void noNullRpcListener() {
        // When null rpc listener is passed, constructor should throw an exception
        try {
            new PackStream(rpcSender, null);
            fail("Constructor did not throw an exception");
        } catch (NullPointerException ex) {
            // pass
        }
        new PackStream(rpcSender, null, messageIdGenerator);
    }

    /**
     * Mirror of {@link #noNullRpcListener()} for the sender argument.
     */
    @Test(expected = NullPointerException.class)
    public void noNullRpcSender() {
        // When null rpc sender is passed, constructor should throw an exception
        try {
            new PackStream(null, rpcListener);
            fail("Constructor did not throw an exception");
        } catch (NullPointerException ex) {
            // pass
        }
        new PackStream(null, rpcListener, messageIdGenerator);
    }

    @Test(expected = NullPointerException.class)
    public void noNullMessageIdGenerator() {
        // When null message id generator is passed, constructor should throw an exception
        new PackStream(rpcSender, rpcListener, null);
    }

    /**
     * Stubs the listener so the captors record the internal callbacks that
     * PackStream registers; capture() inside doNothing().when(...) wires the
     * captors into the stubbing itself.
     */
    private void prepareListeners() {
        packStreamNotificationCallback = ArgumentCaptor.forClass(RPCListener.NotificationCallback.class);
        doNothing().when(rpcListener).listenForNotifications(packStreamNotificationCallback.capture());
        packStreamRequestCallback = ArgumentCaptor.forClass(RPCListener.RequestCallback.class);
        doNothing().when(rpcListener).listenForRequests(packStreamRequestCallback.capture());
    }
}
|
package io.healthe.util;
import android.annotation.SuppressLint;
import android.app.ActivityManager;
import android.content.Context;
import android.support.annotation.NonNull;
import com.bumptech.glide.GlideBuilder;
import com.bumptech.glide.annotation.GlideModule;
import com.bumptech.glide.load.DecodeFormat;
import com.bumptech.glide.module.AppGlideModule;
import com.bumptech.glide.request.RequestOptions;
/**
* Required GlideModule for v4 API
*/
/**
 * Glide v4 module that tunes default decode options for the app: lower-memory
 * RGB_565 bitmaps on low-RAM devices, full-quality ARGB_8888 otherwise, and
 * hardware bitmap configs disabled globally.
 */
@GlideModule
public class HealtheGlideModule extends AppGlideModule {

    @Override
    public boolean isManifestParsingEnabled() {
        // Legacy (v3) manifest-declared modules are not used; skipping the
        // manifest scan speeds up Glide initialization.
        return false;
    }

    @SuppressLint("CheckResult")
    @Override
    public void applyOptions(@NonNull Context context, @NonNull GlideBuilder builder) {
        final RequestOptions defaultOptions = new RequestOptions();
        final ActivityManager activityManager =
                (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        if (activityManager != null) {
            // Low-RAM devices trade image quality for memory: RGB_565 uses half
            // the bytes per pixel of ARGB_8888.
            defaultOptions.format(activityManager.isLowRamDevice()
                    ? DecodeFormat.PREFER_RGB_565
                    : DecodeFormat.PREFER_ARGB_8888);
        }
        defaultOptions.disallowHardwareConfig();
        builder.setDefaultRequestOptions(defaultOptions);
    }
}
|
package com.jtelegram.api.requests.chat.admin;
import com.jtelegram.api.chat.id.ChatId;
import com.jtelegram.api.ex.TelegramException;
import com.jtelegram.api.requests.message.framework.req.UserAdminChatRequest;
import lombok.Builder;
import lombok.Getter;
import lombok.ToString;
import java.util.function.Consumer;
/**
 * Telegram Bot API request for the "unbanChatMember" method, which lifts a ban
 * on a user in a supergroup or channel.
 */
@Getter
@ToString
public class UnbanChatMember extends UserAdminChatRequest {
    /**
     * Builds the request.
     *
     * @param errorHandler invoked with the {@link TelegramException} if the request fails
     * @param callback     invoked when the request completes successfully
     * @param chatId       the chat in which to unban the user
     * @param userId       id of the user to unban
     */
    @Builder
    public UnbanChatMember(Consumer<TelegramException> errorHandler, Runnable callback, ChatId chatId, Long userId) {
        super("unbanChatMember", errorHandler, callback, chatId, userId);
    }
}
|
package command;
import java.util.List;
import parser.Node;
import workspace.Workspace;
/**
 * Zero-argument command that reports how many turtles currently exist in the
 * workspace.
 */
public class TurtlesCommand extends Command {

    /** Creates the command; it takes no arguments (arity 0). */
    public TurtlesCommand (String s) {
        super(s, 0);
    }

    /**
     * Returns the number of turtles in {@code workspace}.
     */
    @Override
    public double execute (List<Node> inputs, Workspace workspace) throws Exception {
        final int turtleCount = workspace.getTurtles().size();
        return turtleCount;
    }
}
|
package com.zednight.controller.adminPage.caddy;
import cn.hutool.core.codec.Base64;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.net.URLDecoder;
import cn.hutool.core.net.URLEncoder;
import cn.hutool.core.util.CharsetUtil;
import cn.hutool.core.util.RuntimeUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.zednight.config.InitConfig;
import com.zednight.config.VersionConfig;
import com.zednight.controller.adminPage.MainController;
import com.zednight.ext.ConfExt;
import com.zednight.ext.ConfFile;
import com.zednight.service.*;
import com.zednight.utils.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.ClassPathResource;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.List;
/**
 * Admin-page controller for managing the Caddy web server's configuration:
 * building/previewing the generated Caddyfile, writing it to disk, validating
 * it, and starting/stopping/reloading the caddy process via shell commands.
 * Responses embed localized messages (looked up via {@code m.get(...)}) and
 * HTML snippets rendered by the admin UI.
 */
@Controller
@RequestMapping("/adminPage/caddyfile")
public class CaddyfileController extends BaseController {
    @Autowired
    UpstreamService upstreamService;
    @Autowired
    SettingService settingService;
    @Autowired
    ServerService serverService;
    @Autowired
    CaddyfileService caddyService;
    @Autowired
    MainController mainController;
    @Autowired
    VersionConfig versionConfig;
    // Application version from build properties; compared against the remote
    // version in update().
    @Value("${project.version}")
    String currentVersion;

    /**
     * Renders the Caddyfile admin page, pre-populated with the stored caddy
     * paths and the "decompose" (split config into files) flag.
     */
    @RequestMapping("")
    public ModelAndView index(ModelAndView modelAndView) {
        String caddyPath = settingService.get("caddyPath");
        modelAndView.addObject("caddyPath", caddyPath);
        String caddyExe = settingService.get("caddyExe");
        modelAndView.addObject("caddyExe", caddyExe);
        String caddyDir = settingService.get("caddyDir");
        modelAndView.addObject("caddyDir", caddyDir);
        String decompose = settingService.get("decompose");
        modelAndView.addObject("decompose", decompose);
        modelAndView.addObject("tmp", InitConfig.home + "temp/caddfile");
        modelAndView.setViewName("/adminPage/caddyConf/index");
        return modelAndView;
    }

    /**
     * Reports whether the caddy process is currently running, as a colored
     * HTML status snippet.
     */
    @RequestMapping(value = "caddyStatus")
    @ResponseBody
    public JsonResult caddyStatus() {
        if (CaddyUtils.isRun()) {
            return renderSuccess(m.get("confStr.caddyStatus") + ":<span class='green'>" + m.get("confStr.running") + "</span>");
        } else {
            return renderSuccess(m.get("confStr.caddyStatus") + ":<span class='red'>" + m.get("confStr.stopped") + "</span>");
        }
    }

    /**
     * Overwrites the Caddyfile on disk with new content.
     *
     * @param json JSON payload carrying "caddyPath" and Base64+URL-encoded
     *             "caddyContent"; when empty, a freshly generated config from
     *             {@link #getReplaceJson()} is used instead
     */
    @RequestMapping(value = "replace")
    @ResponseBody
    public JsonResult replace(String json) {
        if (StrUtil.isEmpty(json)) {
            json = getReplaceJson();
        }
        JSONObject jsonObject = JSONUtil.parseObj(json);
        String caddyPath = jsonObject.getStr("caddyPath");
        String caddyContent = Base64.decodeStr(jsonObject.getStr("caddyContent"), CharsetUtil.CHARSET_UTF_8);
        // Content is double-encoded (Base64 over URL-encoding); "<wave>" is the
        // client-side placeholder for the '~' character.
        caddyContent = URLDecoder.decode(caddyContent, CharsetUtil.CHARSET_UTF_8).replace("<wave>", "~");
        if (caddyPath == null) {
            caddyPath = settingService.get("caddyPath");
        }
        if (FileUtil.isDirectory(caddyPath)) {
            // Path points at a directory — report an error.
            return renderError(m.get("confStr.error2"));
        }
        if (!FileUtil.exist(caddyPath)) {
            return renderError(m.get("confStr.error1"));
        }
        try {
            caddyService.replace(caddyPath, caddyContent);
            return renderSuccess(m.get("confStr.replaceSuccess"));
        } catch (Exception e) {
            e.printStackTrace();
            return renderError(m.get("confStr.error3") + ":" + e.getMessage());
        }
    }

    /**
     * Builds a replace() payload from the freshly generated configuration:
     * the main config plus any decomposed sub-files, each URL- then
     * Base64-encoded (mirroring what the UI would send).
     */
    public String getReplaceJson() {
        String decompose = settingService.get("decompose");
        ConfExt confExt = caddyService.buildConf(StrUtil.isNotEmpty(decompose) && decompose.equals("true"), false);
        URLEncoder urlEncoder = new URLEncoder();
        JSONObject jsonObject = new JSONObject();
        jsonObject.set("caddyContent", Base64.encode(urlEncoder.encode(confExt.getConf(), CharsetUtil.CHARSET_UTF_8)));
        jsonObject.set("subContent", new JSONArray());
        jsonObject.set("subName", new JSONArray());
        for (ConfFile confFile : confExt.getFileList()) {
            jsonObject.getJSONArray("subContent").add(Base64.encode(urlEncoder.encode(confFile.getConf(), CharsetUtil.CHARSET_UTF_8)));
            jsonObject.getJSONArray("subName").add(confFile.getName());
        }
        return jsonObject.toStringPretty();
    }

    /**
     * Writes the generated config to a temp file and runs
     * "caddy validate --config" against it, returning the console output.
     */
    @RequestMapping(value = "check")
    @ResponseBody
    public JsonResult check(String caddyPath, String caddyExe, String caddyDir) {
        if (caddyExe == null) {
            caddyExe = settingService.get("caddyExe");
        }
        if (caddyDir == null) {
            caddyDir = settingService.get("caddyDir");
        }
        String decompose = settingService.get("decompose");
        String rs = null;
        String cmd = null;
        // Start from a clean temp directory for the validation file.
        FileUtil.del(InitConfig.home + "temp");
        String fileTemp = InitConfig.home + "temp/Caddyfile";
        try {
            ConfExt confExt = caddyService.buildConf(StrUtil.isNotEmpty(decompose) && decompose.equals("true"), true);
            FileUtil.writeString(confExt.getConf(), fileTemp, CharsetUtil.CHARSET_UTF_8);
            // NOTE(review): both branches are identical — the OS check is
            // redundant here; confirm whether Windows needs a different command.
            if (SystemTool.isWindows()) {
                cmd = caddyExe + " validate --config " + fileTemp;
            } else {
                cmd = caddyExe + " validate --config " + fileTemp;
            }
            rs = RuntimeUtil.execForStr(cmd);
        } catch (Exception e) {
            e.printStackTrace();
            rs = e.getMessage().replace("\n", "<br>");
        }
        cmd = "<span class='blue'>" + "执行命令如下:" + cmd + "</span>";
        // "Valid configuration" in the output marks a successful validation.
        if (rs.contains("Valid configuration")) {
            return renderSuccess(cmd + "<br> <span class='green' style='font-size:18px'>" + m.get("confStr.verifySuccess") + "</span> <br>" + "<br> 控制台输出如下:<br>" + rs.replace("\n", "<br>"));
        } else {
            return renderError(cmd + "<br> <span class='red' style='font-size:18px'>" + m.get("confStr.verifyFail") + "</span> <br>" + "<br> 控制台输出如下:<br>" + rs.replace("\n", "<br>"));
        }
    }

    /**
     * Persists the three caddy path settings, normalizing Windows backslashes
     * to forward slashes.
     */
    @RequestMapping(value = "saveCmd")
    @ResponseBody
    public JsonResult saveCmd(String caddyPath, String caddyExe, String caddyDir) {
        caddyPath = caddyPath.replace("\\", "/");
        settingService.set("caddyPath", caddyPath);
        caddyExe = caddyExe.replace("\\", "/");
        settingService.set("caddyExe", caddyExe);
        caddyDir = caddyDir.replace("\\", "/");
        settingService.set("caddyDir", caddyDir);
        return renderSuccess();
    }

    /**
     * Runs "caddy reload --config" and reports the outcome. Synchronized so
     * concurrent admin requests cannot trigger overlapping reloads.
     * Missing parameters fall back to the stored settings.
     */
    @RequestMapping(value = "reload")
    @ResponseBody
    public synchronized JsonResult reload(String caddyPath, String caddyExe, String caddyDir) {
        if (caddyPath == null) {
            caddyPath = settingService.get("caddyPath");
        }
        if (caddyExe == null) {
            caddyExe = settingService.get("caddyExe");
        }
        if (caddyDir == null) {
            caddyDir = settingService.get("caddyDir");
        }
        try {
            String cmd = caddyExe + " reload --config " + caddyPath;
            String rs = RuntimeUtil.execForStr(cmd);
            cmd = "<span class='blue'>" + "执行命令如下:" + cmd + "</span>";
            // Empty output or the "using provided configuration" line indicates success.
            if (StrUtil.isEmpty(rs) || rs.contains("using provided configuration")) {
                return renderSuccess(cmd + "<br> <span class='green' style='font-size:18px'>" + m.get("confStr.reloadSuccess") + "</span> <br>" + "<br> 控制台输出如下:<br>" + rs.replace("\n", "<br>"));
            } else {
                // These outputs typically mean caddy is not running; append a hint.
                if (rs.contains(" no config file to load") || rs.contains("caddy.pid") || rs.contains("PID")) {
                    rs = rs + m.get("confStr.mayNotRun");
                }
                return renderError(cmd + "<br> <span class='red' style='font-size:18px'>" + m.get("confStr.reloadFail") + "</span> <br>" + "<br> 控制台输出如下:<br>" + rs.replace("\n", "<br>"));
            }
        } catch (Exception e) {
            e.printStackTrace();
            return renderError(m.get("confStr.reloadFail") + "<br>" + e.getMessage().replace("\n", "<br>"));
        }
    }

    /**
     * Starts the caddy process. On Windows a detached "cmd /c start" is used
     * (so no output is captured); elsewhere the executable is run directly and
     * its console output is returned. Missing parameters fall back to the
     * stored settings.
     */
    @RequestMapping(value = "start")
    @ResponseBody
    public JsonResult start(String caddyPath, String caddyExe, String caddyDir) {
        if (caddyPath == null) {
            caddyPath = settingService.get("caddyPath");
        }
        if (caddyExe == null) {
            caddyExe = settingService.get("caddyExe");
        }
        if (caddyDir == null) {
            caddyDir = settingService.get("caddyDir");
        }
        try {
            String rs = "";
            String cmd;
            if (SystemTool.isWindows()) {
                cmd = "cmd /c start caddy.exe" + " -c " + caddyPath + " -p " + caddyDir;
                RuntimeUtil.exec(new String[]{}, new File(caddyDir), cmd);
            } else {
                cmd = caddyExe + " -c " + caddyPath;
                if (StrUtil.isNotEmpty(caddyDir)) {
                    cmd += " -p " + caddyDir;
                }
                rs = RuntimeUtil.execForStr(cmd);
            }
            cmd = "<span class='blue'>" + cmd + "</span>";
            if (StrUtil.isEmpty(rs) || rs.contains("signal process started")) {
                return renderSuccess(cmd + "<br>" + m.get("confStr.startSuccess") + "<br>" + rs.replace("\n", "<br>"));
            } else {
                return renderError(cmd + "<br>" + m.get("confStr.startFail") + "<br>" + rs.replace("\n", "<br>"));
            }
        } catch (Exception e) {
            e.printStackTrace();
            return renderError(m.get("confStr.startFail") + "<br>" + e.getMessage().replace("\n", "<br>"));
        }
    }

    /**
     * Stops the caddy process (taskkill on Windows, pkill elsewhere) and
     * reports the command output.
     */
    @RequestMapping(value = "stop")
    @ResponseBody
    public JsonResult stop(String caddyExe, String caddyDir) {
        if (caddyExe == null) {
            caddyExe = settingService.get("caddyExe");
        }
        if (caddyDir == null) {
            caddyDir = settingService.get("caddyDir");
        }
        try {
            String cmd;
            if (SystemTool.isWindows()) {
                cmd = "taskkill /im /f caddy.exe ";
            } else {
                cmd = "pkill caddy";
            }
            String rs = RuntimeUtil.execForStr(cmd);
            cmd = "<span class='blue'>" + cmd + "</span>";
            if (StrUtil.isEmpty(rs) || rs.contains("已终止进程") || rs.toLowerCase().contains("terminated process")) {
                return renderSuccess(cmd + "<br>" + m.get("confStr.stopSuccess") + "<br>" + rs.replace("\n", "<br>"));
            } else {
                return renderError(cmd + "<br>" + m.get("confStr.stopFail") + "<br>" + rs.replace("\n", "<br>"));
            }
        } catch (Exception e) {
            e.printStackTrace();
            return renderError(m.get("confStr.stopFail") + "<br>" + e.getMessage().replace("\n", "<br>"));
        }
    }

    /**
     * Runs an arbitrary user-supplied command and stores it under the given
     * settings key for later recall via {@link #getLastCmd(String)}.
     * ".exe" commands are launched without waiting for output.
     *
     * NOTE(review): this executes unsanitized admin input on the host — the
     * endpoint relies entirely on the admin-page authentication for safety.
     */
    @RequestMapping(value = "runCmd")
    @ResponseBody
    public JsonResult runCmd(String cmd, String type) {
        settingService.set(type, cmd);
        try {
            String rs = "";
            if (cmd.contains(".exe")) {
                RuntimeUtil.exec(cmd);
            } else {
                rs = RuntimeUtil.execForStr(cmd);
            }
            cmd = "<span class='blue'>" + cmd + "</span>";
            if (StrUtil.isEmpty(rs) || rs.contains("已终止进程") || rs.contains("signal process started") || rs.toLowerCase().contains("terminated process")) {
                return renderSuccess(cmd + "<br>" + m.get("confStr.runSuccess") + "<br>" + rs.replace("\n", "<br>"));
            } else {
                return renderError(cmd + "<br>" + m.get("confStr.runFail") + "<br>" + rs.replace("\n", "<br>"));
            }
        } catch (Exception e) {
            e.printStackTrace();
            return renderError(m.get("confStr.runFail") + "<br>" + e.getMessage().replace("\n", "<br>"));
        }
    }

    /** Returns the last command stored under the given settings key. */
    @RequestMapping(value = "getLastCmd")
    @ResponseBody
    public JsonResult getLastCmd(String type) {
        return renderSuccess(settingService.get(type));
    }

    /** Returns the generated Caddyfile content. */
    @RequestMapping(value = "loadCaddyfile")
    @ResponseBody
    public JsonResult loadCaddyfile() {
        ConfExt confExt = caddyService.buildCaddyfile();
        return renderSuccess(confExt);
    }

    /** Returns the generated configuration, honoring the "decompose" setting. */
    @RequestMapping(value = "loadConf")
    @ResponseBody
    public JsonResult loadConf() {
        String decompose = settingService.get("decompose");
        ConfExt confExt = caddyService.buildConf(StrUtil.isNotEmpty(decompose) && decompose.equals("true"), false);
        return renderSuccess(confExt);
    }

    /**
     * Loads the configuration currently on disk: the main file at
     * {@code caddyPath} plus, when decomposed, the sub-files under conf.d/,
     * filled into the generated structure for display.
     */
    @RequestMapping(value = "loadOrg")
    @ResponseBody
    public JsonResult loadOrg(String caddyPath) {
        String decompose = settingService.get("decompose");
        ConfExt confExt = caddyService.buildConf(StrUtil.isNotEmpty(decompose) && decompose.equals("true"), false);
        if (StrUtil.isNotEmpty(caddyPath) && FileUtil.exist(caddyPath) && FileUtil.isFile(caddyPath)) {
            String orgStr = FileUtil.readString(caddyPath, StandardCharsets.UTF_8);
            confExt.setConf(orgStr);
            for (ConfFile confFile : confExt.getFileList()) {
                confFile.setConf("");
                // Sub-files live next to the main config, under conf.d/.
                String filePath = caddyPath.replace("caddy.conf", "conf.d/" + confFile.getName());
                if (FileUtil.exist(filePath)) {
                    confFile.setConf(FileUtil.readString(filePath, StandardCharsets.UTF_8));
                }
            }
            return renderSuccess(confExt);
        } else {
            if (FileUtil.isDirectory(caddyPath)) {
                return renderError(m.get("confStr.error2"));
            }
            return renderError(m.get("confStr.caddyNotExist"));
        }
    }

    /** Persists the "decompose" (split config into files) flag. */
    @RequestMapping(value = "decompose")
    @ResponseBody
    public JsonResult decompose(String decompose) {
        settingService.set("decompose", decompose);
        return renderSuccess();
    }

    /**
     * Checks for a newer application version and triggers the auto-update when
     * the remote version is higher. Versions are compared numerically after
     * stripping dots and the "v" prefix.
     */
    @RequestMapping(value = "update")
    @ResponseBody
    public JsonResult update() {
        versionConfig.getNewVersion();
        if (Integer.parseInt(currentVersion.replace(".", "").replace("v", "")) < Integer.parseInt(versionConfig.getVersion().getVersion().replace(".", "").replace("v", ""))) {
            mainController.autoUpdate(versionConfig.getVersion().getUrl());
            return renderSuccess(m.get("confStr.updateSuccess"));
        } else {
            return renderSuccess(m.get("confStr.noNeedUpdate"));
        }
    }

    /** Generic settings read endpoint. */
    @RequestMapping(value = "getKey")
    @ResponseBody
    public JsonResult getKey(String key) {
        return renderSuccess(settingService.get(key));
    }

    /** Generic settings write endpoint. */
    @RequestMapping(value = "setKey")
    @ResponseBody
    public JsonResult setKey(String key, String val) {
        settingService.set(key, val);
        return renderSuccess();
    }
}
|
package com.kientpham.sample;
import lombok.Getter;
import lombok.Setter;
/**
 * Minimal shared data-transfer object; accessors are generated by Lombok.
 */
@Getter
@Setter
public class SharedDTO {
    // Free-form payload field carried between layers.
    private String anything;
}
|
/*
* Copyright OpenSearch Contributors
* SPDX-License-Identifier: Apache-2.0
*/
package org.opensearch.sql.data.model;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_FALSE;
import static org.opensearch.sql.data.model.ExprValueUtils.LITERAL_NULL;
import static org.opensearch.sql.utils.ComparisonUtil.compare;
import org.junit.jupiter.api.Test;
import org.opensearch.sql.data.type.ExprCoreType;
import org.opensearch.sql.exception.ExpressionEvaluationException;
/** Unit tests for the NULL expression literal ({@code LITERAL_NULL}). */
public class ExprNullValueTest {
    /** The NULL literal must report itself as null. */
    @Test
    public void test_is_null() {
        assertTrue(LITERAL_NULL.isNull());
    }

    /** Unwrapping the NULL literal yields a Java null. */
    @Test
    public void getValue() {
        assertNull(LITERAL_NULL.value());
    }

    /** NULL carries the UNDEFINED core type. */
    @Test
    public void getType() {
        assertEquals(ExprCoreType.UNDEFINED, LITERAL_NULL.type());
    }

    /** String form of the NULL literal is the fixed token "NULL". */
    @Test
    public void toStringTest() {
        assertEquals("NULL", LITERAL_NULL.toString());
    }

    /** NULL equals itself but never a non-null value, in either comparison direction. */
    @Test
    public void equal() {
        assertTrue(LITERAL_NULL.equals(LITERAL_NULL));
        assertFalse(LITERAL_FALSE.equals(LITERAL_NULL));
        assertFalse(LITERAL_NULL.equals(LITERAL_FALSE));
    }

    /** Ordering comparisons on NULL are rejected with a descriptive exception. */
    @Test
    public void comparabilityTest() {
        ExpressionEvaluationException exception = assertThrows(ExpressionEvaluationException.class,
            () -> compare(LITERAL_NULL, LITERAL_NULL));
        assertEquals("invalid to call compare operation on null value", exception.getMessage());
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.authorization.util;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.util.NiFiProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Utility for building and applying identity mappings (regex pattern -&gt; replacement value)
 * configured in nifi.properties.
 */
public class IdentityMappingUtil {

    private static final Logger LOGGER = LoggerFactory.getLogger(IdentityMappingUtil.class);

    // Matches $1, $2, ... back references inside a replacement value
    private static final Pattern backReferencePattern = Pattern.compile("\\$(\\d+)");

    private IdentityMappingUtil() {
        // Utility class; no instances.
    }

    /**
     * Builds the identity mappings from NiFiProperties.
     *
     * @param properties the NiFiProperties instance
     * @return a list of identity mappings, sorted by key
     */
    public static List<IdentityMapping> getIdentityMappings(final NiFiProperties properties) {
        final List<IdentityMapping> mappings = new ArrayList<>();

        // Scan every property for a mapping-pattern prefix; each pattern property must have a
        // matching value property with the same key suffix, otherwise it is skipped with a warning.
        for (String propertyName : properties.getPropertyKeys()) {
            if (StringUtils.startsWith(propertyName, NiFiProperties.SECURITY_IDENTITY_MAPPING_PATTERN_PREFIX)) {
                final String key = StringUtils.substringAfter(propertyName, NiFiProperties.SECURITY_IDENTITY_MAPPING_PATTERN_PREFIX);
                final String identityPattern = properties.getProperty(propertyName);

                if (StringUtils.isBlank(identityPattern)) {
                    LOGGER.warn("Identity Mapping property {} was found, but was empty", propertyName);
                    continue;
                }

                final String identityValueProperty = NiFiProperties.SECURITY_IDENTITY_MAPPING_VALUE_PREFIX + key;
                final String identityValue = properties.getProperty(identityValueProperty);

                if (StringUtils.isBlank(identityValue)) {
                    LOGGER.warn("Identity Mapping property {} was found, but corresponding value {} was not found",
                            propertyName, identityValueProperty);
                    continue;
                }

                final IdentityMapping identityMapping = new IdentityMapping(key, Pattern.compile(identityPattern), identityValue);
                mappings.add(identityMapping);

                LOGGER.debug("Found Identity Mapping with key = {}, pattern = {}, value = {}",
                        key, identityPattern, identityValue);
            }
        }

        // Sort the list by key so users can control the ordering in nifi.properties
        mappings.sort(Comparator.comparing(IdentityMapping::getKey));

        return mappings;
    }

    /**
     * Checks the given identity against each provided mapping and performs the mapping using the first one that matches.
     * If none match then the identity is returned as is.
     *
     * @param identity the identity to map
     * @param mappings the mappings
     * @return the mapped identity, or the same identity if no mappings matched
     */
    public static String mapIdentity(final String identity, List<IdentityMapping> mappings) {
        for (IdentityMapping mapping : mappings) {
            Matcher m = mapping.getPattern().matcher(identity);
            if (m.matches()) {
                final String replacementValue = escapeLiteralBackReferences(mapping.getReplacementValue(), m.groupCount());
                // Replace via the already-compiled Matcher so that any flags compiled into the
                // mapping's Pattern (e.g. CASE_INSENSITIVE) are honored; the previous
                // String.replaceAll(pattern, ...) recompiled the pattern text with default flags.
                return m.replaceAll(replacementValue);
            }
        }
        return identity;
    }

    // If we find a back reference that is not valid, then we will treat it as a literal string. For example, if we have 3 capturing
    // groups and the Replacement Value has the value is "I owe $8 to him", then we want to treat the $8 as a literal "$8", rather
    // than attempting to use it as a back reference.
    private static String escapeLiteralBackReferences(final String unescaped, final int numCapturingGroups) {
        if (numCapturingGroups == 0) {
            return unescaped;
        }

        String value = unescaped;
        final Matcher backRefMatcher = backReferencePattern.matcher(value);
        while (backRefMatcher.find()) {
            final String backRefNum = backRefMatcher.group(1);
            if (backRefNum.startsWith("0")) {
                // $0xx is never a group reference here; leave it alone
                continue;
            }
            final int originalBackRefIndex = Integer.parseInt(backRefNum);
            int backRefIndex = originalBackRefIndex;

            // if we have a replacement value like $123, and we have less than 123 capturing groups, then
            // we want to truncate the 3 and use capturing group 12; if we have less than 12 capturing groups,
            // then we want to truncate the 2 and use capturing group 1; if we don't have a capturing group then
            // we want to truncate the 1 and get 0.
            while (backRefIndex > numCapturingGroups && backRefIndex >= 10) {
                backRefIndex /= 10;
            }

            if (backRefIndex > numCapturingGroups) {
                // Not a valid group reference: escape the '$' so it is treated literally
                final StringBuilder sb = new StringBuilder(value.length() + 1);
                final int groupStart = backRefMatcher.start(1);

                sb.append(value.substring(0, groupStart - 1));
                sb.append("\\");
                sb.append(value.substring(groupStart - 1));
                value = sb.toString();
            }
        }

        return value;
    }
}
|
package io.gitlab.arturbosch.jpal.dummies;
import io.gitlab.arturbosch.jpal.dummies.resolving.LongChainResolving;
import io.gitlab.arturbosch.jpal.dummies.resolving.SolveTypeDummy;
import io.gitlab.arturbosch.jpal.dummies.resolving.SubSolveTypeDummy;
/**
 * Test fixture for jpal's symbol/type-resolution logic. The bodies below are parsed
 * and analysed as source code by the resolving test suite, so the constructs are
 * intentionally contrived — do NOT "clean up" or restructure this code.
 *
 * @author Artur Bosch
 */
@SuppressWarnings("ALL")
public class ResolvingDummy {
    private int a = 5;
    private int b = 5;
    private int x = 5;
    private InnerResolvingDummy inner = new InnerResolvingDummy();
    private SolveTypeDummy solveDummy = new SolveTypeDummy();

    // variable resolving: mixes a parameter, a local, and two fields
    public int m(int d) {
        int c = 5;
        if (a == c) {
            b += c;
        }
        return b + d;
    }

    // same symbol name in different variables: local 'x' shadows the field 'x'
    public void m2() {
        int x = 0;
        while (true) {
            this.x = x + 1;
            int xnew = x + 2;
        }
    }

    // resolving 'this' or one level calls/accesses
    public String m3() {
        String method = solveDummy.method(x);
        m2();
        int x = this.x;
        return inner.s + method + inner.call();
    }

    // resolving method chaining + object creation
    public void m4() {
        new ChainResolving().inner.call();
        String result = new ChainResolving().chain().method(1);
        SolveTypeDummy.instance().method(SolveTypeDummy.instanceField);
    }

    // how about very long chains?
    public void m5() {
        new LongChainResolving().firstChain.secondChain.thirdChain
                .fourthChain.fifthChain.selfChain.selfChain.getAnInt();
    }

    // Simple Inheritence: members reached through a subclass
    public void m6() {
        new SubSolveTypeDummy().stuff();
        new SubSolveTypeDummy().method(new SubSolveTypeDummy().MAGIC_NUMBER);
        new SubSolveTypeDummy().instance().instance().method(5);
    }

    // builder pattern: fluent chain returning 'this' until build()
    public void m7() {
        new ResolvingBuilder().withId(1).withName("abc").build();
    }

    // Inner class accessed both via field and via direct instantiation above
    class InnerResolvingDummy {
        String s;

        String call() {
            return "";
        }
    }

    class ChainResolving {
        private InnerResolvingDummy inner = new InnerResolvingDummy();

        SolveTypeDummy chain() {
            return new SolveTypeDummy();
        }
    }

    class ResolvingBuilder {
        ResolvingBuilder withId(int i){
            return this;
        }

        ResolvingBuilder withName(String s){
            return this;
        }

        ResolvingDummy build() {
            return new ResolvingDummy();
        }
    }
}
|
package nl.mwensveen.adventofcode.year_2020.day_15;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.Test;
/**
 * Tests for the AoC 2020 day 15 "memory game": given a list of starting numbers,
 * {@code findNumber(turn, start)} returns the number spoken on the given turn.
 */
public class GamePlayerTest {

    /** Walks the worked 0,3,6 example turn by turn, including the 30,000,000-turn part-two case. */
    @Test
    public void testFindNumber() throws Exception {
        List<Long> startingNumbers = Arrays.asList(0L, 3L, 6L);
        GamePlayer player = new GamePlayer();
        assertEquals(0, player.findNumber(4, startingNumbers));
        assertEquals(3, player.findNumber(5, startingNumbers));
        assertEquals(3, player.findNumber(6, startingNumbers));
        assertEquals(1, player.findNumber(7, startingNumbers));
        assertEquals(0, player.findNumber(8, startingNumbers));
        assertEquals(4, player.findNumber(9, startingNumbers));
        assertEquals(0, player.findNumber(10, startingNumbers));
        assertEquals(436, player.findNumber(2020, startingNumbers));
        assertEquals(175594, player.findNumber(30000000, startingNumbers));
    }

    /** Puzzle-provided example: starting 1,3,2 speaks 1 on turn 2020. */
    @Test
    public void testFindNumber2() throws Exception {
        List<Long> startingNumbers = Arrays.asList(1L, 3L, 2L);
        GamePlayer player = new GamePlayer();
        assertEquals(1, player.findNumber(2020, startingNumbers));
    }

    /** Puzzle-provided example: starting 2,1,3 speaks 10 on turn 2020. */
    @Test
    public void testFindNumber3() throws Exception {
        List<Long> startingNumbers = Arrays.asList(2L, 1L, 3L);
        GamePlayer player = new GamePlayer();
        assertEquals(10, player.findNumber(2020, startingNumbers));
    }

    /** Puzzle-provided example: starting 1,2,3 speaks 27 on turn 2020. */
    @Test
    public void testFindNumber4() throws Exception {
        List<Long> startingNumbers = Arrays.asList(1L, 2L, 3L);
        GamePlayer player = new GamePlayer();
        assertEquals(27, player.findNumber(2020, startingNumbers));
    }

    /** Puzzle-provided example: starting 2,3,1 speaks 78 on turn 2020. */
    @Test
    public void testFindNumber5() throws Exception {
        List<Long> startingNumbers = Arrays.asList(2L, 3L, 1L);
        GamePlayer player = new GamePlayer();
        assertEquals(78, player.findNumber(2020, startingNumbers));
    }

    /** Puzzle-provided example: starting 3,2,1 speaks 438 on turn 2020. */
    @Test
    public void testFindNumber6() throws Exception {
        List<Long> startingNumbers = Arrays.asList(3L, 2L, 1L);
        GamePlayer player = new GamePlayer();
        assertEquals(438, player.findNumber(2020, startingNumbers));
    }

    /** Puzzle-provided example: starting 3,1,2 speaks 1836 on turn 2020. */
    @Test
    public void testFindNumber7() throws Exception {
        List<Long> startingNumbers = Arrays.asList(3L, 1L, 2L);
        GamePlayer player = new GamePlayer();
        assertEquals(1836, player.findNumber(2020, startingNumbers));
    }
}
|
/*-
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.deeplearning4j.eval;
import com.google.common.base.Preconditions;
import lombok.*;
import lombok.extern.slf4j.Slf4j;
import org.deeplearning4j.eval.meta.Prediction;
import org.deeplearning4j.eval.serde.ConfusionMatrixDeserializer;
import org.deeplearning4j.eval.serde.ConfusionMatrixSerializer;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.graph.ComputationGraph;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.accum.MatchCondition;
import org.nd4j.linalg.api.ops.impl.transforms.Not;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.conditions.Conditions;
import org.nd4j.linalg.lossfunctions.serde.RowVectorDeserializer;
import org.nd4j.linalg.lossfunctions.serde.RowVectorSerializer;
import org.nd4j.linalg.primitives.Counter;
import org.nd4j.linalg.primitives.Pair;
import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties;
import org.nd4j.shade.jackson.databind.annotation.JsonDeserialize;
import org.nd4j.shade.jackson.databind.annotation.JsonSerialize;
import java.io.Serializable;
import java.text.DecimalFormat;
import java.util.*;
/**
* Evaluation metrics:<br>
* - precision, recall, f1, fBeta, accuracy, Matthews correlation coefficient, gMeasure<br>
* - Top N accuracy (if using constructor {@link #Evaluation(List, int)})<br>
* - Custom binary evaluation decision threshold (use constructor {@link #Evaluation(double)} (default if not set is
* argmax / 0.5)<br>
* - Custom cost array, using {@link #Evaluation(INDArray)} or {@link #Evaluation(List, INDArray)} for multi-class <br>
* <br>
* Note: Care should be taken when using the Evaluation class for binary classification metrics such as F1, precision,
* recall, etc. There are a number of cases to consider:<br>
* 1. For binary classification (1 or 2 network outputs)<br>
* a) Default behaviour: class 1 is assumed as the positive class. Consequently, no-arg methods such as {@link #f1()},
* {@link #precision()}, {@link #recall()} etc will report the binary metric for class 1 only<br>
* b) To set class 0 as the positive class instead of class 1 (the default), use {@link #Evaluation(int, Integer)} or
* {@link #Evaluation(double, Integer)} or {@link #setBinaryPositiveClass(Integer)}. Then, {@link #f1()},
* {@link #precision()}, {@link #recall()} etc will report the binary metric for class 0 only.<br>
* c) To use macro-averaged metrics over both classes for binary classification (uncommon and usually not advisable)
* specify 'null' as the argument (instead of 0 or 1) as per (b) above<br>
* 2. For multi-class classification, binary metric methods such as {@link #f1()}, {@link #precision()}, {@link #recall()}
* will report macro-average (of the one-vs-all) binary metrics. Note that you can specify micro vs. macro averaging
* using {@link #f1(EvaluationAveraging)} and similar methods<br>
* <br>
* Note that setting a custom binary decision threshold is only possible for the binary case (1 or 2 outputs) and cannot
* be used if the number of classes exceeds 2. Predictions with probability > threshold are considered to be class 1,
* and are considered class 0 otherwise.<br>
* <br>
* Cost arrays (a row vector, of size equal to the number of outputs) modify the evaluation process: instead of simply
* doing predictedClass = argMax(probabilities), we do predictedClass = argMax(cost * probabilities). Consequently, an
* array of all 1s (or, indeed any array of equal values) will result in the same performance as no cost array; non-
* equal values will bias the predictions for or against certain classes.
*
* @author Adam Gibson
*/
@Slf4j
@EqualsAndHashCode(callSuper = true)
@Getter
@Setter
@JsonIgnoreProperties({"confusionMatrixMetaData"})
public class Evaluation extends BaseEvaluation<Evaluation> {
// Metrics that this evaluation can report on
public enum Metric {ACCURACY, F1, PRECISION, RECALL, GMEASURE, MCC}

//What to output from the precision/recall function when we encounter an edge case
protected static final double DEFAULT_EDGE_VALUE = 0.0;
// Maximum number of classes for which the confusion matrix is included in stats() output
protected static final int CONFUSION_PRINT_MAX_CLASSES = 20;

protected Integer binaryPositiveClass = 1; //Used *only* for binary classification; default value here to 1 for legacy JSON loading
protected final int topN;
protected int topNCorrectCount = 0;
protected int topNTotalCount = 0; //Could use topNCountCorrect / (double)getNumRowCounter() - except for eval(int,int), hence separate counters
// Per-class counters, keyed by class index
protected Counter<Integer> truePositives = new Counter<>();
protected Counter<Integer> falsePositives = new Counter<>();
protected Counter<Integer> trueNegatives = new Counter<>();
protected Counter<Integer> falseNegatives = new Counter<>();
// Confusion matrix (actual vs. predicted); custom (de)serializers keep the JSON form compact
@JsonSerialize(using = ConfusionMatrixSerializer.class)
@JsonDeserialize(using = ConfusionMatrixDeserializer.class)
protected ConfusionMatrix<Integer> confusion;
// Total number of examples evaluated so far
protected int numRowCounter = 0;
@Getter
@Setter
protected List<String> labelsList = new ArrayList<>();
// Optional custom threshold for binary decisions; null -> default behavior (argmax / 0.5)
protected Double binaryDecisionThreshold;
@JsonSerialize(using = RowVectorSerializer.class)
@JsonDeserialize(using = RowVectorDeserializer.class)
protected INDArray costArray;

protected Map<Pair<Integer, Integer>, List<Object>> confusionMatrixMetaData; //Pair: (Actual,Predicted)
// Empty constructor: single-count accuracy (topN=1), class 1 as binary positive class
public Evaluation() {
    this.topN = 1;
    this.binaryPositiveClass = 1;
}

/**
 * The number of classes to account for in the evaluation
 * @param numClasses the number of classes to account for in the evaluation
 */
public Evaluation(int numClasses) {
    // For exactly 2 classes, default the binary positive class to 1 (see class javadoc)
    this(numClasses, (numClasses == 2 ? 1 : null));
}

/**
 * Constructor for specifying the number of classes, and optionally the positive class for binary classification.
 * See Evaluation javadoc for more details on evaluation in the binary case
 *
 * @param numClasses          The number of classes for the evaluation. Must be 2, if binaryPositiveClass is non-null
 * @param binaryPositiveClass If non-null, the positive class (0 or 1).
 */
public Evaluation(int numClasses, Integer binaryPositiveClass){
    this(createLabels(numClasses), 1);
    if(binaryPositiveClass != null){
        Preconditions.checkArgument(binaryPositiveClass == 0 || binaryPositiveClass == 1,
                "Only 0 and 1 are valid inputs for binaryPositiveClass; got " + binaryPositiveClass);
        Preconditions.checkArgument(numClasses == 2, "Cannot set binaryPositiveClass argument " +
                "when number of classes is not equal to 2 (got: numClasses=" + numClasses + ")");
    }
    // May overwrite the default of 1 set by the delegated constructor (null is allowed here)
    this.binaryPositiveClass = binaryPositiveClass;
}

/**
 * The labels to include with the evaluation.
 * This constructor can be used for
 * generating labeled output rather than just
 * numbers for the labels
 * @param labels the labels to use
 * for the output
 */
public Evaluation(List<String> labels) {
    this(labels, 1);
}

/**
 * Use a map to generate labels
 * Pass in a label index with the actual label
 * you want to use for output
 * @param labels a map of label index to label value
 */
public Evaluation(Map<Integer, String> labels) {
    this(createLabelsFromMap(labels), 1);
}

/**
 * Constructor to use for top N accuracy
 *
 * @param labels Labels for the classes (may be null)
 * @param topN   Value to use for top N accuracy calculation (<=1: standard accuracy). Note that with top N
 *               accuracy, an example is considered 'correct' if the probability for the true class is one of the
 *               highest N values
 */
public Evaluation(List<String> labels, int topN) {
    this.labelsList = labels;
    if (labels != null) {
        createConfusion(labels.size());
    }
    this.topN = topN;
    // Exactly two labels -> binary classification; default the positive class to 1
    if(labels != null && labels.size() == 2){
        this.binaryPositiveClass = 1;
    }
}

/**
 * Create an evaluation instance with a custom binary decision threshold. Note that binary decision thresholds can
 * only be used with binary classifiers.<br>
 * Defaults to class 1 for the positive class - see class javadoc, and use {@link #Evaluation(double, Integer)} to
 * change this.
 *
 * @param binaryDecisionThreshold Decision threshold to use for binary predictions
 */
public Evaluation(double binaryDecisionThreshold) {
    this(binaryDecisionThreshold, 1);
}
/**
 * Create an evaluation instance with a custom binary decision threshold. Note that binary decision thresholds can
 * only be used with binary classifiers.<br>
 * This constructor also allows the user to specify the positive class for binary classification. See class javadoc
 * for more details.
 *
 * @param binaryDecisionThreshold Decision threshold to use for binary predictions
 * @param binaryPositiveClass     Positive class for binary metrics: 0 or 1, or null to macro-average the
 *                                binary metrics over both classes (see class javadoc, point 1c)
 */
public Evaluation(double binaryDecisionThreshold, Integer binaryPositiveClass) {
    // Note: null is explicitly permitted here (macro-averaged binary metrics - class javadoc 1c),
    // so this parameter must NOT be annotated @NonNull; the previous @NonNull annotation made the
    // documented null usage impossible and rendered the null check below unreachable.
    if(binaryPositiveClass != null){
        Preconditions.checkArgument(binaryPositiveClass == 0 || binaryPositiveClass == 1,
                "Only 0 and 1 are valid inputs for binaryPositiveClass; got " + binaryPositiveClass);
    }
    this.binaryDecisionThreshold = binaryDecisionThreshold;
    this.topN = 1;
    this.binaryPositiveClass = binaryPositiveClass;
}
/**
 * Created evaluation instance with the specified cost array. A cost array can be used to bias the multi class
 * predictions towards or away from certain classes. The predicted class is determined using argMax(cost * probability)
 * instead of argMax(probability) when no cost array is present.
 *
 * @param costArray Row vector cost array. May be null
 */
public Evaluation(INDArray costArray) {
    this(null, costArray);
}

/**
 * Created evaluation instance with the specified cost array. A cost array can be used to bias the multi class
 * predictions towards or away from certain classes. The predicted class is determined using argMax(cost * probability)
 * instead of argMax(probability) when no cost array is present.
 *
 * @param labels    Labels for the output classes. May be null
 * @param costArray Row vector cost array. May be null
 * @throws IllegalArgumentException if the cost array is not a row vector, or contains negative values
 */
public Evaluation(List<String> labels, INDArray costArray) {
    // Validate shape first, then sign: cost entries scale probabilities, so they must be non-negative
    if (costArray != null && !costArray.isRowVectorOrScalar()) {
        throw new IllegalArgumentException("Invalid cost array: must be a row vector (got shape: "
                + Arrays.toString(costArray.shape()) + ")");
    }
    if (costArray != null && costArray.minNumber().doubleValue() < 0.0) {
        throw new IllegalArgumentException("Invalid cost array: Cost array values must be positive");
    }
    this.labelsList = labels;
    this.costArray = costArray;
    this.topN = 1;
}
/** Number of classes under evaluation: taken from the label list when set, else from the confusion matrix. */
protected int numClasses(){
    return labelsList != null ? labelsList.size() : confusion().getClasses().size();
}
/**
 * Clears all accumulated state (counters and the confusion matrix) so this instance can be reused.
 * Note: labelsList is not cleared here, so label names survive a reset.
 */
@Override
public void reset() {
    confusion = null;
    truePositives = new Counter<>();
    falsePositives = new Counter<>();
    trueNegatives = new Counter<>();
    falseNegatives = new Counter<>();

    topNCorrectCount = 0;
    topNTotalCount = 0;
    numRowCounter = 0;
}

// Internal accessor for the confusion matrix (may be null before the first eval call)
private ConfusionMatrix<Integer> confusion() {
    return confusion;
}
/** Generates default string labels "0".."n-1"; a single output unit still means a binary problem (two labels). */
private static List<String> createLabels(int numClasses) {
    final int effectiveClasses = (numClasses == 1) ? 2 : numClasses; //Binary (single output variable) case...
    final List<String> labels = new ArrayList<>(effectiveClasses);
    for (int i = 0; i < effectiveClasses; i++) {
        labels.add(String.valueOf(i));
    }
    return labels;
}

/** Converts an index->label map into an ordered label list; every index 0..size-1 must be present. */
private static List<String> createLabelsFromMap(Map<Integer, String> labels) {
    final int size = labels.size();
    final List<String> labelsList = new ArrayList<>(size);
    for (int i = 0; i < size; i++) {
        final String label = labels.get(i);
        if (label == null) {
            throw new IllegalArgumentException("Invalid labels map: missing key for class " + i
                    + " (expect integers 0 to " + (size - 1) + ")");
        }
        labelsList.add(label);
    }
    return labelsList;
}
/** Initializes the confusion matrix for class indices 0..nClasses-1. */
private void createConfusion(int nClasses) {
    final List<Integer> classIndices = new ArrayList<>(nClasses);
    for (int i = 0; i < nClasses; i++) {
        classIndices.add(i);
    }
    confusion = new ConfusionMatrix<>(classIndices);
}
/**
 * Evaluate the output
 * using the given true labels,
 * the input to the multi layer network
 * and the multi layer network to
 * use for evaluation
 * @param trueLabels the labels to use
 * @param input the input to the network to use
 * for evaluation
 * @param network the network to use for output
 */
public void eval(INDArray trueLabels, INDArray input, ComputationGraph network) {
    // output(false, input): inference mode (no training-time behavior such as dropout)
    eval(trueLabels, network.output(false, input)[0]);
}

/**
 * Evaluate the output
 * using the given true labels,
 * the input to the multi layer network
 * and the multi layer network to
 * use for evaluation
 * @param trueLabels the labels to use
 * @param input the input to the network to use
 * for evaluation
 * @param network the network to use for output
 */
public void eval(INDArray trueLabels, INDArray input, MultiLayerNetwork network) {
    // TrainingMode.TEST: inference mode (no training-time behavior such as dropout)
    eval(trueLabels, network.output(input, Layer.TrainingMode.TEST));
}

/**
 * Collects statistics on the real outcomes vs the
 * guesses. This is for logistic outcome matrices.
 * <p>
 * Note that an IllegalArgumentException is thrown if the two passed in
 * matrices aren't the same length.
 *
 * @param realOutcomes the real outcomes (labels - usually binary)
 * @param guesses      the guesses/prediction (usually a probability vector)
 */
public void eval(INDArray realOutcomes, INDArray guesses) {
    eval(realOutcomes, guesses, (List<Serializable>) null);
}
/**
 * Evaluate the network, with optional metadata
 *
 * @param realOutcomes   Data labels
 * @param guesses        Network predictions
 * @param recordMetaData Optional; may be null. If not null, should have size equal to the number of outcomes/guesses
 *
 */
@Override
public void eval(final INDArray realOutcomes, final INDArray guesses,
                final List<? extends Serializable> recordMetaData) {
    // Add the number of rows to numRowCounter
    numRowCounter += realOutcomes.size(0);

    // If confusion is null, then Evaluation was instantiated without providing the classes -> infer # classes from
    if (confusion == null) {
        int nClasses = realOutcomes.columns();
        if (nClasses == 1)
            nClasses = 2; //Binary (single output variable) case
        labelsList = new ArrayList<>(nClasses);
        for (int i = 0; i < nClasses; i++)
            labelsList.add(String.valueOf(i));
        createConfusion(nClasses);
    }

    // Length of real labels must be same as length of predicted labels
    if (realOutcomes.length() != guesses.length())
        throw new IllegalArgumentException("Unable to evaluate. Outcome matrices not same length");

    // For each row get the most probable label (column) from prediction and assign as guessMax
    // For each row get the column of the true label and assign as currMax
    final int nCols = realOutcomes.columns();
    final int nRows = realOutcomes.rows();

    if (nCols == 1) {
        // Single output unit: threshold the (sigmoid-style) output to get a binary prediction
        INDArray binaryGuesses = guesses.gt(binaryDecisionThreshold == null ? 0.5 : binaryDecisionThreshold);
        INDArray notLabel = Nd4j.getExecutioner().execAndReturn(new Not(realOutcomes.dup()));
        INDArray notGuess = Nd4j.getExecutioner().execAndReturn(new Not(binaryGuesses.dup()));
        //tp: predicted = 1, actual = 1
        int tp = binaryGuesses.mul(realOutcomes).sumNumber().intValue();
        //fp: predicted = 1, actual = 0
        int fp = binaryGuesses.mul(notLabel).sumNumber().intValue();
        //fn: predicted = 0, actual = 1
        int fn = notGuess.mul(realOutcomes).sumNumber().intValue();
        int tn = nRows - tp - fp - fn;

        confusion().add(1, 1, tp);
        confusion().add(1, 0, fn);
        confusion().add(0, 1, fp);
        confusion().add(0, 0, tn);

        // Class-1 counts, plus the mirror-image counts for class 0
        truePositives.incrementCount(1, tp);
        falsePositives.incrementCount(1, fp);
        falseNegatives.incrementCount(1, fn);
        trueNegatives.incrementCount(1, tn);

        truePositives.incrementCount(0, tn);
        falsePositives.incrementCount(0, fn);
        falseNegatives.incrementCount(0, fp);
        trueNegatives.incrementCount(0, tp);

        if (recordMetaData != null) {
            for (int i = 0; i < binaryGuesses.size(0); i++) {
                if (i >= recordMetaData.size())
                    break;
                // BUGFIX: index with i, not 0 - previously every metadata record was attributed
                // to the first example's (actual, predicted) cell of the meta confusion matrix
                int actual = realOutcomes.getDouble(i) == 0.0 ? 0 : 1;
                int predicted = binaryGuesses.getDouble(i) == 0.0 ? 0 : 1;
                addToMetaConfusionMatrix(actual, predicted, recordMetaData.get(i));
            }
        }
    } else {
        INDArray guessIndex;
        if (binaryDecisionThreshold != null) {
            if (nCols != 2) {
                throw new IllegalStateException("Binary decision threshold is set, but number of columns for "
                        + "predictions is " + nCols
                        + ". Binary decision threshold can only be used for binary " + "prediction cases");
            }

            INDArray pClass1 = guesses.getColumn(1);
            guessIndex = pClass1.gt(binaryDecisionThreshold);
        } else if (costArray != null) {
            //With a cost array: do argmax(cost * probability) instead of just argmax(probability)
            guessIndex = Nd4j.argMax(guesses.mulRowVector(costArray), 1);
        } else {
            //Standard case: argmax
            guessIndex = Nd4j.argMax(guesses, 1);
        }
        INDArray realOutcomeIndex = Nd4j.argMax(realOutcomes, 1);
        val nExamples = guessIndex.length();

        for (int i = 0; i < nExamples; i++) {
            int actual = (int) realOutcomeIndex.getDouble(i);
            int predicted = (int) guessIndex.getDouble(i);
            confusion().add(actual, predicted);

            if (recordMetaData != null && recordMetaData.size() > i) {
                Object m = recordMetaData.get(i);
                addToMetaConfusionMatrix(actual, predicted, m);
            }

            // instead of looping through each label for confusion
            // matrix, instead infer those values by determining if true/false negative/positive,
            // then just add across matrix

            // if actual == predicted, then it's a true positive, assign true negative to every other label
            if (actual == predicted) {
                truePositives.incrementCount(actual, 1);
                for (int col = 0; col < nCols; col++) {
                    if (col == actual) {
                        continue;
                    }
                    trueNegatives.incrementCount(col, 1); // all cols prior
                }
            } else {
                falsePositives.incrementCount(predicted, 1);
                falseNegatives.incrementCount(actual, 1);

                // first determine intervals for adding true negatives
                int lesserIndex, greaterIndex;
                if (actual < predicted) {
                    lesserIndex = actual;
                    greaterIndex = predicted;
                } else {
                    lesserIndex = predicted;
                    greaterIndex = actual;
                }

                // now loop through intervals: every class other than 'actual' and 'predicted' is a true negative
                for (int col = 0; col < lesserIndex; col++) {
                    trueNegatives.incrementCount(col, 1); // all cols prior
                }
                for (int col = lesserIndex + 1; col < greaterIndex; col++) {
                    trueNegatives.incrementCount(col, 1); // all cols after
                }
                for (int col = greaterIndex + 1; col < nCols; col++) {
                    trueNegatives.incrementCount(col, 1); // all cols after
                }
            }
        }
    }

    if (nCols > 1 && topN > 1) {
        //Calculate top N accuracy
        //TODO: this could be more efficient
        INDArray realOutcomeIndex = Nd4j.argMax(realOutcomes, 1);
        val nExamples = realOutcomeIndex.length();
        for (int i = 0; i < nExamples; i++) {
            int labelIdx = (int) realOutcomeIndex.getDouble(i);
            double prob = guesses.getDouble(i, labelIdx);
            INDArray row = guesses.getRow(i);
            int countGreaterThan = (int) Nd4j.getExecutioner()
                    .exec(new MatchCondition(row, Conditions.greaterThan(prob)), Integer.MAX_VALUE)
                    .getDouble(0);
            if (countGreaterThan < topN) {
                //For example, for top 3 accuracy: can have at most 2 other probabilities larger
                topNCorrectCount++;
            }
            topNTotalCount++;
        }
    }
}
/**
 * Evaluate a single prediction (one prediction at a time)
 *
 * @param predictedIdx Index of class predicted by the network
 * @param actualIdx    Index of actual class
 * @throws UnsupportedOperationException if the class count was never provided (confusion matrix uninitialized)
 */
public void eval(int predictedIdx, int actualIdx) {
    // Add the number of rows to numRowCounter
    numRowCounter++;

    // If confusion is null, then Evaluation is instantiated without providing the classes
    if (confusion == null) {
        throw new UnsupportedOperationException(
                "Cannot evaluate single example without initializing confusion matrix first");
    }

    addToConfusion(actualIdx, predictedIdx);

    // If they are equal
    if (predictedIdx == actualIdx) {
        // Then add 1 to True Positive
        // (For a particular label)
        incrementTruePositives(predictedIdx);

        // And add 1 for each negative class that is accurately predicted (True Negative)
        //(For a particular label)
        for (Integer clazz : confusion().getClasses()) {
            if (clazz != predictedIdx)
                trueNegatives.incrementCount(clazz, 1.0f);
        }
    } else {
        // Otherwise the real label is predicted as negative (False Negative)
        incrementFalseNegatives(actualIdx);
        // Otherwise the prediction is predicted as falsely positive (False Positive)
        incrementFalsePositives(predictedIdx);
        // Otherwise true negatives: every class other than 'actual' and 'predicted'
        for (Integer clazz : confusion().getClasses()) {
            if (clazz != predictedIdx && clazz != actualIdx)
                trueNegatives.incrementCount(clazz, 1.0f);
        }
    }
}
/**
 * Report the classification statistics as a String
 * @return Classification statistics as a String
 */
public String stats() {
    return stats(false);
}

/**
 * Method to obtain the classification report as a String
 *
 * @param suppressWarnings whether or not to output warnings related to the evaluation results
 * @return A (multi-line) String with accuracy, precision, recall, f1 score etc
 */
public String stats(boolean suppressWarnings) {
    // The confusion matrix is only printed when small enough to be readable; otherwise a size warning is logged
    return stats(suppressWarnings, numClasses() <= CONFUSION_PRINT_MAX_CLASSES, numClasses() > CONFUSION_PRINT_MAX_CLASSES);
}

/**
 * Method to obtain the classification report as a String
 *
 * @param suppressWarnings whether or not to output warnings related to the evaluation results
 * @param includeConfusion whether the confusion matrix should be included it the returned stats or not
 * @return A (multi-line) String with accuracy, precision, recall, f1 score etc
 */
public String stats(boolean suppressWarnings, boolean includeConfusion){
    return stats(suppressWarnings, includeConfusion, false);
}
private String stats(boolean suppressWarnings, boolean includeConfusion, boolean logConfusionSizeWarning){
String actual, predicted;
StringBuilder builder = new StringBuilder().append("\n");
StringBuilder warnings = new StringBuilder();
ConfusionMatrix<Integer> confusion = confusion();
if(confusion == null){
confusion = new ConfusionMatrix<>(); //Empty
}
List<Integer> classes = confusion.getClasses();
List<Integer> falsePositivesWarningClasses = new ArrayList<>();
List<Integer> falseNegativesWarningClasses = new ArrayList<>();
for (Integer clazz : classes) {
//Output possible warnings regarding precision/recall calculation
if (!suppressWarnings && truePositives.getCount(clazz) == 0) {
if (falsePositives.getCount(clazz) == 0) {
falsePositivesWarningClasses.add(clazz);
}
if (falseNegatives.getCount(clazz) == 0) {
falseNegativesWarningClasses.add(clazz);
}
}
}
if (!falsePositivesWarningClasses.isEmpty()) {
warningHelper(warnings, falsePositivesWarningClasses, "precision");
}
if (!falseNegativesWarningClasses.isEmpty()) {
warningHelper(warnings, falseNegativesWarningClasses, "recall");
}
int nClasses = confusion.getClasses().size();
DecimalFormat df = new DecimalFormat("0.0000");
double acc = accuracy();
double precisionMacro = precision(EvaluationAveraging.Macro);
double recallMacro = recall(EvaluationAveraging.Macro);
double f1Macro = f1(EvaluationAveraging.Macro);
builder.append("\n========================Evaluation Metrics========================");
builder.append("\n # of classes: ").append(nClasses);
builder.append("\n Accuracy: ").append(format(df, acc));
if (topN > 1) {
double topNAcc = topNAccuracy();
builder.append("\n Top ").append(topN).append(" Accuracy: ").append(format(df, topNAcc));
}
builder.append("\n Precision: ").append(format(df, precisionMacro));
if (nClasses > 2 && averagePrecisionNumClassesExcluded() > 0) {
int ex = averagePrecisionNumClassesExcluded();
builder.append("\t(").append(ex).append(" class");
if (ex > 1)
builder.append("es");
builder.append(" excluded from average)");
}
builder.append("\n Recall: ").append(format(df, recallMacro));
if (nClasses > 2 && averageRecallNumClassesExcluded() > 0) {
int ex = averageRecallNumClassesExcluded();
builder.append("\t(").append(ex).append(" class");
if (ex > 1)
builder.append("es");
builder.append(" excluded from average)");
}
builder.append("\n F1 Score: ").append(format(df, f1Macro));
if (nClasses > 2 && averageF1NumClassesExcluded() > 0) {
int ex = averageF1NumClassesExcluded();
builder.append("\t(").append(ex).append(" class");
if (ex > 1)
builder.append("es");
builder.append(" excluded from average)");
}
if (nClasses > 2 || binaryPositiveClass == null) {
builder.append("\nPrecision, recall & F1: macro-averaged (equally weighted avg. of ").append(nClasses)
.append(" classes)");
}
if(nClasses == 2 && binaryPositiveClass != null){
builder.append("\nPrecision, recall & F1: reported for positive class (class ").append(binaryPositiveClass);
if(labelsList != null){
builder.append(" - \"").append(labelsList.get(binaryPositiveClass)).append("\"");
}
builder.append(") only");
}
if (binaryDecisionThreshold != null) {
builder.append("\nBinary decision threshold: ").append(binaryDecisionThreshold);
}
if (costArray != null) {
builder.append("\nCost array: ").append(Arrays.toString(costArray.dup().data().asFloat()));
}
//Note that we could report micro-averaged too - but these are the same as accuracy
//"Note that for “micro”-averaging in a multiclass setting with all labels included will produce equal precision, recall and F,"
//http://scikit-learn.org/stable/modules/model_evaluation.html
builder.append("\n\n");
builder.append(warnings);
if(includeConfusion){
builder.append("\n=========================Confusion Matrix=========================\n");
builder.append(confusionMatrix());
} else if(logConfusionSizeWarning){
builder.append("\n\nNote: Confusion matrix not generated due to space requirements for ")
.append(nClasses).append(" classes.\n")
.append("Use stats(false,true) to generate anyway");
}
builder.append("\n==================================================================");
return builder.toString();
}
    /**
     * Get the confusion matrix as a String, with one row per actual class and one column
     * per predicted class, right-aligned into fixed-width columns.
     *
     * @return Confusion matrix as a String, or a placeholder if no data has been collected
     */
    public String confusionMatrix(){
        int nClasses = numClasses();
        if(confusion == null){
            return "Confusion matrix: <no data>";
        }
        //First: work out the maximum count, to size the fixed-width columns
        List<Integer> classes = confusion.getClasses();
        int maxCount = 1;
        for (Integer i : classes) {
            for (Integer j : classes) {
                int count = confusion().getCount(i, j);
                maxCount = Math.max(maxCount, count);
            }
        }
        maxCount = Math.max(maxCount, nClasses); //Include this as header might be bigger than actual values
        int numDigits = (int)Math.ceil(Math.log10(maxCount));
        if(numDigits < 1)
            numDigits = 1;
        //Column width is numDigits+1, leaving at least one space between columns
        String digitFormat = "%" + (numDigits+1) + "d";
        StringBuilder sb = new StringBuilder();
        //Build header: predicted class indices as column labels
        for( int i=0; i<nClasses; i++ ){
            sb.append(String.format(digitFormat, i));
        }
        sb.append("\n");
        //Divider row of '-' characters under the header
        int numDividerChars = (numDigits+1) * nClasses + 1;
        for( int i=0; i<numDividerChars; i++ ){
            sb.append("-");
        }
        sb.append("\n");
        //Build each row: counts for one actual class, followed by its index and label
        for( int actual=0; actual<nClasses; actual++){
            String actualName = resolveLabelForClass(actual);
            for( int predicted=0; predicted<nClasses; predicted++){
                int count = confusion.getCount(actual, predicted);
                sb.append(String.format(digitFormat, count));
            }
            sb.append(" | ").append(actual).append(" = ").append(actualName).append("\n");
        }
        sb.append("\nConfusion matrix format: Actual (rowClass) predicted as (columnClass) N times");
        return sb.toString();
    }
private static String format(DecimalFormat f, double num) {
if (Double.isNaN(num) || Double.isInfinite(num))
return String.valueOf(num);
return f.format(num);
}
private String resolveLabelForClass(Integer clazz) {
if (labelsList != null && labelsList.size() > clazz)
return labelsList.get(clazz);
return clazz.toString();
}
private void warningHelper(StringBuilder warnings, List<Integer> list, String metric) {
warnings.append("Warning: ").append(list.size()).append(" class");
String wasWere;
if (list.size() == 1) {
wasWere = "was";
} else {
wasWere = "were";
warnings.append("es");
}
warnings.append(" ").append(wasWere);
warnings.append(" never predicted by the model and ").append(wasWere).append(" excluded from average ")
.append(metric).append("\nClasses excluded from average ").append(metric).append(": ")
.append(list).append("\n");
}
/**
* Returns the precision for a given class label
*
* @param classLabel the label
* @return the precision for the label
*/
public double precision(Integer classLabel) {
return precision(classLabel, DEFAULT_EDGE_VALUE);
}
/**
* Returns the precision for a given label
*
* @param classLabel the label
* @param edgeCase What to output in case of 0/0
* @return the precision for the label
*/
public double precision(Integer classLabel, double edgeCase) {
double tpCount = truePositives.getCount(classLabel);
double fpCount = falsePositives.getCount(classLabel);
return EvaluationUtils.precision((long) tpCount, (long) fpCount, edgeCase);
}
/**
* Precision based on guesses so far.<br>
* Note: value returned will differ depending on number of classes and settings.<br>
* 1. For binary classification, if the positive class is set (via default value of 1, via constructor,
* or via {@link #setBinaryPositiveClass(Integer)}), the returned value will be for the specified positive class
* only.<br>
* 2. For the multi-class case, or when {@link #getBinaryPositiveClass()} is null, the returned value is macro-averaged
* across all classes. i.e., is macro-averaged precision, equivalent to {@code precision(EvaluationAveraging.Macro)}<br>
*
* @return the total precision based on guesses so far
*/
public double precision() {
if(binaryPositiveClass != null && numClasses() == 2){
return precision(binaryPositiveClass);
}
return precision(EvaluationAveraging.Macro);
}
    /**
     * Calculate the average precision for all classes. Can specify whether macro or micro averaging should be used
     * NOTE: if any classes have tp=0 and fp=0, (precision=0/0) these are excluded from the average
     *
     * @param averaging Averaging method - macro or micro
     * @return Average precision (0.0 if no data has been collected)
     */
    public double precision(EvaluationAveraging averaging) {
        if(getNumRowCounter() == 0){
            return 0.0; //No data
        }
        int nClasses = confusion().getClasses().size();
        if (averaging == EvaluationAveraging.Macro) {
            double macroPrecision = 0.0;
            int count = 0;
            for (int i = 0; i < nClasses; i++) {
                //-1 is a sentinel: precision(i, -1) returns it when TP+FP == 0 (undefined)
                double thisClassPrec = precision(i, -1);
                if (thisClassPrec != -1) {
                    macroPrecision += thisClassPrec;
                    count++;
                }
            }
            //NOTE(review): if every class hits the 0/0 edge case, count == 0 and this yields NaN
            macroPrecision /= count;
            return macroPrecision;
        } else if (averaging == EvaluationAveraging.Micro) {
            //Micro: pool TP/FP counts over all classes, then compute precision once
            long tpCount = 0;
            long fpCount = 0;
            for (int i = 0; i < nClasses; i++) {
                tpCount += truePositives.getCount(i);
                fpCount += falsePositives.getCount(i);
            }
            return EvaluationUtils.precision(tpCount, fpCount, DEFAULT_EDGE_VALUE);
        } else {
            throw new UnsupportedOperationException("Unknown averaging approach: " + averaging);
        }
    }
/**
* When calculating the (macro) average precision, how many classes are excluded from the average due to
* no predictions – i.e., precision would be the edge case of 0/0
*
* @return Number of classes excluded from the average precision
*/
public int averagePrecisionNumClassesExcluded() {
return numClassesExcluded("precision");
}
/**
* When calculating the (macro) average Recall, how many classes are excluded from the average due to
* no predictions – i.e., recall would be the edge case of 0/0
*
* @return Number of classes excluded from the average recall
*/
public int averageRecallNumClassesExcluded() {
return numClassesExcluded("recall");
}
/**
* When calculating the (macro) average F1, how many classes are excluded from the average due to
* no predictions – i.e., F1 would be calculated from a precision or recall of 0/0
*
* @return Number of classes excluded from the average F1
*/
public int averageF1NumClassesExcluded() {
return numClassesExcluded("f1");
}
/**
* When calculating the (macro) average FBeta, how many classes are excluded from the average due to
* no predictions – i.e., FBeta would be calculated from a precision or recall of 0/0
*
* @return Number of classes excluded from the average FBeta
*/
public int averageFBetaNumClassesExcluded() {
return numClassesExcluded("fbeta");
}
private int numClassesExcluded(String metric) {
int countExcluded = 0;
int nClasses = confusion().getClasses().size();
for (int i = 0; i < nClasses; i++) {
double d;
switch (metric.toLowerCase()) {
case "precision":
d = precision(i, -1);
break;
case "recall":
d = recall(i, -1);
break;
case "f1":
case "fbeta":
d = fBeta(1.0, i, -1);
break;
default:
throw new RuntimeException("Unknown metric: " + metric);
}
if (d == -1) {
countExcluded++;
}
}
return countExcluded;
}
/**
* Returns the recall for a given label
*
* @param classLabel the label
* @return Recall rate as a double
*/
public double recall(int classLabel) {
return recall(classLabel, DEFAULT_EDGE_VALUE);
}
/**
* Returns the recall for a given label
*
* @param classLabel the label
* @param edgeCase What to output in case of 0/0
* @return Recall rate as a double
*/
public double recall(int classLabel, double edgeCase) {
double tpCount = truePositives.getCount(classLabel);
double fnCount = falseNegatives.getCount(classLabel);
return EvaluationUtils.recall((long) tpCount, (long) fnCount, edgeCase);
}
/**
* Recall based on guesses so far<br>
* Note: value returned will differ depending on number of classes and settings.<br>
* 1. For binary classification, if the positive class is set (via default value of 1, via constructor,
* or via {@link #setBinaryPositiveClass(Integer)}), the returned value will be for the specified positive class
* only.<br>
* 2. For the multi-class case, or when {@link #getBinaryPositiveClass()} is null, the returned value is macro-averaged
* across all classes. i.e., is macro-averaged recall, equivalent to {@code recall(EvaluationAveraging.Macro)}<br>
*
* @return the recall for the outcomes
*/
public double recall() {
if(binaryPositiveClass != null && numClasses() == 2){
return recall(binaryPositiveClass);
}
return recall(EvaluationAveraging.Macro);
}
    /**
     * Calculate the average recall for all classes - can specify whether macro or micro averaging should be used
     * NOTE: if any classes have tp=0 and fn=0, (recall=0/0) these are excluded from the average
     *
     * @param averaging Averaging method - macro or micro
     * @return Average recall (0.0 if no data has been collected)
     */
    public double recall(EvaluationAveraging averaging) {
        if(getNumRowCounter() == 0.0){
            return 0.0; //No data
        }
        int nClasses = confusion().getClasses().size();
        if (averaging == EvaluationAveraging.Macro) {
            double macroRecall = 0.0;
            int count = 0;
            for (int i = 0; i < nClasses; i++) {
                //-1 is a sentinel: recall(i, -1) returns it when TP+FN == 0 (undefined)
                double thisClassRecall = recall(i, -1);
                if (thisClassRecall != -1) {
                    macroRecall += thisClassRecall;
                    count++;
                }
            }
            //NOTE(review): if every class hits the 0/0 edge case, count == 0 and this yields NaN
            macroRecall /= count;
            return macroRecall;
        } else if (averaging == EvaluationAveraging.Micro) {
            //Micro: pool TP/FN counts over all classes, then compute recall once
            long tpCount = 0;
            long fnCount = 0;
            for (int i = 0; i < nClasses; i++) {
                tpCount += truePositives.getCount(i);
                fnCount += falseNegatives.getCount(i);
            }
            return EvaluationUtils.recall(tpCount, fnCount, DEFAULT_EDGE_VALUE);
        } else {
            throw new UnsupportedOperationException("Unknown averaging approach: " + averaging);
        }
    }
/**
* Returns the false positive rate for a given label
*
* @param classLabel the label
* @return fpr as a double
*/
public double falsePositiveRate(int classLabel) {
return falsePositiveRate(classLabel, DEFAULT_EDGE_VALUE);
}
/**
* Returns the false positive rate for a given label
*
* @param classLabel the label
* @param edgeCase What to output in case of 0/0
* @return fpr as a double
*/
public double falsePositiveRate(int classLabel, double edgeCase) {
double fpCount = falsePositives.getCount(classLabel);
double tnCount = trueNegatives.getCount(classLabel);
return EvaluationUtils.falsePositiveRate((long) fpCount, (long) tnCount, edgeCase);
}
/**
* False positive rate based on guesses so far<br>
* Note: value returned will differ depending on number of classes and settings.<br>
* 1. For binary classification, if the positive class is set (via default value of 1, via constructor,
* or via {@link #setBinaryPositiveClass(Integer)}), the returned value will be for the specified positive class
* only.<br>
* 2. For the multi-class case, or when {@link #getBinaryPositiveClass()} is null, the returned value is macro-averaged
* across all classes. i.e., is macro-averaged false positive rate, equivalent to
* {@code falsePositiveRate(EvaluationAveraging.Macro)}<br>
*
* @return the fpr for the outcomes
*/
public double falsePositiveRate() {
if(binaryPositiveClass != null && numClasses() == 2){
return falsePositiveRate(binaryPositiveClass);
}
return falsePositiveRate(EvaluationAveraging.Macro);
}
    /**
     * Calculate the average false positive rate across all classes. Can specify whether macro or micro averaging should be used
     *
     * @param averaging Averaging method - macro or micro
     * @return Average false positive rate
     */
    public double falsePositiveRate(EvaluationAveraging averaging) {
        int nClasses = confusion().getClasses().size();
        if (averaging == EvaluationAveraging.Macro) {
            //Macro: simple mean of per-class rates (no 0/0 exclusion here, unlike precision/recall)
            double macroFPR = 0.0;
            for (int i = 0; i < nClasses; i++) {
                macroFPR += falsePositiveRate(i);
            }
            macroFPR /= nClasses;
            return macroFPR;
        } else if (averaging == EvaluationAveraging.Micro) {
            //Micro: pool FP/TN counts over all classes, then compute the rate once
            long fpCount = 0;
            long tnCount = 0;
            for (int i = 0; i < nClasses; i++) {
                fpCount += falsePositives.getCount(i);
                tnCount += trueNegatives.getCount(i);
            }
            return EvaluationUtils.falsePositiveRate(fpCount, tnCount, DEFAULT_EDGE_VALUE);
        } else {
            throw new UnsupportedOperationException("Unknown averaging approach: " + averaging);
        }
    }
/**
* Returns the false negative rate for a given label
*
* @param classLabel the label
* @return fnr as a double
*/
public double falseNegativeRate(Integer classLabel) {
return falseNegativeRate(classLabel, DEFAULT_EDGE_VALUE);
}
/**
* Returns the false negative rate for a given label
*
* @param classLabel the label
* @param edgeCase What to output in case of 0/0
* @return fnr as a double
*/
public double falseNegativeRate(Integer classLabel, double edgeCase) {
double fnCount = falseNegatives.getCount(classLabel);
double tpCount = truePositives.getCount(classLabel);
return EvaluationUtils.falseNegativeRate((long) fnCount, (long) tpCount, edgeCase);
}
/**
* False negative rate based on guesses so far
* Note: value returned will differ depending on number of classes and settings.<br>
* 1. For binary classification, if the positive class is set (via default value of 1, via constructor,
* or via {@link #setBinaryPositiveClass(Integer)}), the returned value will be for the specified positive class
* only.<br>
* 2. For the multi-class case, or when {@link #getBinaryPositiveClass()} is null, the returned value is macro-averaged
* across all classes. i.e., is macro-averaged false negative rate, equivalent to
* {@code falseNegativeRate(EvaluationAveraging.Macro)}<br>
*
* @return the fnr for the outcomes
*/
public double falseNegativeRate() {
if(binaryPositiveClass != null && numClasses() == 2){
return falseNegativeRate(binaryPositiveClass);
}
return falseNegativeRate(EvaluationAveraging.Macro);
}
/**
* Calculate the average false negative rate for all classes - can specify whether macro or micro averaging should be used
*
* @param averaging Averaging method - macro or micro
* @return Average false negative rate
*/
public double falseNegativeRate(EvaluationAveraging averaging) {
int nClasses = confusion().getClasses().size();
if (averaging == EvaluationAveraging.Macro) {
double macroFNR = 0.0;
for (int i = 0; i < nClasses; i++) {
macroFNR += falseNegativeRate(i);
}
macroFNR /= nClasses;
return macroFNR;
} else if (averaging == EvaluationAveraging.Micro) {
long fnCount = 0;
long tnCount = 0;
for (int i = 0; i < nClasses; i++) {
fnCount += falseNegatives.getCount(i);
tnCount += trueNegatives.getCount(i);
}
return EvaluationUtils.falseNegativeRate(fnCount, tnCount, DEFAULT_EDGE_VALUE);
} else {
throw new UnsupportedOperationException("Unknown averaging approach: " + averaging);
}
}
/**
* False Alarm Rate (FAR) reflects rate of misclassified to classified records
* http://ro.ecu.edu.au/cgi/viewcontent.cgi?article=1058&context=isw<br>
* Note: value returned will differ depending on number of classes and settings.<br>
* 1. For binary classification, if the positive class is set (via default value of 1, via constructor,
* or via {@link #setBinaryPositiveClass(Integer)}), the returned value will be for the specified positive class
* only.<br>
* 2. For the multi-class case, or when {@link #getBinaryPositiveClass()} is null, the returned value is macro-averaged
* across all classes. i.e., is macro-averaged false alarm rate)
*
* @return the fpr for the outcomes
*/
public double falseAlarmRate() {
if(binaryPositiveClass != null && numClasses() == 2){
return (falsePositiveRate(binaryPositiveClass) + falseNegativeRate(binaryPositiveClass)) / 2.0;
}
return (falsePositiveRate() + falseNegativeRate()) / 2.0;
}
/**
* Calculate f1 score for a given class
*
* @param classLabel the label to calculate f1 for
* @return the f1 score for the given label
*/
public double f1(int classLabel) {
return fBeta(1.0, classLabel);
}
/**
* Calculate the f_beta for a given class, where f_beta is defined as:<br>
* (1+beta^2) * (precision * recall) / (beta^2 * precision + recall).<br>
* F1 is a special case of f_beta, with beta=1.0
*
* @param beta Beta value to use
* @param classLabel Class label
* @return F_beta
*/
public double fBeta(double beta, int classLabel) {
return fBeta(beta, classLabel, 0.0);
}
/**
* Calculate the f_beta for a given class, where f_beta is defined as:<br>
* (1+beta^2) * (precision * recall) / (beta^2 * precision + recall).<br>
* F1 is a special case of f_beta, with beta=1.0
*
* @param beta Beta value to use
* @param classLabel Class label
* @param defaultValue Default value to use when precision or recall is undefined (0/0 for prec. or recall)
* @return F_beta
*/
public double fBeta(double beta, int classLabel, double defaultValue) {
double precision = precision(classLabel, -1);
double recall = recall(classLabel, -1);
if (precision == -1 || recall == -1) {
return defaultValue;
}
return EvaluationUtils.fBeta(beta, precision, recall);
}
/**
* Calculate the F1 score<br>
* F1 score is defined as:<br>
* TP: true positive<br>
* FP: False Positive<br>
* FN: False Negative<br>
* F1 score: 2 * TP / (2TP + FP + FN)<br>
* <br>
* Note: value returned will differ depending on number of classes and settings.<br>
* 1. For binary classification, if the positive class is set (via default value of 1, via constructor,
* or via {@link #setBinaryPositiveClass(Integer)}), the returned value will be for the specified positive class
* only.<br>
* 2. For the multi-class case, or when {@link #getBinaryPositiveClass()} is null, the returned value is macro-averaged
* across all classes. i.e., is macro-averaged f1, equivalent to {@code f1(EvaluationAveraging.Macro)}<br>
*
* @return the f1 score or harmonic mean of precision and recall based on current guesses
*/
public double f1() {
if(binaryPositiveClass != null && numClasses() == 2){
return f1(binaryPositiveClass);
}
return f1(EvaluationAveraging.Macro);
}
/**
* Calculate the average F1 score across all classes, using macro or micro averaging
*
* @param averaging Averaging method to use
*/
public double f1(EvaluationAveraging averaging) {
return fBeta(1.0, averaging);
}
    /**
     * Calculate the average F_beta score across all classes, using macro or micro averaging
     *
     * @param beta Beta value to use
     * @param averaging Averaging method to use
     * @return Average F_beta (NaN if no data has been collected)
     */
    public double fBeta(double beta, EvaluationAveraging averaging) {
        if(getNumRowCounter() == 0.0){
            return Double.NaN; //No data
        }
        int nClasses = confusion().getClasses().size();
        //Binary shortcut: report F_beta of class 1 directly, ignoring the averaging argument
        //NOTE(review): this uses class 1 regardless of binaryPositiveClass - confirm intended
        if (nClasses == 2) {
            return EvaluationUtils.fBeta(beta, (long) truePositives.getCount(1), (long) falsePositives.getCount(1),
                            (long) falseNegatives.getCount(1));
        }
        if (averaging == EvaluationAveraging.Macro) {
            double macroFBeta = 0.0;
            int count = 0;
            for (int i = 0; i < nClasses; i++) {
                //-1 is a sentinel: fBeta returns it when precision or recall is undefined (0/0)
                double thisFBeta = fBeta(beta, i, -1);
                if (thisFBeta != -1) {
                    macroFBeta += thisFBeta;
                    count++;
                }
            }
            //NOTE(review): if every class is excluded, count == 0 and this yields NaN
            macroFBeta /= count;
            return macroFBeta;
        } else if (averaging == EvaluationAveraging.Micro) {
            //Micro: pool TP/FP/FN over all classes, then compute F_beta once
            long tpCount = 0;
            long fpCount = 0;
            long fnCount = 0;
            for (int i = 0; i < nClasses; i++) {
                tpCount += truePositives.getCount(i);
                fpCount += falsePositives.getCount(i);
                fnCount += falseNegatives.getCount(i);
            }
            return EvaluationUtils.fBeta(beta, tpCount, fpCount, fnCount);
        } else {
            throw new UnsupportedOperationException("Unknown averaging approach: " + averaging);
        }
    }
/**
* Calculate the G-measure for the given output
*
* @param output The specified output
* @return The G-measure for the specified output
*/
public double gMeasure(int output) {
double precision = precision(output);
double recall = recall(output);
return EvaluationUtils.gMeasure(precision, recall);
}
    /**
     * Calculates the average G measure for all outputs using micro or macro averaging
     *
     * @param averaging Averaging method to use
     * @return Average G measure
     */
    public double gMeasure(EvaluationAveraging averaging) {
        int nClasses = confusion().getClasses().size();
        if (averaging == EvaluationAveraging.Macro) {
            //Macro: simple mean of per-class G-measures (no 0/0 exclusion, unlike precision/recall averaging)
            double macroGMeasure = 0.0;
            for (int i = 0; i < nClasses; i++) {
                macroGMeasure += gMeasure(i);
            }
            macroGMeasure /= nClasses;
            return macroGMeasure;
        } else if (averaging == EvaluationAveraging.Micro) {
            //Micro: pool TP/FP/FN over all classes, then compute precision, recall and G once
            long tpCount = 0;
            long fpCount = 0;
            long fnCount = 0;
            for (int i = 0; i < nClasses; i++) {
                tpCount += truePositives.getCount(i);
                fpCount += falsePositives.getCount(i);
                fnCount += falseNegatives.getCount(i);
            }
            double precision = EvaluationUtils.precision(tpCount, fpCount, DEFAULT_EDGE_VALUE);
            double recall = EvaluationUtils.recall(tpCount, fnCount, DEFAULT_EDGE_VALUE);
            return EvaluationUtils.gMeasure(precision, recall);
        } else {
            throw new UnsupportedOperationException("Unknown averaging approach: " + averaging);
        }
    }
/**
* Accuracy:
* (TP + TN) / (P + N)
*
* @return the accuracy of the guesses so far
*/
public double accuracy() {
if (getNumRowCounter() == 0) {
return 0.0; //No records
}
//Accuracy: sum the counts on the diagonal of the confusion matrix, divide by total
int nClasses = confusion().getClasses().size();
int countCorrect = 0;
for (int i = 0; i < nClasses; i++) {
countCorrect += confusion().getCount(i, i);
}
return countCorrect / (double) getNumRowCounter();
}
/**
* Top N accuracy of the predictions so far. For top N = 1 (default), equivalent to {@link #accuracy()}
* @return Top N accuracy
*/
public double topNAccuracy() {
if (topN <= 1)
return accuracy();
if (topNTotalCount == 0)
return 0.0;
return topNCorrectCount / (double) topNTotalCount;
}
/**
* Calculate the binary Mathews correlation coefficient, for the specified class.<br>
* MCC = (TP*TN - FP*FN) / sqrt((TP+FP)(TP+FN)(TN+FP)(TN+FN))<br>
*
* @param classIdx Class index to calculate Matthews correlation coefficient for
*/
public double matthewsCorrelation(int classIdx) {
return EvaluationUtils.matthewsCorrelation((long) truePositives.getCount(classIdx),
(long) falsePositives.getCount(classIdx), (long) falseNegatives.getCount(classIdx),
(long) trueNegatives.getCount(classIdx));
}
    /**
     * Calculate the average binary Mathews correlation coefficient, using macro or micro averaging.<br>
     * MCC = (TP*TN - FP*FN) / sqrt((TP+FP)(TP+FN)(TN+FP)(TN+FN))<br>
     * Note: This is NOT the same as the multi-class Matthews correlation coefficient
     *
     * @param averaging Averaging approach
     * @return Average MCC
     */
    public double matthewsCorrelation(EvaluationAveraging averaging) {
        int nClasses = confusion().getClasses().size();
        if (averaging == EvaluationAveraging.Macro) {
            //Macro: simple mean of per-class binary MCC values
            double macroMatthewsCorrelation = 0.0;
            for (int i = 0; i < nClasses; i++) {
                macroMatthewsCorrelation += matthewsCorrelation(i);
            }
            macroMatthewsCorrelation /= nClasses;
            return macroMatthewsCorrelation;
        } else if (averaging == EvaluationAveraging.Micro) {
            //Micro: pool TP/FP/FN/TN over all classes, then compute MCC once
            long tpCount = 0;
            long fpCount = 0;
            long fnCount = 0;
            long tnCount = 0;
            for (int i = 0; i < nClasses; i++) {
                tpCount += truePositives.getCount(i);
                fpCount += falsePositives.getCount(i);
                fnCount += falseNegatives.getCount(i);
                tnCount += trueNegatives.getCount(i);
            }
            return EvaluationUtils.matthewsCorrelation(tpCount, fpCount, fnCount, tnCount);
        } else {
            throw new UnsupportedOperationException("Unknown averaging approach: " + averaging);
        }
    }
    /**
     * True positives: examples correctly classified as the given class, per class.
     * (The previous javadoc said "correctly rejected", which describes true negatives.)
     *
     * @return map of class index to true positive count
     */
    public Map<Integer, Integer> truePositives() {
        return convertToMap(truePositives, confusion().getClasses().size());
    }
/**
* True negatives: correctly rejected
*
* @return the total true negatives so far
*/
public Map<Integer, Integer> trueNegatives() {
return convertToMap(trueNegatives, confusion().getClasses().size());
}
/**
* False positive: wrong guess
*
* @return the count of the false positives
*/
public Map<Integer, Integer> falsePositives() {
return convertToMap(falsePositives, confusion().getClasses().size());
}
    /**
     * False negatives: examples of the given class that were predicted as some other class, per class.
     * (The previous javadoc said "correctly rejected", which describes true negatives.)
     *
     * @return map of class index to false negative count
     */
    public Map<Integer, Integer> falseNegatives() {
        return convertToMap(falseNegatives, confusion().getClasses().size());
    }
    /**
     * Total actual negatives per class: true negatives + false positives, i.e., all examples
     * whose actual class is NOT the given class.
     * (The previous javadoc said "true negatives + false negatives", which does not match
     * the implementation below.)
     *
     * @return map of class index to actual-negative count
     */
    public Map<Integer, Integer> negative() {
        return addMapsByKey(trueNegatives(), falsePositives());
    }
    /**
     * Total actual positives per class: true positives + false negatives, i.e., all examples
     * whose actual class IS the given class.
     *
     * @return map of class index to actual-positive count
     */
    public Map<Integer, Integer> positive() {
        return addMapsByKey(truePositives(), falseNegatives());
    }
private Map<Integer, Integer> convertToMap(Counter<Integer> counter, int maxCount) {
Map<Integer, Integer> map = new HashMap<>();
for (int i = 0; i < maxCount; i++) {
map.put(i, (int) counter.getCount(i));
}
return map;
}
private Map<Integer, Integer> addMapsByKey(Map<Integer, Integer> first, Map<Integer, Integer> second) {
Map<Integer, Integer> out = new HashMap<>();
Set<Integer> keys = new HashSet<>(first.keySet());
keys.addAll(second.keySet());
for (Integer i : keys) {
Integer f = first.get(i);
Integer s = second.get(i);
if (f == null)
f = 0;
if (s == null)
s = 0;
out.put(i, f + s);
}
return out;
}
    // Incrementing counters
    /** Increment the true positive count for the given class by one. */
    public void incrementTruePositives(Integer classLabel) {
        truePositives.incrementCount(classLabel, 1.0f);
    }
    /** Increment the true negative count for the given class by one. */
    public void incrementTrueNegatives(Integer classLabel) {
        trueNegatives.incrementCount(classLabel, 1.0f);
    }
    /** Increment the false negative count for the given class by one. */
    public void incrementFalseNegatives(Integer classLabel) {
        falseNegatives.incrementCount(classLabel, 1.0f);
    }
    /** Increment the false positive count for the given class by one. */
    public void incrementFalsePositives(Integer classLabel) {
        falsePositives.incrementCount(classLabel, 1.0f);
    }
// Other misc methods
/**
* Adds to the confusion matrix
*
* @param real the actual guess
* @param guess the system guess
*/
public void addToConfusion(Integer real, Integer guess) {
confusion().add(real, guess);
}
/**
* Returns the number of times the given label
* has actually occurred
*
* @param clazz the label
* @return the number of times the label
* actually occurred
*/
public int classCount(Integer clazz) {
return confusion().getActualTotal(clazz);
}
    /** @return the total number of examples evaluated so far */
    public int getNumRowCounter() {
        return numRowCounter;
    }
/**
* Return the number of correct predictions according to top N value. For top N = 1 (default) this is equivalent to
* the number of correct predictions
* @return Number of correct top N predictions
*/
public int getTopNCorrectCount() {
if (confusion == null)
return 0;
if (topN <= 1) {
int nClasses = confusion().getClasses().size();
int countCorrect = 0;
for (int i = 0; i < nClasses; i++) {
countCorrect += confusion().getCount(i, i);
}
return countCorrect;
}
return topNCorrectCount;
}
/**
* Return the total number of top N evaluations. Most of the time, this is exactly equal to {@link #getNumRowCounter()},
* but may differ in the case of using {@link #eval(int, int)} as top N accuracy cannot be calculated in that case
* (i.e., requires the full probability distribution, not just predicted/actual indices)
* @return Total number of top N predictions
*/
public int getTopNTotalCount() {
if (topN <= 1) {
return getNumRowCounter();
}
return topNTotalCount;
}
    /** @return the human-readable label for the given class index (falls back to the index itself) */
    public String getClassLabel(Integer clazz) {
        return resolveLabelForClass(clazz);
    }
    /**
     * Returns the confusion matrix variable
     *
     * @return confusion matrix variable for this evaluation (may be null if no data has been seen)
     */
    public ConfusionMatrix<Integer> getConfusionMatrix() {
        return confusion;
    }
    /**
     * Merge the other evaluation object into this one. The result is that this Evaluation instance contains the counts
     * etc from both
     *
     * @param other Evaluation object to merge into this one.
     */
    @Override
    public void merge(Evaluation other) {
        if (other == null)
            return;
        //Accumulate all per-class counters from the other evaluation
        truePositives.incrementAll(other.truePositives);
        falsePositives.incrementAll(other.falsePositives);
        trueNegatives.incrementAll(other.trueNegatives);
        falseNegatives.incrementAll(other.falseNegatives);
        //Merge confusion matrices; if this instance has none yet, copy the other's
        if (confusion == null) {
            if (other.confusion != null)
                confusion = new ConfusionMatrix<>(other.confusion);
        } else {
            if (other.confusion != null)
                confusion().add(other.confusion);
        }
        numRowCounter += other.numRowCounter;
        //Adopt the other evaluation's labels if we have none of our own
        //NOTE(review): assumes labelsList is non-null here; it is null-checked elsewhere in this class - confirm
        if (labelsList.isEmpty())
            labelsList.addAll(other.labelsList);
        //Top-N counts collected under different topN settings are not directly comparable
        if (topN != other.topN) {
            log.warn("Different topN values ({} vs {}) detected during Evaluation merging. Top N accuracy may not be accurate.",
                            topN, other.topN);
        }
        this.topNCorrectCount += other.topNCorrectCount;
        this.topNTotalCount += other.topNTotalCount;
    }
    /**
     * Get a String representation of the confusion matrix, with label names and a
     * "Predicted:" header row. Column widths are derived from the longest label.
     */
    public String confusionToString() {
        int nClasses = confusion().getClasses().size();
        //First: work out the longest label size, to size the label column
        int maxLabelSize = 0;
        for (String s : labelsList) {
            maxLabelSize = Math.max(maxLabelSize, s.length());
        }
        //Build the format string for the data rows: index, label, then one %7d per class
        int labelSize = Math.max(maxLabelSize + 5, 10);
        StringBuilder sb = new StringBuilder();
        sb.append("%-3d");
        sb.append("%-");
        sb.append(labelSize);
        sb.append("s | ");
        StringBuilder headerFormat = new StringBuilder();
        headerFormat.append("   %-").append(labelSize).append("s ");
        for (int i = 0; i < nClasses; i++) {
            sb.append("%7d");
            headerFormat.append("%7d");
        }
        String rowFormat = sb.toString();
        StringBuilder out = new StringBuilder();
        //First: header row with predicted class indices
        Object[] headerArgs = new Object[nClasses + 1];
        headerArgs[0] = "Predicted:";
        for (int i = 0; i < nClasses; i++)
            headerArgs[i + 1] = i;
        out.append(String.format(headerFormat.toString(), headerArgs)).append("\n");
        //Second: divider rows
        out.append("   Actual:\n");
        //Finally: one data row per actual class (index, label, per-predicted-class counts)
        for (int i = 0; i < nClasses; i++) {
            Object[] args = new Object[nClasses + 2];
            args[0] = i;
            args[1] = labelsList.get(i);
            for (int j = 0; j < nClasses; j++) {
                args[j + 2] = confusion().getCount(i, j);
            }
            out.append(String.format(rowFormat, args));
            out.append("\n");
        }
        return out.toString();
    }
/**
 * Record one piece of example metadata under the (actual, predicted) cell of the
 * metadata confusion matrix, lazily creating the map and the per-cell list.
 */
private void addToMetaConfusionMatrix(int actual, int predicted, Object metaData) {
    if (confusionMatrixMetaData == null) {
        confusionMatrixMetaData = new HashMap<>();
    }
    Pair<Integer, Integer> cell = new Pair<>(actual, predicted);
    List<Object> cellMeta = confusionMatrixMetaData.get(cell);
    if (cellMeta == null) {
        cellMeta = new ArrayList<>();
        confusionMatrixMetaData.put(cell, cellMeta);
    }
    cellMeta.add(metaData);
}
/**
 * Get a list of prediction errors, on a per-record basis<br>
 * <p>
 * <b>Note</b>: Prediction errors are ONLY available if the "evaluate with metadata" method is used: {@link #eval(INDArray, INDArray, List)}
 * Otherwise (if the metadata hasn't been recorded via that previously mentioned eval method), there is no value in
 * splitting each prediction out into a separate Prediction object - instead, use the confusion matrix to get the counts,
 * via {@link #getConfusionMatrix()}
 *
 * @return A list of prediction errors, or null if no metadata has been recorded
 */
public List<Prediction> getPredictionErrors() {
    if (this.confusionMatrixMetaData == null)
        return null;
    // Sort the (actual, predicted) cells: primarily by actual class, then by predicted class.
    List<Map.Entry<Pair<Integer, Integer>, List<Object>>> cells =
            new ArrayList<>(confusionMatrixMetaData.entrySet());
    Collections.sort(cells, new Comparator<Map.Entry<Pair<Integer, Integer>, List<Object>>>() {
        @Override
        public int compare(Map.Entry<Pair<Integer, Integer>, List<Object>> a,
                           Map.Entry<Pair<Integer, Integer>, List<Object>> b) {
            int byActual = Integer.compare(a.getKey().getFirst(), b.getKey().getFirst());
            if (byActual != 0)
                return byActual;
            return Integer.compare(a.getKey().getSecond(), b.getKey().getSecond());
        }
    });
    // Emit one Prediction per metadata object in every off-diagonal (error) cell.
    List<Prediction> errors = new ArrayList<>();
    for (Map.Entry<Pair<Integer, Integer>, List<Object>> cell : cells) {
        Pair<Integer, Integer> key = cell.getKey();
        if (key.getFirst().equals(key.getSecond())) {
            // Diagonal cell: predicted == actual, so it is not an error.
            continue;
        }
        for (Object meta : cell.getValue()) {
            errors.add(new Prediction(key.getFirst(), key.getSecond(), meta));
        }
    }
    return errors;
}
/**
 * Get a list of predictions, for all data with the specified <i>actual</i> class, regardless of the predicted
 * class.
 * <p>
 * <b>Note</b>: Prediction errors are ONLY available if the "evaluate with metadata" method is used: {@link #eval(INDArray, INDArray, List)}
 * Otherwise (if the metadata hasn't been recorded via that previously mentioned eval method), there is no value in
 * splitting each prediction out into a separate Prediction object - instead, use the confusion matrix to get the counts,
 * via {@link #getConfusionMatrix()}
 *
 * @param actualClass Actual class to get predictions for
 * @return List of predictions, or null if the "evaluate with metadata" method was not used
 */
public List<Prediction> getPredictionsByActualClass(int actualClass) {
    if (confusionMatrixMetaData == null)
        return null;
    List<Prediction> matches = new ArrayList<>();
    // Each map key is an (actual, predicted) pair; keep every cell whose actual class matches.
    for (Map.Entry<Pair<Integer, Integer>, List<Object>> cell : confusionMatrixMetaData.entrySet()) {
        int actual = cell.getKey().getFirst();
        if (actual != actualClass) {
            continue;
        }
        int predicted = cell.getKey().getSecond();
        for (Object meta : cell.getValue()) {
            matches.add(new Prediction(actual, predicted, meta));
        }
    }
    return matches;
}
/**
 * Get a list of predictions, for all data with the specified <i>predicted</i> class, regardless of the actual data
 * class.
 * <p>
 * <b>Note</b>: Prediction errors are ONLY available if the "evaluate with metadata" method is used: {@link #eval(INDArray, INDArray, List)}
 * Otherwise (if the metadata hasn't been recorded via that previously mentioned eval method), there is no value in
 * splitting each prediction out into a separate Prediction object - instead, use the confusion matrix to get the counts,
 * via {@link #getConfusionMatrix()}
 *
 * @param predictedClass Predicted class to get predictions for
 * @return List of predictions, or null if the "evaluate with metadata" method was not used
 */
public List<Prediction> getPredictionByPredictedClass(int predictedClass) {
    if (confusionMatrixMetaData == null)
        return null;
    List<Prediction> matches = new ArrayList<>();
    // Each map key is an (actual, predicted) pair; keep every cell whose predicted class matches.
    for (Map.Entry<Pair<Integer, Integer>, List<Object>> cell : confusionMatrixMetaData.entrySet()) {
        int predicted = cell.getKey().getSecond();
        if (predicted != predictedClass) {
            continue;
        }
        int actual = cell.getKey().getFirst();
        for (Object meta : cell.getValue()) {
            matches.add(new Prediction(actual, predicted, meta));
        }
    }
    return matches;
}
/**
 * Get a list of predictions in the specified confusion matrix entry (i.e., for the given actual/predicted class pair)
 *
 * @param actualClass    Actual class
 * @param predictedClass Predicted class
 * @return List of predictions that match the specified actual/predicted classes, or null if the "evaluate with metadata" method was not used
 */
public List<Prediction> getPredictions(int actualClass, int predictedClass) {
    if (confusionMatrixMetaData == null)
        return null;
    List<Prediction> predictions = new ArrayList<>();
    // Look up the single (actual, predicted) cell; an absent cell yields an empty list, not null.
    List<Object> cellMeta = confusionMatrixMetaData.get(new Pair<>(actualClass, predictedClass));
    if (cellMeta != null) {
        for (Object meta : cellMeta) {
            predictions.add(new Prediction(actualClass, predictedClass, meta));
        }
    }
    return predictions;
}
/**
 * Score this evaluation with a single scalar value for the given metric.
 * Multi-class metrics (G-measure, Matthews correlation) use macro averaging.
 *
 * @param metric the metric to compute; must be one of the handled enum values
 * @return the metric value
 * @throws IllegalStateException if the metric is not recognized
 */
public double scoreForMetric(Metric metric){
    switch (metric){
        case ACCURACY:
            return accuracy();
        case F1:
            return f1();
        case PRECISION:
            return precision();
        case RECALL:
            return recall();
        case GMEASURE:
            return gMeasure(EvaluationAveraging.Macro);
        case MCC:
            return matthewsCorrelation(EvaluationAveraging.Macro);
        default:
            throw new IllegalStateException("Unknown metric: " + metric);
    }
}
/**
 * Deserialize an Evaluation instance from its JSON representation.
 *
 * @param json JSON string produced by the corresponding serialization method
 * @return the deserialized Evaluation
 */
public static Evaluation fromJson(String json) {
    return fromJson(json, Evaluation.class);
}

/**
 * Deserialize an Evaluation instance from its YAML representation.
 *
 * @param yaml YAML string produced by the corresponding serialization method
 * @return the deserialized Evaluation
 */
public static Evaluation fromYaml(String yaml) {
    return fromYaml(yaml, Evaluation.class);
}
}
|
/*
* This is the source code of Telegram for Android v. 5.x.x.
* It is licensed under GNU GPL v. 2 or later.
* You should have received a copy of the license in this archive (see LICENSE).
*
* Copyright Nikolai Kudashov, 2013-2018.
*/
package org.telegram.ui.Components;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.os.Build;
import android.text.Editable;
import android.text.Layout;
import android.text.Spanned;
import android.text.StaticLayout;
import android.text.TextPaint;
import android.text.TextUtils;
import android.text.style.CharacterStyle;
import android.util.TypedValue;
import android.view.ActionMode;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.accessibility.AccessibilityNodeInfo;
import android.view.inputmethod.EditorInfo;
import android.widget.FrameLayout;
import org.telegram.messenger.AndroidUtilities;
import org.telegram.messenger.FileLog;
import org.telegram.messenger.LocaleController;
import org.telegram.messenger.MediaDataController;
import org.telegram.messenger.R;
import org.telegram.ui.ActionBar.AlertDialog;
import org.telegram.ui.ActionBar.Theme;
/**
 * EditText with rich-text styling support (bold/italic/mono/strike/underline/link)
 * applied via the text-selection action mode, plus an inline "caption" hint that is
 * drawn after a leading "@username " prefix in the text.
 */
public class EditTextCaption extends EditTextBoldCursor {

    // Caption hint text; newlines are flattened to spaces in setCaption().
    private String caption;
    // Pre-measured layout for the caption hint; rebuilt in onMeasure(), null when not drawn.
    private StaticLayout captionLayout;
    // Length of the "@username " prefix the caption was measured against; the caption is
    // only drawn while the text length still equals this value (see onDraw()).
    private int userNameLength;
    // Draw offsets for the caption layout, computed in onMeasure().
    private int xOffset;
    private int yOffset;
    // NOTE(review): appears unused in this class - confirm before removing.
    private int triesCount = 0;
    // True while a copy/paste ActionMode is showing; used to suppress focus-loss side
    // effects on pre-Marshmallow devices (see onWindowFocusChanged()).
    private boolean copyPasteShowed;
    // Color used to paint the caption hint.
    private int hintColor;
    private EditTextCaptionDelegate delegate;
    // Optional externally-forced selection range; -1 means "use the widget's own selection".
    private int selectionStart = -1;
    private int selectionEnd = -1;
    // Passed through to MediaDataController.addStyleToText() when applying styles.
    private boolean allowTextEntitiesIntersection;

    /** Callback fired whenever styling spans in the text are changed by this widget. */
    public interface EditTextCaptionDelegate {
        void onSpansChanged();
    }

    public EditTextCaption(Context context) {
        super(context);
    }

    /**
     * Set the caption hint. No-ops when the value is unchanged (treating null and empty
     * as equivalent); otherwise stores it with newlines flattened and requests a relayout.
     */
    public void setCaption(String value) {
        if ((caption == null || caption.length() == 0) && (value == null || value.length() == 0) || caption != null && caption.equals(value)) {
            return;
        }
        caption = value;
        if (caption != null) {
            caption = caption.replace('\n', ' ');
        }
        requestLayout();
    }

    public void setDelegate(EditTextCaptionDelegate editTextCaptionDelegate) {
        delegate = editTextCaptionDelegate;
    }

    public void setAllowTextEntitiesIntersection(boolean value) {
        allowTextEntitiesIntersection = value;
    }

    /** Apply bold style to the current selection. */
    public void makeSelectedBold() {
        TextStyleSpan.TextStyleRun run = new TextStyleSpan.TextStyleRun();
        run.flags |= TextStyleSpan.FLAG_STYLE_BOLD;
        applyTextStyleToSelection(new TextStyleSpan(run));
    }

    /** Apply italic style to the current selection. */
    public void makeSelectedItalic() {
        TextStyleSpan.TextStyleRun run = new TextStyleSpan.TextStyleRun();
        run.flags |= TextStyleSpan.FLAG_STYLE_ITALIC;
        applyTextStyleToSelection(new TextStyleSpan(run));
    }

    /** Apply monospace style to the current selection. */
    public void makeSelectedMono() {
        TextStyleSpan.TextStyleRun run = new TextStyleSpan.TextStyleRun();
        run.flags |= TextStyleSpan.FLAG_STYLE_MONO;
        applyTextStyleToSelection(new TextStyleSpan(run));
    }

    /** Apply strikethrough style to the current selection. */
    public void makeSelectedStrike() {
        TextStyleSpan.TextStyleRun run = new TextStyleSpan.TextStyleRun();
        run.flags |= TextStyleSpan.FLAG_STYLE_STRIKE;
        applyTextStyleToSelection(new TextStyleSpan(run));
    }

    /** Apply underline style to the current selection. */
    public void makeSelectedUnderline() {
        TextStyleSpan.TextStyleRun run = new TextStyleSpan.TextStyleRun();
        run.flags |= TextStyleSpan.FLAG_STYLE_UNDERLINE;
        applyTextStyleToSelection(new TextStyleSpan(run));
    }

    /**
     * Show a "create link" dialog and, on confirmation, wrap the current selection in a
     * URLSpanReplacement. Existing character spans overlapping the selection are split so
     * that only the parts outside the selection keep their old style.
     */
    public void makeSelectedUrl() {
        AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
        builder.setTitle(LocaleController.getString("CreateLink", R.string.CreateLink));
        final EditTextBoldCursor editText = new EditTextBoldCursor(getContext()) {
            @Override
            protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
                // Force a fixed 64dp height regardless of the imposed measure spec.
                super.onMeasure(widthMeasureSpec, MeasureSpec.makeMeasureSpec(AndroidUtilities.dp(64), MeasureSpec.EXACTLY));
            }
        };
        editText.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 18);
        editText.setText("http://");
        editText.setTextColor(Theme.getColor(Theme.key_dialogTextBlack));
        editText.setHintText(LocaleController.getString("URL", R.string.URL));
        editText.setHeaderHintColor(Theme.getColor(Theme.key_windowBackgroundWhiteBlueHeader));
        editText.setSingleLine(true);
        editText.setFocusable(true);
        editText.setTransformHintToHeader(true);
        editText.setLineColors(Theme.getColor(Theme.key_windowBackgroundWhiteInputField), Theme.getColor(Theme.key_windowBackgroundWhiteInputFieldActivated), Theme.getColor(Theme.key_windowBackgroundWhiteRedText3));
        editText.setImeOptions(EditorInfo.IME_ACTION_DONE);
        editText.setBackgroundDrawable(null);
        editText.requestFocus();
        editText.setPadding(0, 0, 0, 0);
        builder.setView(editText);
        // Capture the selection now: the override range if one was set (consuming it),
        // otherwise the widget's live selection. The dialog outlives the selection state.
        final int start;
        final int end;
        if (selectionStart >= 0 && selectionEnd >= 0) {
            start = selectionStart;
            end = selectionEnd;
            selectionStart = selectionEnd = -1;
        } else {
            start = getSelectionStart();
            end = getSelectionEnd();
        }
        builder.setPositiveButton(LocaleController.getString("OK", R.string.OK), (dialogInterface, i) -> {
            Editable editable = getText();
            CharacterStyle[] spans = editable.getSpans(start, end, CharacterStyle.class);
            if (spans != null && spans.length > 0) {
                for (int a = 0; a < spans.length; a++) {
                    CharacterStyle oldSpan = spans[a];
                    int spanStart = editable.getSpanStart(oldSpan);
                    int spanEnd = editable.getSpanEnd(oldSpan);
                    editable.removeSpan(oldSpan);
                    // Re-apply the old span only on the portions outside [start, end).
                    if (spanStart < start) {
                        editable.setSpan(oldSpan, spanStart, start, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                    if (spanEnd > end) {
                        editable.setSpan(oldSpan, end, spanEnd, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
                    }
                }
            }
            try {
                editable.setSpan(new URLSpanReplacement(editText.getText().toString()), start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
            } catch (Exception ignore) {
                // Best-effort: invalid span bounds (e.g. text changed underneath) are ignored.
            }
            if (delegate != null) {
                delegate.onSpansChanged();
            }
        });
        builder.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), null);
        builder.show().setOnShowListener(dialog -> {
            editText.requestFocus();
            AndroidUtilities.showKeyboard(editText);
        });
        // NOTE(review): editText is a final local initialized above, so this null check is
        // always true - kept as-is to preserve the original code exactly.
        if (editText != null) {
            ViewGroup.MarginLayoutParams layoutParams = (ViewGroup.MarginLayoutParams) editText.getLayoutParams();
            if (layoutParams != null) {
                if (layoutParams instanceof FrameLayout.LayoutParams) {
                    ((FrameLayout.LayoutParams) layoutParams).gravity = Gravity.CENTER_HORIZONTAL;
                }
                layoutParams.rightMargin = layoutParams.leftMargin = AndroidUtilities.dp(24);
                layoutParams.height = AndroidUtilities.dp(36);
                editText.setLayoutParams(layoutParams);
            }
            editText.setSelection(0, editText.getText().length());
        }
    }

    /** Remove all styling from the current selection. */
    public void makeSelectedRegular() {
        applyTextStyleToSelection(null);
    }

    /**
     * Force the selection range used by the next style operation, overriding the
     * widget's own selection. Consumed (reset to -1) on first use.
     */
    public void setSelectionOverride(int start, int end) {
        selectionStart = start;
        selectionEnd = end;
    }

    /**
     * Apply the given style span (or clear styles when null) over the effective
     * selection, then notify the delegate.
     */
    private void applyTextStyleToSelection(TextStyleSpan span) {
        int start;
        int end;
        if (selectionStart >= 0 && selectionEnd >= 0) {
            start = selectionStart;
            end = selectionEnd;
            selectionStart = selectionEnd = -1;
        } else {
            start = getSelectionStart();
            end = getSelectionEnd();
        }
        MediaDataController.addStyleToText(span, start, end, getText(), allowTextEntitiesIntersection);
        if (delegate != null) {
            delegate.onSpansChanged();
        }
    }

    @Override
    public void onWindowFocusChanged(boolean hasWindowFocus) {
        // Pre-M: losing window focus while a copy/paste menu is up would dismiss it;
        // swallow the focus change in that case.
        if (Build.VERSION.SDK_INT < 23 && !hasWindowFocus && copyPasteShowed) {
            return;
        }
        super.onWindowFocusChanged(hasWindowFocus);
    }

    /**
     * Wrap the platform ActionMode callback so the custom formatting menu items
     * (regular/bold/italic/mono/link/strike/underline) are handled here, while
     * everything else is forwarded to the original callback.
     */
    private ActionMode.Callback overrideCallback(final ActionMode.Callback callback) {
        ActionMode.Callback wrap = new ActionMode.Callback() {
            @Override
            public boolean onCreateActionMode(ActionMode mode, Menu menu) {
                copyPasteShowed = true;
                return callback.onCreateActionMode(mode, menu);
            }

            @Override
            public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
                return callback.onPrepareActionMode(mode, menu);
            }

            @Override
            public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
                if (item.getItemId() == R.id.menu_regular) {
                    makeSelectedRegular();
                    mode.finish();
                    return true;
                } else if (item.getItemId() == R.id.menu_bold) {
                    makeSelectedBold();
                    mode.finish();
                    return true;
                } else if (item.getItemId() == R.id.menu_italic) {
                    makeSelectedItalic();
                    mode.finish();
                    return true;
                } else if (item.getItemId() == R.id.menu_mono) {
                    makeSelectedMono();
                    mode.finish();
                    return true;
                } else if (item.getItemId() == R.id.menu_link) {
                    makeSelectedUrl();
                    mode.finish();
                    return true;
                } else if (item.getItemId() == R.id.menu_strike) {
                    makeSelectedStrike();
                    mode.finish();
                    return true;
                } else if (item.getItemId() == R.id.menu_underline) {
                    makeSelectedUnderline();
                    mode.finish();
                    return true;
                }
                // Delegate unrecognized items; exceptions from the wrapped callback are
                // swallowed and treated as handled.
                try {
                    return callback.onActionItemClicked(mode, item);
                } catch (Exception ignore) {
                }
                return true;
            }

            @Override
            public void onDestroyActionMode(ActionMode mode) {
                copyPasteShowed = false;
                callback.onDestroyActionMode(mode);
            }
        };
        // On M+ the callback must be a Callback2 to support floating toolbars; forward
        // onGetContentRect to the original when it is also a Callback2.
        if (Build.VERSION.SDK_INT >= 23) {
            return new ActionMode.Callback2() {
                @Override
                public boolean onCreateActionMode(ActionMode mode, Menu menu) {
                    return wrap.onCreateActionMode(mode, menu);
                }

                @Override
                public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
                    return wrap.onPrepareActionMode(mode, menu);
                }

                @Override
                public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
                    return wrap.onActionItemClicked(mode, item);
                }

                @Override
                public void onDestroyActionMode(ActionMode mode) {
                    wrap.onDestroyActionMode(mode);
                }

                @Override
                public void onGetContentRect(ActionMode mode, View view, Rect outRect) {
                    if (callback instanceof ActionMode.Callback2) {
                        ((ActionMode.Callback2) callback).onGetContentRect(mode, view, outRect);
                    } else {
                        super.onGetContentRect(mode, view, outRect);
                    }
                }
            };
        } else {
            return wrap;
        }
    }

    @Override
    public ActionMode startActionMode(final ActionMode.Callback callback, int type) {
        return super.startActionMode(overrideCallback(callback), type);
    }

    @Override
    public ActionMode startActionMode(final ActionMode.Callback callback) {
        return super.startActionMode(overrideCallback(callback));
    }

    @SuppressLint("DrawAllocation")
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        try {
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        } catch (Exception e) {
            // Fall back to a fixed 51dp height if the platform measure pass throws.
            setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec), AndroidUtilities.dp(51));
            FileLog.e(e);
        }
        // Rebuild the caption layout: only shown when the text starts with "@username "
        // (an '@' followed by a space-terminated token); the caption is ellipsized into
        // the remaining width after that prefix.
        captionLayout = null;
        if (caption != null && caption.length() > 0) {
            CharSequence text = getText();
            if (text.length() > 1 && text.charAt(0) == '@') {
                int index = TextUtils.indexOf(text, ' ');
                if (index != -1) {
                    TextPaint paint = getPaint();
                    CharSequence str = text.subSequence(0, index + 1);
                    int size = (int) Math.ceil(paint.measureText(text, 0, index + 1));
                    int width = getMeasuredWidth() - getPaddingLeft() - getPaddingRight();
                    userNameLength = str.length();
                    CharSequence captionFinal = TextUtils.ellipsize(caption, paint, width - size, TextUtils.TruncateAt.END);
                    xOffset = size;
                    try {
                        captionLayout = new StaticLayout(captionFinal, getPaint(), width - size, Layout.Alignment.ALIGN_NORMAL, 1.0f, 0.0f, false);
                        if (captionLayout.getLineCount() > 0) {
                            xOffset += -captionLayout.getLineLeft(0);
                        }
                        // Vertically center the single caption line within the view.
                        yOffset = (getMeasuredHeight() - captionLayout.getLineBottom(0)) / 2 + AndroidUtilities.dp(0.5f);
                    } catch (Exception e) {
                        FileLog.e(e);
                    }
                }
            }
        }
    }

    public String getCaption() {
        return caption;
    }

    @Override
    public void setHintColor(int value) {
        super.setHintColor(value);
        hintColor = value;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        // Draw the caption hint only while the text is exactly the measured "@username "
        // prefix; temporarily recolor the shared paint, then restore it.
        try {
            if (captionLayout != null && userNameLength == length()) {
                Paint paint = getPaint();
                int oldColor = getPaint().getColor();
                paint.setColor(hintColor);
                canvas.save();
                canvas.translate(xOffset, yOffset);
                captionLayout.draw(canvas);
                canvas.restore();
                paint.setColor(oldColor);
            }
        } catch (Exception e) {
            FileLog.e(e);
        }
    }

    @Override
    public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
        super.onInitializeAccessibilityNodeInfo(info);
        // Expose the caption to accessibility services: as a hint on O+, appended to the
        // text on older releases.
        if (!TextUtils.isEmpty(caption)) {
            if (Build.VERSION.SDK_INT >= 26) {
                info.setHintText(caption);
            } else {
                info.setText(info.getText() + ", " + caption);
            }
        }
    }
}
|
package evergarden.busydlg;
import javax.swing.JOptionPane;
import charlotte.tools.FileTools;
/**
 * Manual smoke test for {@code BusyDlg}: shows an info dialog, keeps the busy
 * dialog open for 15 seconds with a console countdown, then closes everything.
 */
public class Test01 {
    /** Entry point: run the scenario and print "OK!" on success, else dump the failure. */
    public static void main(String[] args) {
        try {
            test01();
            System.out.println("OK!");
        } catch (Throwable e) {
            e.printStackTrace();
        }
    }

    /** The actual scenario; the finally block guarantees the busy dialog is closed. */
    private static void test01() throws Exception {
        BusyDlg bd = null;
        try {
            JOptionPane.showMessageDialog(
                    null,
                    "開始します。",
                    "情報",
                    JOptionPane.INFORMATION_MESSAGE
            );
            bd = new BusyDlg("裏で何か処理しています...", "お待ち下さい");
            // Count down from 15 to 1, one line per second.
            int remaining = 15;
            while (remaining > 0) {
                System.out.println("あと " + remaining + " 秒...");
                Thread.sleep(1000);
                remaining--;
            }
            // Close eagerly and null out so the finally block's close is a no-op.
            FileTools.close(bd);
            bd = null;
            JOptionPane.showMessageDialog(
                    null,
                    "終了しました。",
                    "情報",
                    JOptionPane.INFORMATION_MESSAGE
            );
        } finally {
            FileTools.close(bd);
            bd = null;
        }
    }
}
|
package com.egoveris.numerador.base.repository;
import org.springframework.data.jpa.repository.JpaRepository;
import com.egoveris.numerador.base.model.NumeroCaratulaSector;
/**
 * Spring Data JPA repository for {@code NumeroCaratulaSector} entities
 * (integer primary key).
 */
public interface NumeroCaratulaSectorRepository extends JpaRepository<NumeroCaratulaSector, Integer> {
    /**
     * Derived query: find the record whose {@code usuario} property equals the given value.
     *
     * @param usuario the user identifier to match
     * @return the matching entity, or {@code null} if none exists
     */
    NumeroCaratulaSector findByUsuario(String usuario);
}
|
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.condition;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.Collection;
import java.util.Date;
import java.util.function.Consumer;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.boot.autoconfigure.condition.scan.ScanBean;
import org.springframework.boot.autoconfigure.condition.scan.ScannedFactoryBeanConfiguration;
import org.springframework.boot.autoconfigure.condition.scan.ScannedFactoryBeanWithBeanMethodArgumentsConfiguration;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.ComponentScan.Filter;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.FilterType;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.ImportBeanDefinitionRegistrar;
import org.springframework.context.annotation.ImportResource;
import org.springframework.core.type.AnnotationMetadata;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Tests for {@link ConditionalOnMissingBean @ConditionalOnMissingBean}.
*
* @author Dave Syer
* @author Phillip Webb
* @author Jakub Kubrynski
* @author Andy Wilkinson
*/
@SuppressWarnings("resource")
class ConditionalOnMissingBeanTests {
private final ApplicationContextRunner contextRunner = new ApplicationContextRunner();
// The condition matches on a missing bean *name*: "foo" exists, so "bar" is not registered.
@Test
void testNameOnMissingBeanCondition() {
    this.contextRunner.withUserConfiguration(FooConfiguration.class, OnBeanNameConfiguration.class)
            .run((context) -> {
                assertThat(context).doesNotHaveBean("bar");
                assertThat(context.getBean("foo")).isEqualTo("foo");
            });
}

// With the conditional configuration registered first, "foo" is not yet defined when the
// condition is evaluated, so "bar" is (undesirably) created - documents an ordering limitation.
@Test
void testNameOnMissingBeanConditionReverseOrder() {
    this.contextRunner.withUserConfiguration(OnBeanNameConfiguration.class, FooConfiguration.class)
            .run((context) -> {
                // Ideally this would be doesNotHaveBean, but the ordering is a
                // problem
                assertThat(context).hasBean("bar");
                assertThat(context.getBean("foo")).isEqualTo("foo");
            });
}

@Test
void testNameAndTypeOnMissingBeanCondition() {
    // Arguably this should be hasBean, but as things are implemented the conditions
    // specified in the different attributes of @ConditionalOnBean are combined with
    // logical OR (not AND) so if any of them match the condition is true.
    this.contextRunner.withUserConfiguration(FooConfiguration.class, OnBeanNameAndTypeConfiguration.class)
            .run((context) -> assertThat(context).doesNotHaveBean("bar"));
}

// Default search strategy looks into parent contexts too: "foo" in the parent suppresses "bar".
@Test
void hierarchyConsidered() {
    this.contextRunner.withUserConfiguration(FooConfiguration.class)
            .run((parent) -> new ApplicationContextRunner().withParent(parent)
                    .withUserConfiguration(HierarchyConsidered.class)
                    .run((context) -> assertThat(context.containsLocalBean("bar")).isFalse()));
}

// SearchStrategy.CURRENT ignores the parent: "foo" in the parent does not suppress "bar".
@Test
void hierarchyNotConsidered() {
    this.contextRunner.withUserConfiguration(FooConfiguration.class)
            .run((parent) -> new ApplicationContextRunner().withParent(parent)
                    .withUserConfiguration(HierarchyNotConsidered.class)
                    .run((context) -> assertThat(context.containsLocalBean("bar")).isTrue()));
}

// @ConditionalOnMissingBean with no attributes implies the @Bean method's return type.
@Test
void impliedOnBeanMethod() {
    this.contextRunner.withUserConfiguration(ExampleBeanConfiguration.class, ImpliedOnBeanMethod.class)
            .run((context) -> assertThat(context).hasSingleBean(ExampleBean.class));
}

// Matching by annotation: a bean carrying the target annotation suppresses "bar".
@Test
void testAnnotationOnMissingBeanCondition() {
    this.contextRunner.withUserConfiguration(FooConfiguration.class, OnAnnotationConfiguration.class)
            .run((context) -> {
                assertThat(context).doesNotHaveBean("bar");
                assertThat(context.getBean("foo")).isEqualTo("foo");
            });
}

@Test
void testAnnotationOnMissingBeanConditionWithEagerFactoryBean() {
    // Rigorous test for SPR-11069
    this.contextRunner
            .withUserConfiguration(FooConfiguration.class, OnAnnotationConfiguration.class,
                    FactoryBeanXmlConfiguration.class, PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> {
                assertThat(context).doesNotHaveBean("bar");
                assertThat(context).hasBean("example");
                assertThat(context.getBean("foo")).isEqualTo("foo");
            });
}

// The condition-evaluation report for @ConditionalOnMissingBean must not mention
// @ConditionalOnBean in its outcome message.
@Test
void testOnMissingBeanConditionOutputShouldNotContainConditionalOnBeanClassInMessage() {
    this.contextRunner.withUserConfiguration(OnBeanNameConfiguration.class).run((context) -> {
        Collection<ConditionEvaluationReport.ConditionAndOutcomes> conditionAndOutcomes = ConditionEvaluationReport
                .get(context.getSourceApplicationContext().getBeanFactory()).getConditionAndOutcomesBySource()
                .values();
        String message = conditionAndOutcomes.iterator().next().iterator().next().getOutcome().getMessage();
        assertThat(message).doesNotContain("@ConditionalOnBean");
    });
}
// The object type produced by a FactoryBean is considered when matching the condition:
// only the factory-produced ExampleBean ("fromFactory") should exist.
@Test
void testOnMissingBeanConditionWithFactoryBean() {
    this.contextRunner
            .withUserConfiguration(FactoryBeanConfiguration.class, ConditionalOnFactoryBean.class,
                    PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> assertThat(context.getBean(ExampleBean.class).toString()).isEqualTo("fromFactory"));
}

// Same as above, with the FactoryBean discovered via component scanning.
@Test
void testOnMissingBeanConditionWithComponentScannedFactoryBean() {
    this.contextRunner
            .withUserConfiguration(ComponentScannedFactoryBeanBeanMethodConfiguration.class,
                    ConditionalOnFactoryBean.class, PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> assertThat(context.getBean(ScanBean.class).toString()).isEqualTo("fromFactory"));
}

// Component-scanned FactoryBean whose @Bean method takes arguments still matches.
@Test
void testOnMissingBeanConditionWithComponentScannedFactoryBeanWithBeanMethodArguments() {
    this.contextRunner
            .withUserConfiguration(ComponentScannedFactoryBeanBeanMethodWithArgumentsConfiguration.class,
                    ConditionalOnFactoryBean.class, PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> assertThat(context.getBean(ScanBean.class).toString()).isEqualTo("fromFactory"));
}

// FactoryBean defined by a @Bean method with (property-resolved) arguments still matches.
@Test
void testOnMissingBeanConditionWithFactoryBeanWithBeanMethodArguments() {
    this.contextRunner
            .withUserConfiguration(FactoryBeanWithBeanMethodArgumentsConfiguration.class,
                    ConditionalOnFactoryBean.class, PropertyPlaceholderAutoConfiguration.class)
            .withPropertyValues("theValue=foo")
            .run((context) -> assertThat(context.getBean(ExampleBean.class).toString()).isEqualTo("fromFactory"));
}

// A concrete FactoryBean subclass (object type resolvable from generics) matches.
@Test
void testOnMissingBeanConditionWithConcreteFactoryBean() {
    this.contextRunner
            .withUserConfiguration(ConcreteFactoryBeanConfiguration.class, ConditionalOnFactoryBean.class,
                    PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> assertThat(context.getBean(ExampleBean.class).toString()).isEqualTo("fromFactory"));
}

@Test
void testOnMissingBeanConditionWithUnhelpfulFactoryBean() {
    // We could not tell that the FactoryBean would ultimately create an ExampleBean
    this.contextRunner
            .withUserConfiguration(UnhelpfulFactoryBeanConfiguration.class, ConditionalOnFactoryBean.class,
                    PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> assertThat(context).getBeans(ExampleBean.class).hasSize(2));
}

// FactoryBean registered programmatically via a BeanDefinitionRegistrar still matches.
@Test
void testOnMissingBeanConditionWithRegisteredFactoryBean() {
    this.contextRunner
            .withUserConfiguration(RegisteredFactoryBeanConfiguration.class, ConditionalOnFactoryBean.class,
                    PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> assertThat(context.getBean(ExampleBean.class).toString()).isEqualTo("fromFactory"));
}

// Object type declared via the factory bean definition's Class attribute is honored.
@Test
void testOnMissingBeanConditionWithNonspecificFactoryBeanWithClassAttribute() {
    this.contextRunner
            .withUserConfiguration(NonspecificFactoryBeanClassAttributeConfiguration.class,
                    ConditionalOnFactoryBean.class, PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> assertThat(context.getBean(ExampleBean.class).toString()).isEqualTo("fromFactory"));
}

// Object type declared via a String attribute on the bean definition is honored too.
@Test
void testOnMissingBeanConditionWithNonspecificFactoryBeanWithStringAttribute() {
    this.contextRunner
            .withUserConfiguration(NonspecificFactoryBeanStringAttributeConfiguration.class,
                    ConditionalOnFactoryBean.class, PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> assertThat(context.getBean(ExampleBean.class).toString()).isEqualTo("fromFactory"));
}

// FactoryBean declared in XML configuration is considered as well.
@Test
void testOnMissingBeanConditionWithFactoryBeanInXml() {
    this.contextRunner
            .withUserConfiguration(FactoryBeanXmlConfiguration.class, ConditionalOnFactoryBean.class,
                    PropertyPlaceholderAutoConfiguration.class)
            .run((context) -> assertThat(context.getBean(ExampleBean.class).toString()).isEqualTo("fromFactory"));
}

// The "ignored" attribute (by class) excludes CustomExampleBean from the match, so the
// conditional bean is still created alongside the custom one.
@Test
void testOnMissingBeanConditionWithIgnoredSubclass() {
    this.contextRunner.withUserConfiguration(CustomExampleBeanConfiguration.class,
            ConditionalOnIgnoredSubclass.class, PropertyPlaceholderAutoConfiguration.class).run((context) -> {
                assertThat(context).getBeans(ExampleBean.class).hasSize(2);
                assertThat(context).getBeans(CustomExampleBean.class).hasSize(1);
            });
}
// Same as the ignored-subclass test, but the exclusion is specified by class *name*.
@Test
void testOnMissingBeanConditionWithIgnoredSubclassByName() {
    this.contextRunner.withUserConfiguration(CustomExampleBeanConfiguration.class,
            ConditionalOnIgnoredSubclassByName.class, PropertyPlaceholderAutoConfiguration.class).run((context) -> {
                assertThat(context).getBeans(ExampleBean.class).hasSize(2);
                assertThat(context).getBeans(CustomExampleBean.class).hasSize(1);
            });
}

// SearchStrategy.ANCESTORS walks the full ancestry: the grandparent's bean suppresses
// the conditional bean two levels down (only the local definition remains).
@Test
void grandparentIsConsideredWhenUsingAncestorsStrategy() {
    this.contextRunner.withUserConfiguration(ExampleBeanConfiguration.class)
            .run((grandparent) -> new ApplicationContextRunner().withParent(grandparent)
                    .run((parent) -> new ApplicationContextRunner().withParent(parent)
                            .withUserConfiguration(ExampleBeanConfiguration.class,
                                    OnBeanInAncestorsConfiguration.class)
                            .run((context) -> assertThat(context).getBeans(ExampleBean.class).hasSize(1))));
}

// ANCESTORS ignores the current context: a bean defined locally does not suppress the
// conditional bean, so both are present.
@Test
void currentContextIsIgnoredWhenUsingAncestorsStrategy() {
    this.contextRunner.run((parent) -> new ApplicationContextRunner().withParent(parent)
            .withUserConfiguration(ExampleBeanConfiguration.class, OnBeanInAncestorsConfiguration.class)
            .run((context) -> assertThat(context).getBeans(ExampleBean.class).hasSize(2)));
}

// Annotation matching also sees beans produced by a FactoryBean.
@Test
void beanProducedByFactoryBeanIsConsideredWhenMatchingOnAnnotation() {
    this.contextRunner.withUserConfiguration(ConcreteFactoryBeanConfiguration.class,
            OnAnnotationWithFactoryBeanConfiguration.class).run((context) -> {
                assertThat(context).doesNotHaveBean("bar");
                assertThat(context).hasSingleBean(ExampleBean.class);
            });
}

// Parameterized-container matching: condition value type absent -> conditional bean created.
@Test
void parameterizedContainerWhenValueIsOfMissingBeanMatches() {
    this.contextRunner
            .withUserConfiguration(ParameterizedWithoutCustomConfig.class,
                    ParameterizedConditionWithValueConfig.class)
            .run((context) -> assertThat(context)
                    .satisfies(exampleBeanRequirement("otherExampleBean", "conditionalCustomExampleBean")));
}

// Condition value type present as a plain bean -> conditional bean suppressed.
@Test
void parameterizedContainerWhenValueIsOfExistingBeanDoesNotMatch() {
    this.contextRunner
            .withUserConfiguration(ParameterizedWithCustomConfig.class, ParameterizedConditionWithValueConfig.class)
            .run((context) -> assertThat(context).satisfies(exampleBeanRequirement("customExampleBean")));
}

// Condition value type absent even as a container registration -> conditional bean created.
@Test
void parameterizedContainerWhenValueIsOfMissingBeanRegistrationMatches() {
    this.contextRunner
            .withUserConfiguration(ParameterizedWithoutCustomContainerConfig.class,
                    ParameterizedConditionWithValueConfig.class)
            .run((context) -> assertThat(context)
                    .satisfies(exampleBeanRequirement("otherExampleBean", "conditionalCustomExampleBean")));
}

// Condition value type present via a container registration -> conditional bean suppressed.
@Test
void parameterizedContainerWhenValueIsOfExistingBeanRegistrationDoesNotMatch() {
    this.contextRunner
            .withUserConfiguration(ParameterizedWithCustomContainerConfig.class,
                    ParameterizedConditionWithValueConfig.class)
            .run((context) -> assertThat(context).satisfies(exampleBeanRequirement("customExampleBean")));
}

// Return-type deduced condition vs. existing plain bean -> suppressed.
@Test
void parameterizedContainerWhenReturnTypeIsOfExistingBeanDoesNotMatch() {
    this.contextRunner
            .withUserConfiguration(ParameterizedWithCustomConfig.class,
                    ParameterizedConditionWithReturnTypeConfig.class)
            .run((context) -> assertThat(context).satisfies(exampleBeanRequirement("customExampleBean")));
}

// Return-type deduced condition vs. existing container registration -> suppressed.
@Test
void parameterizedContainerWhenReturnTypeIsOfExistingBeanRegistrationDoesNotMatch() {
    this.contextRunner
            .withUserConfiguration(ParameterizedWithCustomContainerConfig.class,
                    ParameterizedConditionWithReturnTypeConfig.class)
            .run((context) -> assertThat(context).satisfies(exampleBeanRequirement("customExampleBean")));
}

// Container return-type deduced condition vs. existing plain bean -> suppressed.
@Test
void parameterizedContainerWhenReturnRegistrationTypeIsOfExistingBeanDoesNotMatch() {
    this.contextRunner
            .withUserConfiguration(ParameterizedWithCustomConfig.class,
                    ParameterizedConditionWithReturnRegistrationTypeConfig.class)
            .run((context) -> assertThat(context).satisfies(exampleBeanRequirement("customExampleBean")));
}

// Container return-type deduced condition vs. existing container registration -> suppressed.
@Test
void parameterizedContainerWhenReturnRegistrationTypeIsOfExistingBeanRegistrationDoesNotMatch() {
    this.contextRunner
            .withUserConfiguration(ParameterizedWithCustomContainerConfig.class,
                    ParameterizedConditionWithReturnRegistrationTypeConfig.class)
            .run((context) -> assertThat(context).satisfies(exampleBeanRequirement("customExampleBean")));
}
private Consumer<ConfigurableApplicationContext> exampleBeanRequirement(String... names) {
return (context) -> {
String[] beans = context.getBeanNamesForType(ExampleBean.class);
String[] containers = context.getBeanNamesForType(TestParameterizedContainer.class);
assertThat(StringUtils.concatenateStringArrays(beans, containers)).containsOnly(names);
};
}
// Condition that only searches ancestor contexts for an existing ExampleBean.
@Configuration(proxyBeanMethods = false)
static class OnBeanInAncestorsConfiguration {
    @Bean
    @ConditionalOnMissingBean(search = SearchStrategy.ANCESTORS)
    ExampleBean exampleBean2() {
        return new ExampleBean("test");
    }
}
// Matches only when no bean named "foo" is present.
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(name = "foo")
static class OnBeanNameConfiguration {
    @Bean
    String bar() {
        return "bar";
    }
}
// Combines a missing-bean condition with a present-bean condition on the same name/type.
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(name = "foo", value = Date.class)
@ConditionalOnBean(name = "foo", value = Date.class)
static class OnBeanNameAndTypeConfiguration {
    @Bean
    String bar() {
        return "bar";
    }
}
// Registers an ExampleBean indirectly, through a FactoryBean-typed @Bean method.
@Configuration(proxyBeanMethods = false)
static class FactoryBeanConfiguration {
    @Bean
    FactoryBean<ExampleBean> exampleBeanFactoryBean() {
        return new ExampleFactoryBean("foo");
    }
}
// Picks up ScannedFactoryBeanConfiguration via component scanning only.
@Configuration(proxyBeanMethods = false)
@ComponentScan(basePackages = "org.springframework.boot.autoconfigure.condition.scan", useDefaultFilters = false,
        includeFilters = @Filter(type = FilterType.ASSIGNABLE_TYPE,
                classes = ScannedFactoryBeanConfiguration.class))
static class ComponentScannedFactoryBeanBeanMethodConfiguration {
}
// Same as above, but the scanned factory-bean method takes arguments.
@Configuration(proxyBeanMethods = false)
@ComponentScan(basePackages = "org.springframework.boot.autoconfigure.condition.scan", useDefaultFilters = false,
        includeFilters = @Filter(type = FilterType.ASSIGNABLE_TYPE,
                classes = ScannedFactoryBeanWithBeanMethodArgumentsConfiguration.class))
static class ComponentScannedFactoryBeanBeanMethodWithArgumentsConfiguration {
}
// FactoryBean @Bean method whose argument is resolved from the environment.
@Configuration(proxyBeanMethods = false)
static class FactoryBeanWithBeanMethodArgumentsConfiguration {
    @Bean
    FactoryBean<ExampleBean> exampleBeanFactoryBean(@Value("${theValue}") String value) {
        return new ExampleFactoryBean(value);
    }
}
// Declares the concrete factory-bean type, so the produced type is discoverable.
@Configuration(proxyBeanMethods = false)
static class ConcreteFactoryBeanConfiguration {
    @Bean
    ExampleFactoryBean exampleBeanFactoryBean() {
        return new ExampleFactoryBean("foo");
    }
}
// Raw FactoryBean return type: the produced object type cannot be deduced from the signature.
@Configuration(proxyBeanMethods = false)
static class UnhelpfulFactoryBeanConfiguration {
    @Bean
    @SuppressWarnings("rawtypes")
    FactoryBean exampleBeanFactoryBean() {
        return new ExampleFactoryBean("foo");
    }
}
// Registers a non-specific FactoryBean whose product type is only exposed via a bean-definition attribute.
@Configuration(proxyBeanMethods = false)
@Import(NonspecificFactoryBeanClassAttributeRegistrar.class)
static class NonspecificFactoryBeanClassAttributeConfiguration {
}
// Registrar that sets OBJECT_TYPE_ATTRIBUTE as a Class value.
static class NonspecificFactoryBeanClassAttributeRegistrar implements ImportBeanDefinitionRegistrar {
    @Override
    public void registerBeanDefinitions(AnnotationMetadata meta, BeanDefinitionRegistry registry) {
        BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(NonspecificFactoryBean.class);
        builder.addConstructorArgValue("foo");
        builder.getBeanDefinition().setAttribute(FactoryBean.OBJECT_TYPE_ATTRIBUTE, ExampleBean.class);
        registry.registerBeanDefinition("exampleBeanFactoryBean", builder.getBeanDefinition());
    }
}
// Registers a non-specific FactoryBean whose product type is exposed via a String-valued
// OBJECT_TYPE_ATTRIBUTE.
// Fix: previously imported NonspecificFactoryBeanClassAttributeRegistrar (copy-paste error),
// which left the String-attribute registrar below unused and made this configuration a
// duplicate of NonspecificFactoryBeanClassAttributeConfiguration.
@Configuration(proxyBeanMethods = false)
@Import(NonspecificFactoryBeanStringAttributeRegistrar.class)
static class NonspecificFactoryBeanStringAttributeConfiguration {
}
// Registrar that sets OBJECT_TYPE_ATTRIBUTE as a fully-qualified class name (String).
static class NonspecificFactoryBeanStringAttributeRegistrar implements ImportBeanDefinitionRegistrar {
    @Override
    public void registerBeanDefinitions(AnnotationMetadata meta, BeanDefinitionRegistry registry) {
        BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(NonspecificFactoryBean.class);
        builder.addConstructorArgValue("foo");
        builder.getBeanDefinition().setAttribute(FactoryBean.OBJECT_TYPE_ATTRIBUTE, ExampleBean.class.getName());
        registry.registerBeanDefinition("exampleBeanFactoryBean", builder.getBeanDefinition());
    }
}
// Registers ExampleFactoryBean programmatically, without a @Bean method.
@Configuration(proxyBeanMethods = false)
@Import(FactoryBeanRegistrar.class)
static class RegisteredFactoryBeanConfiguration {
}
// Registrar backing RegisteredFactoryBeanConfiguration.
static class FactoryBeanRegistrar implements ImportBeanDefinitionRegistrar {
    @Override
    public void registerBeanDefinitions(AnnotationMetadata meta, BeanDefinitionRegistry registry) {
        BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ExampleFactoryBean.class);
        builder.addConstructorArgValue("foo");
        registry.registerBeanDefinition("exampleBeanFactoryBean", builder.getBeanDefinition());
    }
}
// Registers the factory bean via XML configuration.
@Configuration(proxyBeanMethods = false)
@ImportResource("org/springframework/boot/autoconfigure/condition/factorybean.xml")
static class FactoryBeanXmlConfiguration {
}
// Direct ExampleBean that should back off when a factory-bean-produced one exists.
@Configuration(proxyBeanMethods = false)
static class ConditionalOnFactoryBean {
    @Bean
    @ConditionalOnMissingBean(ExampleBean.class)
    ExampleBean createExampleBean() {
        return new ExampleBean("direct");
    }
}
// CustomExampleBean instances are ignored when evaluating the missing-bean condition (by class).
@Configuration(proxyBeanMethods = false)
static class ConditionalOnIgnoredSubclass {
    @Bean
    @ConditionalOnMissingBean(value = ExampleBean.class, ignored = CustomExampleBean.class)
    ExampleBean exampleBean() {
        return new ExampleBean("test");
    }
}
// Same as above, but the ignored subclass is referenced by fully-qualified name.
@Configuration(proxyBeanMethods = false)
static class ConditionalOnIgnoredSubclassByName {
    @Bean
    @ConditionalOnMissingBean(value = ExampleBean.class,
            ignoredType = "org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBeanTests$CustomExampleBean")
    ExampleBean exampleBean() {
        return new ExampleBean("test");
    }
}
// Unconditionally contributes a CustomExampleBean.
@Configuration(proxyBeanMethods = false)
static class CustomExampleBeanConfiguration {
    @Bean
    CustomExampleBean customExampleBean() {
        return new CustomExampleBean();
    }
}
// Matches when no bean annotated with @EnableScheduling exists.
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(annotation = EnableScheduling.class)
static class OnAnnotationConfiguration {
    @Bean
    String bar() {
        return "bar";
    }
}
// Matches when no bean annotated with @TestAnnotation exists (ExampleBean carries it).
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(annotation = TestAnnotation.class)
static class OnAnnotationWithFactoryBeanConfiguration {
    @Bean
    String bar() {
        return "bar";
    }
}
// Contributes a bean named "foo"; the class itself is annotated with @EnableScheduling.
@Configuration(proxyBeanMethods = false)
@EnableScheduling
static class FooConfiguration {
    @Bean
    String foo() {
        return "foo";
    }
}
// Default search strategy: parent hierarchy is consulted for "foo".
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(name = "foo")
static class HierarchyConsidered {
    @Bean
    String bar() {
        return "bar";
    }
}
// CURRENT search strategy: only this context is consulted for "foo".
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(name = "foo", search = SearchStrategy.CURRENT)
static class HierarchyNotConsidered {
    @Bean
    String bar() {
        return "bar";
    }
}
// Unconditionally contributes a plain ExampleBean.
@Configuration(proxyBeanMethods = false)
static class ExampleBeanConfiguration {
    @Bean
    ExampleBean exampleBean() {
        return new ExampleBean("test");
    }
}
// @ConditionalOnMissingBean with no attributes: the type is implied by the method return type.
@Configuration(proxyBeanMethods = false)
static class ImpliedOnBeanMethod {
    @Bean
    @ConditionalOnMissingBean
    ExampleBean exampleBean2() {
        return new ExampleBean("test");
    }
}
// Prototype-scoped FactoryBean producing ExampleBean instances.
static class ExampleFactoryBean implements FactoryBean<ExampleBean> {
    ExampleFactoryBean(String value) {
        // Guards against an unresolved "${...}" placeholder leaking into the constructor.
        Assert.state(!value.contains("$"), "value should not contain '$'");
    }
    @Override
    public ExampleBean getObject() {
        return new ExampleBean("fromFactory");
    }
    @Override
    public Class<?> getObjectType() {
        return ExampleBean.class;
    }
    @Override
    public boolean isSingleton() {
        return false;
    }
}
// FactoryBean declared against Object so its product type is not visible from generics.
static class NonspecificFactoryBean implements FactoryBean<Object> {
    NonspecificFactoryBean(String value) {
        // Guards against an unresolved "${...}" placeholder leaking into the constructor.
        Assert.state(!value.contains("$"), "value should not contain '$'");
    }
    @Override
    public ExampleBean getObject() {
        return new ExampleBean("fromFactory");
    }
    @Override
    public Class<?> getObjectType() {
        return ExampleBean.class;
    }
    @Override
    public boolean isSingleton() {
        return false;
    }
}
// Contributes a plain CustomExampleBean (bean name "customExampleBean").
@Configuration(proxyBeanMethods = false)
static class ParameterizedWithCustomConfig {
    @Bean
    CustomExampleBean customExampleBean() {
        return new CustomExampleBean();
    }
}
// Contributes a plain OtherExampleBean instead of a CustomExampleBean.
@Configuration(proxyBeanMethods = false)
static class ParameterizedWithoutCustomConfig {
    @Bean
    OtherExampleBean otherExampleBean() {
        return new OtherExampleBean();
    }
}
// Contributes a container of OtherExampleBean (no CustomExampleBean anywhere).
@Configuration(proxyBeanMethods = false)
static class ParameterizedWithoutCustomContainerConfig {
    @Bean
    TestParameterizedContainer<OtherExampleBean> otherExampleBean() {
        return new TestParameterizedContainer<>();
    }
}
// Contributes a container-wrapped CustomExampleBean (bean name "customExampleBean").
@Configuration(proxyBeanMethods = false)
static class ParameterizedWithCustomContainerConfig {
    @Bean
    TestParameterizedContainer<CustomExampleBean> customExampleBean() {
        return new TestParameterizedContainer<>();
    }
}
// Condition with an explicit value type plus the parameterized container to unwrap.
@Configuration(proxyBeanMethods = false)
static class ParameterizedConditionWithValueConfig {
    @Bean
    @ConditionalOnMissingBean(value = CustomExampleBean.class,
            parameterizedContainer = TestParameterizedContainer.class)
    CustomExampleBean conditionalCustomExampleBean() {
        return new CustomExampleBean();
    }
}
// Condition type deduced from the @Bean method's return type.
@Configuration(proxyBeanMethods = false)
static class ParameterizedConditionWithReturnTypeConfig {
    @Bean
    @ConditionalOnMissingBean(parameterizedContainer = TestParameterizedContainer.class)
    CustomExampleBean conditionalCustomExampleBean() {
        return new CustomExampleBean();
    }
}
// Condition type deduced from a container-typed @Bean method return.
@Configuration(proxyBeanMethods = false)
static class ParameterizedConditionWithReturnRegistrationTypeConfig {
    @Bean
    @ConditionalOnMissingBean(parameterizedContainer = TestParameterizedContainer.class)
    TestParameterizedContainer<CustomExampleBean> conditionalCustomExampleBean() {
        return new TestParameterizedContainer<>();
    }
}
// Simple value holder used as the bean type under test; carries @TestAnnotation
// for the annotation-based condition tests.
@TestAnnotation
static class ExampleBean {
    private String value;
    ExampleBean(String value) {
        this.value = value;
    }
    @Override
    public String toString() {
        return this.value;
    }
}
// Subclass used to exercise the "ignored"/"ignoredType" condition attributes.
static class CustomExampleBean extends ExampleBean {
    CustomExampleBean() {
        super("custom subclass");
    }
}
// Unrelated sibling subclass that should never satisfy CustomExampleBean conditions.
static class OtherExampleBean extends ExampleBean {
    OtherExampleBean() {
        super("other subclass");
    }
}
// Marker annotation for the annotation-based condition tests.
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@Documented
@interface TestAnnotation {
}
}
|
package test_util;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import test_Dao.ThresholdDao;
import test_entity.ThresholdTab;
/**
 * Loads a user's temperature and humidity threshold values from the database at
 * login time and publishes them via {@code Pocket}.
 *
 * <p>Intended to run on a background thread ({@link Runnable}); selects the rows
 * keyed by (username, sensor, name) for sensors "tem"/"tem1" and "hum"/"hum1".
 *
 * <p>Fixes over the original: the JDBC {@link Connection} is now always closed
 * (previously it leaked whenever either query threw, and was only closed inside
 * the second try block), and both {@link ResultSet}s are closed via
 * try-with-resources. Each query still fails independently (best-effort), as before.
 */
public class LoginInit implements Runnable {

    /** Name of the user whose thresholds are loaded. */
    private final String username;

    public LoginInit(String username) {
        this.username = username;
        System.out.println("login init");
    }

    @Override
    public void run() {
        Connection connection = ConnectionFactory.getConnection();
        ThresholdTab tab = new ThresholdTab();
        ThresholdDao tabDao = new ThresholdDao();
        System.out.println(username); // debug: originally reported as not printing — confirm this thread is actually started
        tab.setUsername(username);
        try {
            // Temperature threshold (sensor "tem", name "tem1").
            tab.setSensor("tem");
            tab.setName("tem1");
            try (ResultSet rSet = tabDao.select(connection, tab)) {
                while (rSet.next()) {
                    System.out.println(rSet.getInt("value"));
                    Pocket.setTemValue(rSet.getInt("value"));
                }
            } catch (SQLException e) {
                // Best-effort: a failed temperature lookup must not block the humidity lookup.
                e.printStackTrace();
            }
            // Humidity threshold (sensor "hum", name "hum1").
            tab.setSensor("hum");
            tab.setName("hum1");
            try (ResultSet rSet = tabDao.select(connection, tab)) {
                while (rSet.next()) {
                    System.out.println(rSet.getInt("value"));
                    Pocket.setHumValue(rSet.getInt("value"));
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        } finally {
            // Always release the connection, even if either query failed.
            if (connection != null) {
                try {
                    connection.close();
                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
|
package offical_website.site.service;
import offical_website.site.model.Honor;
import java.util.List;
/**
 * CRUD operations for {@link Honor} records (company honors/awards shown on the site).
 */
public interface HonorService {

    /**
     * Creates a new honor entry.
     *
     * @param remark descriptive text for the honor
     * @param img    image reference (presumably a URL or path — confirm with implementation)
     * @return status/row count — presumably the number of affected rows; TODO confirm
     */
    int createHonor(String remark, String img);

    /**
     * Deletes the honor with the given id.
     *
     * @return status/row count — presumably the number of affected rows; TODO confirm
     */
    int deleteHonor(long id);

    /**
     * Updates an existing honor's remark and image.
     *
     * @return status/row count — presumably the number of affected rows; TODO confirm
     */
    int updateHonor(String remark, long id, String img);

    /** Returns all honor entries. */
    List<Honor> getHonorAll();

    /** Returns the honor with the given id, or whatever the implementation returns when absent. */
    Honor getHonorById(long id);
}
|
package com.pharmadm.custom.rega.reporteditor;
import java.util.Collection;
import com.pharmadm.custom.rega.queryeditor.OutputVariable;
import com.pharmadm.custom.rega.queryeditor.catalog.DbObject;
/**
 * Supplies and tracks the output variables backing an object-list variable in a report.
 */
public interface OutputReportSeeder {

    /** Returns the output variable currently assigned to the given object-list variable. */
    public OutputVariable getAssignedVariable(ObjectListVariable olvar);

    /**
     * Returns the output variables available for the given database object.
     * NOTE(review): raw {@code Collection} — presumably {@code Collection<OutputVariable>};
     * confirm against implementations before adding generics.
     */
    public Collection getAvailableOutputVariables(DbObject obj);

    /** Assigns the given output variable to the object-list variable. */
    public void assign(ObjectListVariable olvar, OutputVariable ovar);
}
|
package ghost.framework.jsr310.converter;
import ghost.framework.context.converter.EncodingTypeConverter;
import java.time.ZonedDateTime;
/**
 * Converter contract for decoding encoded {@code byte[]} payloads into {@link ZonedDateTime}.
 * <p>
 * package: ghost.framework.jsr310.converter
 *
 * @param <S> source type — presumably {@code byte[]}; confirm with registrations
 * @param <T> target type — presumably {@link ZonedDateTime}; confirm with registrations
 * @author 郭树灿{gsc-e590}
 */
public interface BytesToZonedDateTimeConverter<S, T> extends EncodingTypeConverter<S, T> {
}
|
/**
* TLS-Scanner - A TLS configuration and analysis tool based on TLS-Attacker.
*
* Copyright 2017-2019 Ruhr University Bochum / Hackmanit GmbH
*
* Licensed under Apache License 2.0
* http://www.apache.org/licenses/LICENSE-2.0
*/
package de.rub.nds.tlsscanner.serverscanner.report;
/**
*
* @author Robert Merget - robert.merget@rub.de
*/
/**
 * Mutable name/value pair representing a single field of a scan report.
 * <p>
 * Not thread-safe.
 */
public class ResultValue {

    /** Name of the report field. */
    private String field;

    /** Value recorded for the field. */
    private String value;

    /**
     * @param field name of the report field
     * @param value value recorded for the field
     */
    public ResultValue(String field, String value) {
        this.field = field;
        this.value = value;
    }

    public String getField() {
        return field;
    }

    public void setField(String field) {
        this.field = field;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    /** Returns {@code "field: value"}; the redundant leading {@code "" +} was removed. */
    @Override
    public String toString() {
        return field + ": " + value;
    }
}
|
package com.octopus.utils.xml.auto.defpro.impl.utils;
import com.octopus.utils.flow.FlowParameters;
import com.octopus.utils.thread.ExecutorUtils;
import com.octopus.utils.xml.XMLMakeup;
import com.octopus.utils.xml.auto.XMLDoObject;
import com.octopus.utils.xml.auto.XMLParameter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.Iterator;
import java.util.Map;
/**
* User: wfgao_000
* Date: 16-6-2
* Time: 上午9:16
*/
/**
 * Bundles the arguments for executing one XML-driven action (an {@link XMLDoObject}
 * check) and optionally invoking a callback afterwards.
 * <p>
 * NOTE(review): the {@code callback} array layout appears to be
 * {@code [target, methodName(String), paramTypes(Class[]), args(Object[])]} based on
 * how it is unpacked below — confirm with callers before changing.
 */
public class DoAction{
    transient static Log log = LogFactory.getLog(DoAction.class);
    // Target object whose doCheckThing is invoked.
    XMLDoObject obj;
    // Parameter environment shared with the caller.
    XMLParameter env;
    Map input;
    Map output;
    Map config;
    // Optional callback descriptor; see class note for the expected layout.
    Object[] callback;
    XMLMakeup xml;
    public XMLDoObject getObj() {
        return obj;
    }
    public Map getConfig() {
        return config;
    }
    public XMLMakeup getXml(){
        return xml;
    }
    public DoAction(XMLDoObject obj,XMLParameter env,Map input,Map output,Map config,Object[] callback,XMLMakeup xml){
        this.obj=obj;
        this.env=env;
        this.input=input;
        this.output=output;
        this.config=config;
        this.callback=callback;
        this.xml=xml;
    }
    /**
     * Runs the check against the shared environment, then (if a callback is set)
     * exposes the input parameters to it and invokes it synchronously.
     */
    public void doAction()throws Exception{
        obj.doCheckThing(xml.getId(),env, input, output,config,xml);
        if(null != callback){
            // Expose input parameters to the callback environment as ${key} placeholders.
            if(null != env && null != input){
                Iterator its = input.keySet().iterator();
                while(its.hasNext()){
                    String k = (String)its.next();
                    env.addParameter("${" + k + "}", input.get(k));
                }
            }
            ExecutorUtils.synWork(callback[0], (String) callback[1], (Class[]) callback[2], (Object[]) callback[3]);
        }
    }
    /**
     * Like {@link #doAction()}, but runs in a fresh {@link FlowParameters} container
     * instead of the shared environment, and passes that container to the callback
     * in place of the callback's original first argument.
     */
    public void doNewContainerAction()throws Exception{
        //log.error("-----do doNewContainerAction---");
        FlowParameters nd = new FlowParameters(false);
        // Expose input parameters to the new container as ${key} placeholders.
        // (nd is never null here; the null check is kept to preserve the original flow.)
        if(null != nd && null != input){
            Iterator its = input.keySet().iterator();
            while(its.hasNext()){
                String k = (String)its.next();
                nd.addParameter("${" + k + "}", input.get(k));
            }
        }
        obj.doCheckThing(xml.getId(),nd, input, output,config,xml);
        //log.error("-----finished doNewContainerAction----");
        if(null != callback){
            // NOTE(review): logged at error level although it is informational — confirm intent.
            log.error("----do callback---");
            ExecutorUtils.synWork(callback[0], (String) callback[1], (Class[]) callback[2], new Object[]{nd,((Object[])callback[3])[1],((Object[])callback[3])[2]});
        }
    }
}
|
package jetbrains.mps.lang.editor.menus.contextAssistant.tests;
/*Generated by MPS */
import jetbrains.mps.MPSLaunch;
import jetbrains.mps.lang.test.runtime.BaseTransformationTest;
import org.junit.ClassRule;
import jetbrains.mps.lang.test.runtime.TestParametersCache;
import org.junit.Test;
import jetbrains.mps.lang.test.runtime.BaseEditorTestBody;
import jetbrains.mps.lang.test.runtime.TransformationTest;
import javax.swing.SwingUtilities;
import jetbrains.mps.nodeEditor.EditorContext;
import jetbrains.mps.openapi.editor.assist.ContextAssistantManager;
import org.junit.Assert;
// Generated by MPS (see header above) — regenerate from the model rather than hand-editing.
@MPSLaunch
public class ContextAssistant_DuplicationMenu_WrapDifferentSubstituteMenus_Test extends BaseTransformationTest {
    // Cached test parameters (MPS home + model reference) shared by all tests in this class.
    @ClassRule
    public static final TestParametersCache ourParamCache = new TestParametersCache(ContextAssistant_DuplicationMenu_WrapDifferentSubstituteMenus_Test.class, "${mps_home}", "r:5a4d10fc-2567-46c5-982f-547e9102417b(jetbrains.mps.lang.editor.menus.contextAssistant.tests@tests)", false);
    public ContextAssistant_DuplicationMenu_WrapDifferentSubstituteMenus_Test() {
        super(ourParamCache);
    }
    @Test
    public void test_ContextAssistant_DuplicationMenu_WrapDifferentSubstituteMenus() throws Throwable {
        new TestBody(this).testMethod();
    }
    /*package*/ static class TestBody extends BaseEditorTestBody {
        /*package*/ TestBody(TransformationTest owner) {
            super(owner);
        }
        @Override
        public void testMethodImpl() throws Exception {
            initEditorComponent("1892012100483348900", "");
            // Context-assistant updates must happen on the EDT.
            SwingUtilities.invokeAndWait(() -> {
                EditorContext editorContext = getEditorComponent().getEditorContext();
                ContextAssistantManager contextAssistantManager = editorContext.getContextAssistantManager();
                contextAssistantManager.updateImmediately();
                Assert.assertNotNull(contextAssistantManager.getActiveAssistant());
                Assert.assertNotNull(contextAssistantManager.getActiveMenuItems());
                // Expect exactly two items from the wrapped substitute menus.
                Assert.assertTrue(contextAssistantManager.getActiveMenuItems().size() == 2);
            });
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
import com.google.common.primitives.Ints;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.Config;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.utils.FBUtilities;
/**
* Handles the leveled manifest generations
*
* Not thread safe, all access should be synchronized in LeveledManifest
*/
class LeveledGenerations
{
    private static final Logger logger = LoggerFactory.getLogger(LeveledGenerations.class);
    // Test-only flag: when set, overlap/placement anomalies throw instead of being repaired.
    private final boolean strictLCSChecksTest = Boolean.getBoolean(Config.PROPERTY_PREFIX + "test.strict_lcs_checks");
    // allocate enough generations for a PB of data, with a 1-MB sstable size. (Note that if maxSSTableSize is
    // updated, we will still have sstables of the older, potentially smaller size. So don't make this
    // dependent on maxSSTableSize.)
    static final int MAX_LEVEL_COUNT = (int) Math.log10(1000 * 1000 * 1000);
    // L0 is unsorted (sstables there may overlap arbitrarily), so it lives outside levels[].
    private final Set<SSTableReader> l0 = new HashSet<>();
    // Throttle for maybeVerifyLevels(); static, so the check interval is shared across instances.
    private static long lastOverlapCheck = System.nanoTime();
    // note that since l0 is broken out, levels[0] represents L1:
    private final TreeSet<SSTableReader> [] levels = new TreeSet[MAX_LEVEL_COUNT - 1];
    // Orders by first token, breaking ties by sstable generation so that distinct
    // sstables never compare equal (TreeSet would otherwise drop one of them).
    private static final Comparator<SSTableReader> nonL0Comparator = (o1, o2) -> {
        int cmp = SSTableReader.sstableComparator.compare(o1, o2);
        if (cmp == 0)
            cmp = Ints.compare(o1.descriptor.generation, o2.descriptor.generation);
        return cmp;
    };
    LeveledGenerations()
    {
        for (int i = 0; i < MAX_LEVEL_COUNT - 1; i++)
            levels[i] = new TreeSet<>(nonL0Comparator);
    }
    /**
     * Returns the live set for the given level; level 0 maps to the separate l0 set,
     * level N (N >= 1) maps to levels[N - 1]. The returned set is the internal one, not a copy.
     */
    Set<SSTableReader> get(int level)
    {
        if (level > levelCount() - 1 || level < 0)
            throw new ArrayIndexOutOfBoundsException("Invalid generation " + level + " - maximum is " + (levelCount() - 1));
        if (level == 0)
            return l0;
        return levels[level - 1];
    }
    // Total number of levels including L0.
    int levelCount()
    {
        return levels.length + 1;
    }
    /**
     * Adds readers to the correct level
     *
     * If adding an sstable would cause an overlap in the level (if level > 1) we send it to L0. This can happen
     * for example when moving sstables from unrepaired to repaired.
     *
     * If the sstable is already in the manifest we skip it.
     *
     * If the sstable exists in the manifest but has the wrong level, it is removed from the wrong level and added to the correct one
     *
     * todo: group sstables per level, add all if level is currently empty, improve startup speed
     */
    void addAll(Iterable<SSTableReader> readers)
    {
        logDistribution();
        for (SSTableReader sstable : readers)
        {
            assert sstable.getSSTableLevel() < levelCount() : "Invalid level " + sstable.getSSTableLevel() + " out of " + (levelCount() - 1);
            int existingLevel = getLevelIfExists(sstable);
            if (existingLevel != -1)
            {
                if (sstable.getSSTableLevel() != existingLevel)
                {
                    // Manifest disagrees with the sstable's own metadata: trust the metadata,
                    // evict from the recorded level and fall through to re-add below.
                    logger.error("SSTable {} on the wrong level in the manifest - {} instead of {} as recorded in the sstable metadata, removing from level {}", sstable, existingLevel, sstable.getSSTableLevel(), existingLevel);
                    if (strictLCSChecksTest)
                        throw new AssertionError("SSTable not in matching level in manifest: "+sstable + ": "+existingLevel+" != " + sstable.getSSTableLevel());
                    get(existingLevel).remove(sstable);
                }
                else
                {
                    logger.info("Manifest already contains {} in level {} - skipping", sstable, existingLevel);
                    continue;
                }
            }
            if (sstable.getSSTableLevel() == 0)
            {
                l0.add(sstable);
                continue;
            }
            TreeSet<SSTableReader> level = levels[sstable.getSSTableLevel() - 1];
            /*
            current level: |-----||----||----|    |---||---|
            new sstable:                      |--|
                                              ^ before
                                                  ^ after
            overlap if before.last >= newsstable.first or after.first <= newsstable.last
            */
            SSTableReader after = level.ceiling(sstable);
            SSTableReader before = level.floor(sstable);
            if (before != null && before.last.compareTo(sstable.first) >= 0 ||
                after != null && after.first.compareTo(sstable.last) <= 0)
            {
                if (strictLCSChecksTest) // we can only assert this in tests since this is normal when for example moving sstables from unrepaired to repaired
                    throw new AssertionError("Got unexpected overlap in level "+sstable.getSSTableLevel());
                sendToL0(sstable);
            }
            else
            {
                level.add(sstable);
            }
        }
        maybeVerifyLevels();
    }
    /**
     * Sends sstable to L0 by mutating its level in the sstable metadata.
     *
     * SSTable should not exist in the manifest
     */
    private void sendToL0(SSTableReader sstable)
    {
        try
        {
            sstable.descriptor.getMetadataSerializer().mutateLevel(sstable.descriptor, 0);
            sstable.reloadSSTableMetadata();
        }
        catch (IOException e)
        {
            // Adding it to L0 and marking suspect is probably the best we can do here - it won't create overlap
            // and we won't pick it for later compactions.
            logger.error("Failed mutating sstable metadata for {} - adding it to L0 to avoid overlap. Marking suspect", sstable, e);
            sstable.markSuspect();
        }
        l0.add(sstable);
    }
    /**
     * Tries to find the sstable in the levels without using the sstable-recorded level
     *
     * Used to make sure we don't try to re-add an existing sstable
     */
    private int getLevelIfExists(SSTableReader sstable)
    {
        for (int i = 0; i < levelCount(); i++)
        {
            if (get(i).contains(sstable))
                return i;
        }
        return -1;
    }
    /**
     * Removes the readers from the level recorded in their own metadata
     * (not a search like getLevelIfExists) and returns the lowest such level,
     * or Integer.MAX_VALUE when readers is empty.
     */
    int remove(Collection<SSTableReader> readers)
    {
        int minLevel = Integer.MAX_VALUE;
        for (SSTableReader sstable : readers)
        {
            int level = sstable.getSSTableLevel();
            minLevel = Math.min(minLevel, level);
            get(level).remove(sstable);
        }
        return minLevel;
    }
    // Per-level sstable counts, indexed by level (L0 at index 0).
    int[] getAllLevelSize()
    {
        int[] counts = new int[levelCount()];
        for (int i = 0; i < levelCount(); i++)
            counts[i] = get(i).size();
        return counts;
    }
    // Flattened view of every sstable across all levels (immutable).
    Set<SSTableReader> allSSTables()
    {
        ImmutableSet.Builder<SSTableReader> builder = ImmutableSet.builder();
        builder.addAll(l0);
        for (Set<SSTableReader> sstables : levels)
            builder.addAll(sstables);
        return builder.build();
    }
    /**
     * given a level with sstables with first tokens [0, 10, 20, 30] and a lastCompactedSSTable with last = 15, we will
     * return an Iterator over [20, 30, 0, 10].
     */
    Iterator<SSTableReader> wrappingIterator(int lvl, SSTableReader lastCompactedSSTable)
    {
        assert lvl > 0; // only makes sense in L1+
        TreeSet<SSTableReader> level = levels[lvl - 1];
        if (level.isEmpty())
            return Collections.emptyIterator();
        if (lastCompactedSSTable == null)
            return level.iterator();
        PeekingIterator<SSTableReader> tail = Iterators.peekingIterator(level.tailSet(lastCompactedSSTable).iterator());
        SSTableReader pivot = null;
        // then we need to make sure that the first token of the pivot is greater than the last token of the lastCompactedSSTable
        while (tail.hasNext())
        {
            SSTableReader potentialPivot = tail.peek();
            if (potentialPivot.first.compareTo(lastCompactedSSTable.last) > 0)
            {
                pivot = potentialPivot;
                break;
            }
            tail.next();
        }
        if (pivot == null)
            return level.iterator();
        // Everything from the pivot to the end, then wrap around to the head (pivot excluded).
        return Iterators.concat(tail, level.headSet(pivot, false).iterator());
    }
    // Trace-level summary of per-level counts and sizes; no-op unless TRACE is enabled.
    void logDistribution()
    {
        if (logger.isTraceEnabled())
        {
            for (int i = 0; i < levelCount(); i++)
            {
                Set<SSTableReader> level = get(i);
                if (!level.isEmpty())
                {
                    logger.trace("L{} contains {} SSTables ({}) in {}",
                                 i,
                                 level.size(),
                                 FBUtilities.prettyPrintMemory(SSTableReader.getTotalBytes(level)),
                                 this);
                }
            }
        }
    }
    // Immutable per-level copies, safe to hand out to callers.
    Set<SSTableReader>[] snapshot()
    {
        Set<SSTableReader> [] levelsCopy = new Set[levelCount()];
        for (int i = 0; i < levelCount(); i++)
            levelsCopy[i] = ImmutableSet.copyOf(get(i));
        return levelsCopy;
    }
    /**
     * do extra verification of the sstables in the generations
     *
     * only used during tests
     */
    private void maybeVerifyLevels()
    {
        // At most once per 5 seconds, and only when strict checks are enabled.
        if (!strictLCSChecksTest || System.nanoTime() - lastOverlapCheck <= TimeUnit.NANOSECONDS.convert(5, TimeUnit.SECONDS))
            return;
        logger.info("LCS verifying levels");
        lastOverlapCheck = System.nanoTime();
        for (int i = 1; i < levelCount(); i++)
        {
            SSTableReader prev = null;
            for (SSTableReader sstable : get(i))
            {
                // no overlap:
                assert prev == null || prev.last.compareTo(sstable.first) < 0;
                prev = sstable;
                // make sure it does not exist in any other level:
                for (int j = 0; j < levelCount(); j++)
                {
                    if (i == j)
                        continue;
                    assert !get(j).contains(sstable);
                }
            }
        }
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.runtime.task;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.log4j.Logger;
import org.apache.tez.common.ContainerContext;
import org.apache.tez.common.ContainerTask;
import org.apache.tez.common.TezTaskUmbilicalProtocol;
import org.apache.tez.dag.api.ProcessorDescriptor;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.TezException;
import org.apache.tez.dag.api.UserPayload;
import org.apache.tez.dag.records.TezDAGID;
import org.apache.tez.dag.records.TezTaskAttemptID;
import org.apache.tez.dag.records.TezTaskID;
import org.apache.tez.dag.records.TezVertexID;
import org.apache.tez.runtime.api.ProcessorContext;
import org.apache.tez.runtime.api.events.TaskAttemptCompletedEvent;
import org.apache.tez.runtime.api.events.TaskAttemptFailedEvent;
import org.apache.tez.runtime.api.impl.InputSpec;
import org.apache.tez.runtime.api.impl.OutputSpec;
import org.apache.tez.runtime.api.impl.TaskSpec;
import org.apache.tez.runtime.api.impl.TezEvent;
import org.apache.tez.runtime.api.impl.TezHeartbeatRequest;
import org.apache.tez.runtime.api.impl.TezHeartbeatResponse;
import org.apache.tez.runtime.library.processor.SimpleProcessor;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.HashMultimap;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
// Tests in this class cannot be run in parallel.
public class TestTaskExecution {
private static final Logger LOG = Logger.getLogger(TestTaskExecution.class);
private static final String HEARTBEAT_EXCEPTION_STRING = "HeartbeatException";
private static final Configuration defaultConf = new Configuration();
private static final FileSystem localFs;
private static final Path workDir;
// Single shared thread that actually runs each task under test; torn down in shutdown().
private static final ExecutorService taskExecutor = Executors.newFixedThreadPool(1);
static {
    // Force the local filesystem and set up a per-class work directory under test.build.data.
    defaultConf.set("fs.defaultFS", "file:///");
    try {
        localFs = FileSystem.getLocal(defaultConf);
        Path wd = new Path(System.getProperty("test.build.data", "/tmp"),
            TestTaskExecution.class.getSimpleName());
        workDir = localFs.makeQualified(wd);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
// Tests coordinate with TestProcessor via static state; reset it before every test.
@Before
public void reset() {
    TestProcessor.reset();
}
@AfterClass
public static void shutdown() {
    taskExecutor.shutdownNow();
}
// Runs a single task to successful completion and verifies the reporter went idle
// and a success event was delivered on the umbilical.
// Fix: the finally block now guards against executor being null — previously an
// exception thrown before the listeningDecorator assignment would trigger an NPE
// in finally, masking the original failure.
@Test(timeout = 5000)
public void testSingleSuccessfulTask() throws IOException, InterruptedException, TezException,
    ExecutionException {
    ListeningExecutorService executor = null;
    try {
        ExecutorService rawExecutor = Executors.newFixedThreadPool(1);
        executor = MoreExecutors.listeningDecorator(rawExecutor);
        ApplicationId appId = ApplicationId.newInstance(10000, 1);
        TezTaskUmbilicalForTest umbilical = new TezTaskUmbilicalForTest();
        TaskReporter taskReporter = createTaskReporter(appId, umbilical);
        TezTaskRunner taskRunner = createTaskRunner(appId, umbilical, taskReporter, executor,
            TestProcessor.CONF_EMPTY);
        // Setup the executor
        Future<Boolean> taskRunnerFuture = taskExecutor.submit(new TaskRunnerCallable(taskRunner));
        // Signal the processor to go through
        TestProcessor.signal();
        boolean result = taskRunnerFuture.get();
        assertTrue(result);
        assertNull(taskReporter.currentCallable);
        umbilical.verifyTaskSuccessEvent();
    } finally {
        if (executor != null) {
            executor.shutdownNow();
        }
    }
}
/**
 * Runs two tasks back to back on the same reporter/umbilical and verifies a
 * success event is sent for each.
 */
@Test(timeout = 5000)
public void testMultipleSuccessfulTasks() throws IOException, InterruptedException, TezException,
    ExecutionException {
  ListeningExecutorService executor = null;
  try {
    ExecutorService rawExecutor = Executors.newFixedThreadPool(1);
    executor = MoreExecutors.listeningDecorator(rawExecutor);
    ApplicationId appId = ApplicationId.newInstance(10000, 1);
    TezTaskUmbilicalForTest umbilical = new TezTaskUmbilicalForTest();
    TaskReporter taskReporter = createTaskReporter(appId, umbilical);
    TezTaskRunner taskRunner = createTaskRunner(appId, umbilical, taskReporter, executor,
        TestProcessor.CONF_EMPTY);
    Future<Boolean> taskRunnerFuture = taskExecutor.submit(new TaskRunnerCallable(taskRunner));
    TestProcessor.signal();
    boolean result = taskRunnerFuture.get();
    assertTrue(result);
    assertNull(taskReporter.currentCallable);
    umbilical.verifyTaskSuccessEvent();
    umbilical.resetTrackedEvents();
    // Second task: reuses the same reporter, umbilical and executor.
    taskRunner = createTaskRunner(appId, umbilical, taskReporter, executor,
        TestProcessor.CONF_EMPTY);
    taskRunnerFuture = taskExecutor.submit(new TaskRunnerCallable(taskRunner));
    TestProcessor.signal();
    result = taskRunnerFuture.get();
    assertTrue(result);
    assertNull(taskReporter.currentCallable);
    umbilical.verifyTaskSuccessEvent();
  } finally {
    // Null-guard so a failure creating the executor is not masked by an NPE.
    if (executor != null) {
      executor.shutdownNow();
    }
  }
}
/**
 * Verifies that a TezException thrown by the processor fails the task and
 * produces a failure event with detailed diagnostics.
 */
@Test(timeout = 5000)
public void testFailedTask() throws IOException, InterruptedException, TezException {
  ListeningExecutorService executor = null;
  try {
    ExecutorService rawExecutor = Executors.newFixedThreadPool(1);
    executor = MoreExecutors.listeningDecorator(rawExecutor);
    ApplicationId appId = ApplicationId.newInstance(10000, 1);
    TezTaskUmbilicalForTest umbilical = new TezTaskUmbilicalForTest();
    TaskReporter taskReporter = createTaskReporter(appId, umbilical);
    TezTaskRunner taskRunner = createTaskRunner(appId, umbilical, taskReporter, executor,
        TestProcessor.CONF_THROW_TEZ_EXCEPTION);
    Future<Boolean> taskRunnerFuture = taskExecutor.submit(new TaskRunnerCallable(taskRunner));
    // Wait until the processor is running, then let it throw.
    TestProcessor.awaitStart();
    TestProcessor.signal();
    try {
      taskRunnerFuture.get();
      fail("Expecting the task to fail");
    } catch (ExecutionException e) {
      Throwable cause = e.getCause();
      LOG.info(cause.getClass().getName());
      assertTrue(cause instanceof TezException);
    }
    assertNull(taskReporter.currentCallable);
    umbilical.verifyTaskFailedEvent("Failure while running task:org.apache.tez.dag.api.TezException: TezException");
  } finally {
    // Null-guard so a failure creating the executor is not masked by an NPE.
    if (executor != null) {
      executor.shutdownNow();
    }
  }
}
/**
 * Verifies that an unloadable processor class fails the task with a
 * "Unable to load class" diagnostic.
 */
@Test(timeout = 5000)
public void testFailedTask2() throws IOException, InterruptedException, TezException {
  ListeningExecutorService executor = null;
  try {
    ExecutorService rawExecutor = Executors.newFixedThreadPool(1);
    executor = MoreExecutors.listeningDecorator(rawExecutor);
    ApplicationId appId = ApplicationId.newInstance(10000, 1);
    TezTaskUmbilicalForTest umbilical = new TezTaskUmbilicalForTest();
    TaskReporter taskReporter = createTaskReporter(appId, umbilical);
    // "NotExitedProcessor" deliberately names a class that does not exist.
    TezTaskRunner taskRunner = createTaskRunner(appId, umbilical, taskReporter, executor,
        "NotExitedProcessor", TestProcessor.CONF_THROW_TEZ_EXCEPTION);
    Future<Boolean> taskRunnerFuture = taskExecutor.submit(new TaskRunnerCallable(taskRunner));
    try {
      taskRunnerFuture.get();
    } catch (ExecutionException e) {
      Throwable cause = e.getCause();
      LOG.info(cause.getClass().getName());
      assertTrue(cause instanceof TezException);
    }
    assertNull(taskReporter.currentCallable);
    umbilical.verifyTaskFailedEvent("Failure while running task:org.apache.tez.dag.api.TezUncheckedException: "
        + "Unable to load class: NotExitedProcessor");
  } finally {
    // Null-guard so a failure creating the executor is not masked by an NPE.
    if (executor != null) {
      executor.shutdownNow();
    }
  }
}
/**
 * Verifies that a heartbeat failure interrupts the running task and that no
 * completion event is sent (the umbilical is already broken).
 */
@Test(timeout = 5000)
public void testHeartbeatException() throws IOException, InterruptedException, TezException {
  ListeningExecutorService executor = null;
  try {
    ExecutorService rawExecutor = Executors.newFixedThreadPool(1);
    executor = MoreExecutors.listeningDecorator(rawExecutor);
    ApplicationId appId = ApplicationId.newInstance(10000, 1);
    TezTaskUmbilicalForTest umbilical = new TezTaskUmbilicalForTest();
    TaskReporter taskReporter = createTaskReporter(appId, umbilical);
    TezTaskRunner taskRunner = createTaskRunner(appId, umbilical, taskReporter, executor,
        TestProcessor.CONF_EMPTY);
    Future<Boolean> taskRunnerFuture = taskExecutor.submit(new TaskRunnerCallable(taskRunner));
    TestProcessor.awaitStart();
    // Make the next heartbeat throw; do NOT signal the processor, so the
    // task must end via interruption.
    umbilical.signalThrowException();
    umbilical.awaitRegisteredEvent();
    try {
      taskRunnerFuture.get();
      fail("Expecting the task to fail");
    } catch (ExecutionException e) {
      Throwable cause = e.getCause();
      assertTrue(cause instanceof IOException);
      assertTrue(cause.getMessage().contains(HEARTBEAT_EXCEPTION_STRING));
    }
    TestProcessor.awaitCompletion();
    assertTrue(TestProcessor.wasInterrupted());
    assertNull(taskReporter.currentCallable);
    // No completion events since umbilical communication already failed.
    umbilical.verifyNoCompletionEvents();
  } finally {
    // Null-guard so a failure creating the executor is not masked by an NPE.
    if (executor != null) {
      executor.shutdownNow();
    }
  }
}
/**
 * Verifies that a shouldDie heartbeat response interrupts the task, makes the
 * runner return false, and produces no completion event.
 */
@Test(timeout = 5000)
public void testHeartbeatShouldDie() throws IOException, InterruptedException, TezException,
    ExecutionException {
  ListeningExecutorService executor = null;
  try {
    ExecutorService rawExecutor = Executors.newFixedThreadPool(1);
    executor = MoreExecutors.listeningDecorator(rawExecutor);
    ApplicationId appId = ApplicationId.newInstance(10000, 1);
    TezTaskUmbilicalForTest umbilical = new TezTaskUmbilicalForTest();
    TaskReporter taskReporter = createTaskReporter(appId, umbilical);
    TezTaskRunner taskRunner = createTaskRunner(appId, umbilical, taskReporter, executor,
        TestProcessor.CONF_EMPTY);
    Future<Boolean> taskRunnerFuture = taskExecutor.submit(new TaskRunnerCallable(taskRunner));
    TestProcessor.awaitStart();
    // Ask the AM-side stub to return shouldDie; do NOT signal the processor,
    // so the task must end via interruption.
    umbilical.signalSendShouldDie();
    umbilical.awaitRegisteredEvent();
    boolean result = taskRunnerFuture.get();
    assertFalse(result);
    TestProcessor.awaitCompletion();
    assertTrue(TestProcessor.wasInterrupted());
    assertNull(taskReporter.currentCallable);
    // No completion events since shouldDie was requested by the AM, which
    // should have killed the task.
    umbilical.verifyNoCompletionEvents();
  } finally {
    // Null-guard so a failure creating the executor is not masked by an NPE.
    if (executor != null) {
      executor.shutdownNow();
    }
  }
}
/**
 * Verifies that the container reporter invokes getTask() exactly once when
 * the umbilical asks the container to die.
 */
@Test(timeout = 5000)
public void testGetTaskShouldDie() throws InterruptedException, ExecutionException {
  ListeningExecutorService executor = null;
  try {
    ExecutorService rawExecutor = Executors.newFixedThreadPool(1);
    executor = MoreExecutors.listeningDecorator(rawExecutor);
    ApplicationId appId = ApplicationId.newInstance(10000, 1);
    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
    TezTaskUmbilicalForTest umbilical = new TezTaskUmbilicalForTest();
    ContainerContext containerContext = new ContainerContext(containerId.toString());
    ContainerReporter containerReporter = new ContainerReporter(umbilical, containerContext, 100);
    ListenableFuture<ContainerTask> getTaskFuture = executor.submit(containerReporter);
    getTaskFuture.get();
    // The stub umbilical always responds shouldDie=true on the first call.
    assertEquals(1, umbilical.getTaskInvocations);
  } finally {
    // Null-guard so a failure creating the executor is not masked by an NPE.
    if (executor != null) {
      executor.shutdownNow();
    }
  }
}
// Potential new tests
// Different states - initialization failure, close failure
// getTask states
/** Adapts {@link TezTaskRunner#run()} to a {@link Callable} for executor submission. */
private static class TaskRunnerCallable implements Callable<Boolean> {
  private final TezTaskRunner taskRunner;
  public TaskRunnerCallable(TezTaskRunner taskRunner) {
    this.taskRunner = taskRunner;
  }
  @Override
  public Boolean call() throws Exception {
    // true on success; false when the task was asked to die (see tests above).
    return taskRunner.run();
  }
}
/**
 * Processor used by these tests. Coordinates with the test thread via static
 * lock/condition fields, so only one test may use it at a time (tests in this
 * class must not run in parallel).
 */
public static class TestProcessor extends SimpleProcessor {

  // Bit-flag payloads selecting the processor behaviour for a test.
  public static final byte[] CONF_EMPTY = new byte[] { 0 };
  public static final byte[] CONF_THROW_IO_EXCEPTION = new byte[] { 1 };
  public static final byte[] CONF_THROW_TEZ_EXCEPTION = new byte[] { 2 };
  public static final byte[] CONF_SIGNAL_FATAL_AND_THROW = new byte[] { 4 };
  public static final byte[] CONF_SIGNAL_FATAL_AND_LOOP = new byte[] { 8 };
  public static final byte[] CONF_SIGNAL_FATAL_AND_COMPLETE = new byte[] { 16 };

  private static final Logger LOG = Logger.getLogger(TestProcessor.class);

  private static final ReentrantLock processorLock = new ReentrantLock();
  private static final Condition processorCondition = processorLock.newCondition();
  private static final Condition completionCondition = processorLock.newCondition();
  private static final Condition runningCondition = processorLock.newCondition();
  // All flags below are guarded by processorLock.
  private static boolean completed = false;
  private static boolean running = false;
  private static boolean signalled = false;

  public static boolean receivedInterrupt = false;

  private boolean throwIOException = false;
  private boolean throwTezException = false;
  private boolean signalFatalAndThrow = false;
  private boolean signalFatalAndLoop = false;
  private boolean signalFatalAndComplete = false;

  public TestProcessor(ProcessorContext context) {
    super(context);
  }

  @Override
  public void initialize() throws Exception {
    parseConf(getContext().getUserPayload().deepCopyAsArray());
  }

  private void parseConf(byte[] bytes) {
    byte b = bytes[0];
    // Each behaviour is encoded as one bit. The original code tested
    // "(b & N) > 1", which is always false for bit 0 and silently disabled
    // CONF_THROW_IO_EXCEPTION; "!= 0" tests the bit correctly.
    throwIOException = (b & 1) != 0;
    throwTezException = (b & 2) != 0;
    signalFatalAndThrow = (b & 4) != 0;
    signalFatalAndLoop = (b & 8) != 0;
    signalFatalAndComplete = (b & 16) != 0;
  }

  /** Clears all static state; called from the @Before method of each test. */
  public static void reset() {
    signalled = false;
    receivedInterrupt = false;
    completed = false;
    running = false;
  }

  /** Allows a processor blocked in {@link #run()} to proceed. */
  public static void signal() {
    LOG.info("Signalled");
    processorLock.lock();
    try {
      signalled = true;
      processorCondition.signal();
    } finally {
      processorLock.unlock();
    }
  }

  /** Blocks until the processor has entered {@link #run()}. */
  public static void awaitStart() throws InterruptedException {
    LOG.info("Awaiting Process run");
    processorLock.lock();
    try {
      // Loop guards against spurious wakeups (original used a single bare await).
      while (!running) {
        runningCondition.await();
      }
    } finally {
      processorLock.unlock();
    }
  }

  /** Blocks until the processor has left {@link #run()}. */
  public static void awaitCompletion() throws InterruptedException {
    LOG.info("Await completion");
    processorLock.lock();
    try {
      while (!completed) {
        completionCondition.await();
      }
    } finally {
      processorLock.unlock();
    }
  }

  /** Whether the processor observed an interrupt during its last run. */
  public static boolean wasInterrupted() {
    processorLock.lock();
    try {
      return receivedInterrupt;
    } finally {
      processorLock.unlock();
    }
  }

  @Override
  public void run() throws Exception {
    processorLock.lock();
    running = true;
    runningCondition.signal();
    try {
      try {
        LOG.info("Signal is: " + signalled);
        // Wait for the test's signal; loop guards against spurious wakeups.
        while (!signalled) {
          LOG.info("Waiting for processor signal");
          processorCondition.await();
        }
        if (Thread.currentThread().isInterrupted()) {
          throw new InterruptedException();
        }
        LOG.info("Received processor signal");
        if (throwIOException) {
          throw new IOException();
        } else if (throwTezException) {
          throw new TezException("TezException");
        } else if (signalFatalAndThrow) {
          IOException io = new IOException("FATALERROR");
          getContext().fatalError(io, "FATALERROR");
          throw io;
        } else if (signalFatalAndComplete) {
          IOException io = new IOException("FATALERROR");
          getContext().fatalError(io, "FATALERROR");
          return;
        } else if (signalFatalAndLoop) {
          IOException io = new IOException("FATALERROR");
          getContext().fatalError(io, "FATALERROR");
          LOG.info("Waiting for Processor signal again");
          // NOTE(review): no predicate for this second wait; a spurious
          // wakeup only shortens it, which the tests tolerate.
          processorCondition.await();
          LOG.info("Received second processor signal");
        }
      } catch (InterruptedException e) {
        receivedInterrupt = true;
      }
    } finally {
      completed = true;
      completionCondition.signal();
      processorLock.unlock();
    }
  }
}
/**
 * Test umbilical that records events sent by the task and can be told to
 * fail heartbeats or to ask the task to die.
 */
private static class TezTaskUmbilicalForTest implements TezTaskUmbilicalProtocol {

  private static final Logger LOG = Logger.getLogger(TezTaskUmbilicalForTest.class);

  // Events received over heartbeats; guarded by umbilicalLock.
  private final List<TezEvent> requestEvents = new LinkedList<TezEvent>();

  private final ReentrantLock umbilicalLock = new ReentrantLock();
  private final Condition eventCondition = umbilicalLock.newCondition();
  // True while a throw/die request is waiting to be delivered via heartbeat.
  private boolean pendingEvent = false;
  // True once the pending request has been acted on by a heartbeat.
  private boolean eventEnacted = false;

  volatile int getTaskInvocations = 0;

  private boolean shouldThrowException = false;
  private boolean shouldSendDieSignal = false;

  /** Makes the next heartbeat throw an IOException. */
  public void signalThrowException() {
    umbilicalLock.lock();
    try {
      shouldThrowException = true;
      pendingEvent = true;
    } finally {
      umbilicalLock.unlock();
    }
  }

  /** Makes the next heartbeat respond with shouldDie. */
  public void signalSendShouldDie() {
    umbilicalLock.lock();
    try {
      shouldSendDieSignal = true;
      pendingEvent = true;
    } finally {
      umbilicalLock.unlock();
    }
  }

  /** Blocks until a previously signalled throw/die request has been delivered. */
  public void awaitRegisteredEvent() throws InterruptedException {
    umbilicalLock.lock();
    try {
      // Loop guards against spurious wakeups (original used a single bare await).
      while (!eventEnacted) {
        LOG.info("Awaiting event");
        eventCondition.await();
      }
    } finally {
      umbilicalLock.unlock();
    }
  }

  /** Clears all recorded events. */
  public void resetTrackedEvents() {
    umbilicalLock.lock();
    try {
      requestEvents.clear();
    } finally {
      umbilicalLock.unlock();
    }
  }

  /** Asserts that neither a failed nor a completed event was recorded. */
  public void verifyNoCompletionEvents() {
    umbilicalLock.lock();
    try {
      for (TezEvent event : requestEvents) {
        if (event.getEvent() instanceof TaskAttemptFailedEvent) {
          fail("Found a TaskAttemptFailedEvent when not expected");
        }
        if (event.getEvent() instanceof TaskAttemptCompletedEvent) {
          // Fixed typo in the original message ("TaskAttemptCompletedvent").
          fail("Found a TaskAttemptCompletedEvent when not expected");
        }
      }
    } finally {
      umbilicalLock.unlock();
    }
  }

  /** Asserts a failure event with the given diagnostics prefix was recorded. */
  public void verifyTaskFailedEvent(String diagnostics) {
    umbilicalLock.lock();
    try {
      for (TezEvent event : requestEvents) {
        if (event.getEvent() instanceof TaskAttemptFailedEvent) {
          TaskAttemptFailedEvent failedEvent = (TaskAttemptFailedEvent) event.getEvent();
          if (failedEvent.getDiagnostics().startsWith(diagnostics)) {
            return;
          } else {
            fail("No detailed diagnostics message in TaskAttemptFailedEvent");
          }
        }
      }
      fail("No TaskAttemptFailedEvents sent over umbilical");
    } finally {
      umbilicalLock.unlock();
    }
  }

  /** Asserts a completion event was recorded. */
  public void verifyTaskSuccessEvent() {
    umbilicalLock.lock();
    try {
      for (TezEvent event : requestEvents) {
        if (event.getEvent() instanceof TaskAttemptCompletedEvent) {
          return;
        }
      }
      // Fixed: the original message incorrectly referenced failed events.
      fail("No TaskAttemptCompletedEvents sent over umbilical");
    } finally {
      umbilicalLock.unlock();
    }
  }

  @Override
  public long getProtocolVersion(String protocol, long clientVersion) throws IOException {
    return 0;
  }

  @Override
  public ProtocolSignature getProtocolSignature(String protocol, long clientVersion,
      int clientMethodsHash) throws IOException {
    return null;
  }

  @Override
  public ContainerTask getTask(ContainerContext containerContext) throws IOException {
    // Always asks the container to die; exercised by testGetTaskShouldDie.
    getTaskInvocations++;
    return new ContainerTask(null, true, null, null, false);
  }

  @Override
  public boolean canCommit(TezTaskAttemptID taskid) throws IOException {
    return true;
  }

  @Override
  public TezHeartbeatResponse heartbeat(TezHeartbeatRequest request) throws IOException,
      TezException {
    umbilicalLock.lock();
    if (request.getEvents() != null) {
      requestEvents.addAll(request.getEvents());
    }
    try {
      if (shouldThrowException) {
        LOG.info("TestUmbilical throwing Exception");
        throw new IOException(HEARTBEAT_EXCEPTION_STRING);
      }
      TezHeartbeatResponse response = new TezHeartbeatResponse();
      response.setLastRequestId(request.getRequestId());
      if (shouldSendDieSignal) {
        LOG.info("TestUmbilical returning shouldDie=true");
        response.setShouldDie();
      }
      return response;
    } finally {
      // Whether the heartbeat threw or not, mark a pending throw/die request
      // as delivered so awaitRegisteredEvent() can unblock the test thread.
      if (pendingEvent) {
        eventEnacted = true;
        LOG.info("Signalling Event");
        eventCondition.signal();
      }
      umbilicalLock.unlock();
    }
  }
}
/**
 * Creates a TaskReporter bound to the given umbilical and container id.
 * NOTE(review): the numeric args (100, 1000, 100) look like polling/timeout
 * settings — confirm against the TaskReporter constructor.
 */
private TaskReporter createTaskReporter(ApplicationId appId, TezTaskUmbilicalForTest umbilical) {
  TaskReporter taskReporter = new TaskReporter(umbilical, 100, 1000, 100, new AtomicLong(0),
      createContainerId(appId).toString());
  return taskReporter;
}
/** Convenience overload that runs {@link TestProcessor} as the processor class. */
private TezTaskRunner createTaskRunner(ApplicationId appId, TezTaskUmbilicalForTest umbilical,
    TaskReporter taskReporter, ListeningExecutorService executor, byte[] processorConf)
    throws IOException {
  return createTaskRunner(appId, umbilical, taskReporter, executor, TestProcessor.class.getName(),
      processorConf);
}
/**
 * Builds a TezTaskRunner for a single-task spec (dag 1 / vertex 1 / task 1 /
 * attempt 1) using the given processor class, with the processor config
 * delivered as the user payload and a fresh per-call working directory.
 */
private TezTaskRunner createTaskRunner(ApplicationId appId, TezTaskUmbilicalForTest umbilical,
    TaskReporter taskReporter, ListeningExecutorService executor, String processorClass, byte[] processorConf) throws IOException{
  TezConfiguration tezConf = new TezConfiguration(defaultConf);
  UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
  // Random subdirectory so concurrent/repeated runs do not share local dirs.
  Path testDir = new Path(workDir, UUID.randomUUID().toString());
  String[] localDirs = new String[] { testDir.toString() };
  TezDAGID dagId = TezDAGID.getInstance(appId, 1);
  TezVertexID vertexId = TezVertexID.getInstance(dagId, 1);
  TezTaskID taskId = TezTaskID.getInstance(vertexId, 1);
  TezTaskAttemptID taskAttemptId = TezTaskAttemptID.getInstance(taskId, 1);
  ProcessorDescriptor processorDescriptor = ProcessorDescriptor.create(processorClass)
      .setUserPayload(UserPayload.create(ByteBuffer.wrap(processorConf)));
  // No inputs/outputs: the processor body alone drives each test.
  TaskSpec taskSpec = new TaskSpec(taskAttemptId, "dagName", "vertexName", -1, processorDescriptor,
      new ArrayList<InputSpec>(), new ArrayList<OutputSpec>(), null);
  TezTaskRunner taskRunner = new TezTaskRunner(tezConf, ugi, localDirs, taskSpec, umbilical, 1,
      new HashMap<String, ByteBuffer>(), new HashMap<String, String>(), HashMultimap.<String, String> create(), taskReporter,
      executor, null);
  return taskRunner;
}
/** Builds container 1 of attempt 1 for the given application. */
private ContainerId createContainerId(ApplicationId appId) {
  ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
  ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
  return containerId;
}
}
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.importexport.model.transform;
import org.w3c.dom.Node;
import javax.annotation.Generated;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.transform.StandardErrorUnmarshaller;
import com.amazonaws.services.importexport.model.MultipleRegionsException;
/**
 * Unmarshalls an AWS Import/Export error response into a
 * {@link MultipleRegionsException} when the error code matches; otherwise
 * returns {@code null} so other unmarshallers can try.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class MultipleRegionsExceptionUnmarshaller extends StandardErrorUnmarshaller {

    public MultipleRegionsExceptionUnmarshaller() {
        super(MultipleRegionsException.class);
    }

    @Override
    public AmazonServiceException unmarshall(Node node) throws Exception {
        // Only handle the single error code this unmarshaller understands;
        // constant-first equals also covers a null error code.
        String errorCode = parseErrorCode(node);
        if (!"MultipleRegionsException".equals(errorCode)) {
            return null;
        }
        return (MultipleRegionsException) super.unmarshall(node);
    }
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package br.edu.ifma.si.programacaoextrema.projetotcc.repository;
import br.edu.ifma.si.programacaoextrema.projetotcc.model.Entidade;
import java.util.Objects;
import javax.persistence.EntityManager;
/**
*
* @author silas
*/
/**
 * Generic JPA data-access object for {@link Entidade} subclasses.
 *
 * @author silas
 */
public class DAO<T extends Entidade> {

    private final EntityManager manager;

    DAO(EntityManager manager) {
        this.manager = manager;
    }

    /** Looks up an entity by its integer primary key. */
    T buscaPorId(Class<T> entityClass, Integer id) {
        return manager.find(entityClass, id);
    }

    /** Looks up an entity by an arbitrary primary-key object. */
    T buscaPorObject(Class<T> entityClass, Object primaryKey) {
        return manager.find(entityClass, primaryKey);
    }

    /** Persists a new entity (null id) or merges an existing one; returns the managed instance. */
    T salvaOuAtualiza(T entity) {
        if (Objects.isNull(entity.getId())) {
            this.manager.persist(entity);
            return entity;
        }
        return this.manager.merge(entity);
    }

    /** Removes the entity and flushes immediately so the delete hits the database. */
    void remove(T entity) {
        manager.remove(entity);
        manager.flush();
    }
}
|
package com.ziroom.ziroomcustomer.newclean.d;
import java.util.List;
/**
 * Cleaning-schedule response bean (decompiled; public accessor names are part
 * of the wire/serialization contract and must not change). Private fields are
 * renamed from the obfuscated a..f to match their accessors.
 */
public class f
{
  private List<ay> optionDateList;
  private String transactionScheduleSwitch;
  private Integer publicAreaStatus;
  private String document;
  private a bedroomCleanDateTime;
  private a zrkCleanDateTime;

  public a getBedroomCleanDateTime()
  {
    return this.bedroomCleanDateTime;
  }

  public String getDocument()
  {
    return this.document;
  }

  public List<ay> getOptionDateList()
  {
    return this.optionDateList;
  }

  public Integer getPublicAreaStatus()
  {
    return this.publicAreaStatus;
  }

  public String getTransactionScheduleSwitch()
  {
    return this.transactionScheduleSwitch;
  }

  public a getZrkCleanDateTime()
  {
    return this.zrkCleanDateTime;
  }

  public void setBedroomCleanDateTime(a bedroomCleanDateTime)
  {
    this.bedroomCleanDateTime = bedroomCleanDateTime;
  }

  public void setDocument(String document)
  {
    this.document = document;
  }

  public void setOptionDateList(List<ay> optionDateList)
  {
    this.optionDateList = optionDateList;
  }

  public void setPublicAreaStatus(Integer publicAreaStatus)
  {
    this.publicAreaStatus = publicAreaStatus;
  }

  public void setTransactionScheduleSwitch(String transactionScheduleSwitch)
  {
    this.transactionScheduleSwitch = transactionScheduleSwitch;
  }

  public void setZrkCleanDateTime(a zrkCleanDateTime)
  {
    this.zrkCleanDateTime = zrkCleanDateTime;
  }

  /** Date/time slot bean (public nested class name is part of the contract). */
  public static class a
  {
    private String date;
    private String time;
    private String week;
    private String document;

    public String getDate()
    {
      return this.date;
    }

    public String getDocument()
    {
      return this.document;
    }

    public String getTime()
    {
      return this.time;
    }

    public String getWeek()
    {
      return this.week;
    }

    public void setDate(String date)
    {
      this.date = date;
    }

    public void setDocument(String document)
    {
      this.document = document;
    }

    public void setTime(String time)
    {
      this.time = time;
    }

    public void setWeek(String week)
    {
      this.week = week;
    }
  }
}
/* Location: /Users/gaoht/Downloads/zirom/classes3-dex2jar.jar!/com/ziroom/ziroomcustomer/newclean/d/f.class
* Java compiler version: 6 (50.0)
* JD-Core Version: 0.7.1
*/
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE190_Integer_Overflow__long_max_multiply_17.java
Label Definition File: CWE190_Integer_Overflow.label.xml
Template File: sources-sinks-17.tmpl.java
*/
/*
* @description
* CWE: 190 Integer Overflow
* BadSource: max Set data to the max value for long
* GoodSource: A hardcoded non-zero, non-min, non-max, even number
* Sinks: multiply
* GoodSink: Ensure there will not be an overflow before multiplying data by 2
* BadSink : If data is positive, multiply by 2, which can cause an overflow
* Flow Variant: 17 Control flow: for loops
*
* */
package testcases.CWE190_Integer_Overflow.s04;
import testcasesupport.*;
import javax.servlet.http.*;
/*
 * Juliet/SARD generated vulnerability testcase. The overflow in bad() is
 * INTENTIONAL — static-analysis tools are graded against it — so the flawed
 * code must not be "fixed"; only goodB2G() demonstrates the proper guard.
 */
public class CWE190_Integer_Overflow__long_max_multiply_17 extends AbstractTestCase
{
    public void bad() throws Throwable
    {
        long data;
        /* We need to have one source outside of a for loop in order
         * to prevent the Java compiler from generating an error because
         * data is uninitialized
         */
        /* POTENTIAL FLAW: Use the maximum size of the data type */
        data = Long.MAX_VALUE;
        for (int j = 0; j < 1; j++)
        {
            if(data > 0) /* ensure we won't have an underflow */
            {
                /* POTENTIAL FLAW: if (data*2) > Long.MAX_VALUE, this will overflow */
                long result = (long)(data * 2);
                IO.writeLine("result: " + result);
            }
        }
    }

    /* goodG2B() - use goodsource and badsink */
    private void goodG2B() throws Throwable
    {
        long data;
        /* FIX: Use a hardcoded number that won't cause underflow, overflow, divide by zero, or loss-of-precision issues */
        data = 2;
        for (int j = 0; j < 1; j++)
        {
            if(data > 0) /* ensure we won't have an underflow */
            {
                /* POTENTIAL FLAW: if (data*2) > Long.MAX_VALUE, this will overflow */
                long result = (long)(data * 2);
                IO.writeLine("result: " + result);
            }
        }
    }

    /* goodB2G() - use badsource and goodsink*/
    private void goodB2G() throws Throwable
    {
        long data;
        /* POTENTIAL FLAW: Use the maximum size of the data type */
        data = Long.MAX_VALUE;
        for (int k = 0; k < 1; k++)
        {
            if(data > 0) /* ensure we won't have an underflow */
            {
                /* FIX: Add a check to prevent an overflow from occurring */
                if (data < (Long.MAX_VALUE/2))
                {
                    long result = (long)(data * 2);
                    IO.writeLine("result: " + result);
                }
                else
                {
                    IO.writeLine("data value is too large to perform multiplication.");
                }
            }
        }
    }

    public void good() throws Throwable
    {
        goodG2B();
        goodB2G();
    }

    /* Below is the main(). It is only used when building this testcase on
     * its own for testing or for building a binary to use in testing binary
     * analysis tools. It is not used when compiling all the testcases as one
     * application, which is how source code analysis tools are tested.
     */
    public static void main(String[] args) throws ClassNotFoundException,
           InstantiationException, IllegalAccessException
    {
        mainFromParent(args);
    }
}
|
package com.baeldung.concurrent.threadfactory;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * {@link ThreadFactory} that names created threads {@code <name>-Thread_<id>}
 * with ids increasing from 1.
 *
 * <p>Uses an {@link AtomicInteger} counter: executors may call a factory from
 * multiple threads, and the original plain {@code int threadId++} could race,
 * producing duplicate names.
 */
public class BaeldungThreadFactory implements ThreadFactory {
    private final AtomicInteger threadId;
    private final String name;

    public BaeldungThreadFactory(String name) {
        threadId = new AtomicInteger(1);
        this.name = name;
    }

    @Override
    public Thread newThread(Runnable r) {
        // Reserve the id atomically so concurrent callers get unique names.
        int id = threadId.getAndIncrement();
        Thread t = new Thread(r, name + "-Thread_" + id);
        System.out.println("created new thread with id : " + id + " and name : " + t.getName());
        return t;
    }
}
|
package com.shure.surdes.system.service;
import java.util.List;
import java.util.Set;
import com.shure.surdes.common.core.domain.TreeSelect;
import com.shure.surdes.common.core.domain.entity.SysMenu;
import com.shure.surdes.system.domain.vo.RouterVo;
/**
 * Menu service layer.
 *
 * @author ruoyi
 */
public interface ISysMenuService
{
    /**
     * Queries the system menu list visible to a user.
     *
     * @param userId user ID
     * @return menu list
     */
    public List<SysMenu> selectMenuList(Long userId);

    /**
     * Queries the system menu list visible to a user, filtered by the given menu criteria.
     *
     * @param menu menu filter criteria
     * @param userId user ID
     * @return menu list
     */
    public List<SysMenu> selectMenuList(SysMenu menu, Long userId);

    /**
     * Queries the permission strings granted to a user.
     *
     * @param userId user ID
     * @return set of permission identifiers
     */
    public Set<String> selectMenuPermsByUserId(Long userId);

    /**
     * Queries the menu tree for a user.
     *
     * @param userId user ID
     * @return menu list
     */
    public List<SysMenu> selectMenuTreeByUserId(Long userId);

    /**
     * Queries the menu ids assigned to a role.
     *
     * @param roleId role ID
     * @return list of selected menu ids
     */
    public List<Integer> selectMenuListByRoleId(Long roleId);

    /**
     * Builds the router definitions required by the front end.
     *
     * @param menus menu list
     * @return router list
     */
    public List<RouterVo> buildMenus(List<SysMenu> menus);

    /**
     * Builds the tree structure required by the front end.
     *
     * @param menus menu list
     * @return menu tree
     */
    public List<SysMenu> buildMenuTree(List<SysMenu> menus);

    /**
     * Builds the drop-down tree structure required by the front end.
     *
     * @param menus menu list
     * @return drop-down tree list
     */
    public List<TreeSelect> buildMenuTreeSelect(List<SysMenu> menus);

    /**
     * Queries a menu by its ID.
     *
     * @param menuId menu ID
     * @return menu information
     */
    public SysMenu selectMenuById(Long menuId);

    /**
     * Checks whether a menu has child nodes.
     *
     * @param menuId menu ID
     * @return true if children exist, false otherwise
     */
    public boolean hasChildByMenuId(Long menuId);

    /**
     * Checks whether a menu is assigned to any role.
     *
     * @param menuId menu ID
     * @return true if assigned, false otherwise
     */
    public boolean checkMenuExistRole(Long menuId);

    /**
     * Inserts a new menu.
     *
     * @param menu menu information
     * @return affected rows
     */
    public int insertMenu(SysMenu menu);

    /**
     * Updates an existing menu.
     *
     * @param menu menu information
     * @return affected rows
     */
    public int updateMenu(SysMenu menu);

    /**
     * Deletes a menu by its ID.
     *
     * @param menuId menu ID
     * @return affected rows
     */
    public int deleteMenuById(Long menuId);

    /**
     * Checks whether a menu name is unique.
     *
     * @param menu menu information
     * @return uniqueness check result
     */
    public String checkMenuNameUnique(SysMenu menu);
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.iotdb.db.sync.datasource;
import org.apache.iotdb.db.sync.externalpipe.operation.Operation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Operation block backed by a mods (modification) file. Currently a stub:
 * data counting and operation extraction are not implemented yet.
 */
public class ModsfileOpBlock extends AbstractOpBlock {
  private static final Logger logger = LoggerFactory.getLogger(ModsfileOpBlock.class);

  /**
   * Mods file backing this block. The original constructor silently dropped
   * this parameter; it is retained here for the pending implementation.
   */
  private final String modsFileName;

  public ModsfileOpBlock(String sg, String modsFileName) {
    super(sg, -1);
    this.modsFileName = modsFileName;
  }

  @Override
  public long getDataCount() {
    // Return the cached count if it has already been computed.
    if (dataCount >= 0) {
      return dataCount;
    }
    // TODO: compute the count from modsFileName.
    return 0;
  }

  @Override
  public Operation getOperation(long index, long length) {
    // TODO: not implemented yet; callers must handle a null return.
    return null;
  }

  @Override
  public void close() {
    super.close();
  }
}
|
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rayo.core.verb;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
/**
 * Rayo "handset" verb model carrying media attachment parameters: SRTP crypto
 * suite/keys, codec, stereo flag and target mixer, or alternatively a SIP URI.
 *
 * <p>Fields are public and mutable — presumably accessed directly by the
 * (un)marshalling layer; TODO confirm before encapsulating.
 */
public class Handset extends BaseVerb {
	public String cryptoSuite = null;
	public String localCrypto = null;
	public String remoteCrypto = null;
	public String codec = null;
	public String stereo = null;
	public String mixer = null;
	public String group = null;  // not set by either constructor
	public String sipuri = null;
	// NOTE(review): toString() reports getCallId() from BaseVerb, not this
	// field; confirm whether this duplicate is still needed.
	public String callId = null;

	/** Media-parameter form: attach via crypto/codec/mixer settings. */
	public Handset(String cryptoSuite, String localCrypto, String remoteCrypto, String codec, String stereo, String mixer)
	{
		this.cryptoSuite = cryptoSuite;
		this.localCrypto = localCrypto;
		this.remoteCrypto = remoteCrypto;
		this.codec = codec;
		this.stereo = stereo;
		this.mixer = mixer;
	}

	/** SIP form: attach via a SIP URI, mixer and codec only. */
	public Handset(String sipuri, String mixer, String codec)
	{
		this.sipuri = sipuri;
		this.mixer = mixer;
		this.codec = codec;
	}

	@Override
	public String toString() {
		return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
			.append("callId", getCallId())
			.append("verbId", getVerbId())
			.append("cryptoSuite",cryptoSuite)
			.append("localCrypto",localCrypto)
			.append("remoteCrypto",remoteCrypto)
			.append("codec",codec)
			.append("stereo",stereo)
			.append("mixer",mixer)
			.append("sipuri",sipuri)
			.toString();
	}
}
|
package app.keyconnect.sdk.wallets;
import java.math.BigDecimal;
import java.math.BigInteger;
import javax.annotation.Nullable;
/**
 * A wallet bound to a specific blockchain that can build signed payment
 * transactions and expose its recovery material.
 */
public interface BlockchainWallet extends AddressableWallet {
  /**
   * Build payment transaction given the parameters and return a signed transaction that is
   * ready to be sent.
   *
   * @param to Recipient address
   * @param value Payment value
   * @param fee Fee value; may be null (implementation-defined default — confirm per chain)
   * @param sequence Blockchain sequence to use (if relevant)
   * @return A string representing a signed transaction that can be submitted to the blockchain
   */
  String buildPaymentTransaction(String to, BigDecimal value, @Nullable BigInteger fee,
      long sequence);

  /**
   * Returns passphrase (salt) if the wallet is a standalone wallet.
   *
   * @return String Passphrase or null
   */
  String getPassphrase();

  /**
   * Returns mnemonic if the wallet is a standalone wallet.
   *
   * @return String Mnemonic or null
   */
  String getMnemonic();
}
|
/*
* Copyright 2011 Edmunds.com, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.edmunds.etm.web.panel;
import org.apache.click.Control;
/**
* Marker interface for the GUI panel used to display information about the load balancer.
*
* @author David Trott
*/
public interface LoadBalancerConfigurationPanel extends Control {
    // Intentionally empty: this is a marker interface used only so that
    // load-balancer panels can be identified and treated as Click controls.
}
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.appsync.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.appsync.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* NotFoundException JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class NotFoundExceptionUnmarshaller extends JsonErrorUnmarshaller<com.amazonaws.services.appsync.model.NotFoundException> {

    private NotFoundExceptionUnmarshaller() {
        // Registers this unmarshaller for the "NotFoundException" error code.
        super("NotFoundException");
    }

    /**
     * Unmarshalls a NotFoundException from the JSON error payload.
     *
     * <p>NotFoundException carries no members of its own, so this generated code
     * simply consumes (skips) all tokens belonging to the error object and returns
     * a fresh exception instance with a null message.
     *
     * @param context the JSON unmarshalling context positioned at the error object
     * @return the unmarshalled exception, or {@code null} if the value is JSON null
     */
    @Override
    public com.amazonaws.services.appsync.model.NotFoundException unmarshall(JsonUnmarshallerContext context) throws Exception {
        com.amazonaws.services.appsync.model.NotFoundException notFoundException = new com.amazonaws.services.appsync.model.NotFoundException(null);

        // Remember where we started so we know when we have left this object.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // The whole error value is JSON null - nothing to unmarshall.
            return null;
        }

        // Advance through tokens until the end of the enclosing object is reached.
        while (true) {
            if (token == null)
                break;
            if (token == FIELD_NAME || token == START_OBJECT) {
                // No members to read for this exception type; fields are skipped.
            } else if (token == END_ARRAY || token == END_OBJECT) {
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    // Back at (or above) the depth we started at: the object is done.
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }
        return notFoundException;
    }

    private static NotFoundExceptionUnmarshaller instance;

    /**
     * Lazily created singleton. Not synchronized, but benign: the class holds no
     * mutable state, so creating more than one instance under a race is harmless.
     */
    public static NotFoundExceptionUnmarshaller getInstance() {
        if (instance == null)
            instance = new NotFoundExceptionUnmarshaller();
        return instance;
    }
}
|
package ro.pub.cs.systems.eim.practicaltest02;
import android.util.Log;
import android.widget.TextView;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.Socket;
import ro.pub.cs.systems.eim.practicaltest02.Constants;
import ro.pub.cs.systems.eim.practicaltest02.Utilities;
/**
 * Background thread that connects to a currency server, sends the requested
 * currency symbol, and streams each response line into a {@link TextView}.
 */
public class ClientThread extends Thread {

    /** Server host name or IP address. */
    private String address;
    /** Server TCP port. */
    private int port;
    /** Currency symbol sent to the server as the request. */
    private String currency;
    /** View updated (on the UI thread) with every line received. */
    private TextView currencyTextView;
    /** Socket for the active connection; always closed in the finally block. */
    private Socket socket;

    public ClientThread(String address, int port, String currency, TextView currencyTextView) {
        this.address = address;
        this.port = port;
        this.currency = currency;
        this.currencyTextView = currencyTextView;
    }

    @Override
    public void run() {
        try {
            // new Socket(host, port) either returns a connected socket or throws
            // an IOException; it can never return null, so the former null check
            // on the result was unreachable dead code and has been removed.
            socket = new Socket(address, port);
            BufferedReader bufferedReader = Utilities.getReader(socket);
            PrintWriter printWriter = Utilities.getWriter(socket);
            if (bufferedReader == null || printWriter == null) {
                Log.e(Constants.TAG, "[CLIENT THREAD] Buffered Reader / Print Writer are null!");
                return;
            }
            // Send the request, then forward every response line to the view.
            printWriter.println(currency);
            printWriter.flush();
            String currencyInformation;
            while ((currencyInformation = bufferedReader.readLine()) != null) {
                final String finalizedInformation = currencyInformation;
                // Views may only be touched on the UI thread, so post the update.
                currencyTextView.post(new Runnable() {
                    @Override
                    public void run() {
                        currencyTextView.setText(finalizedInformation);
                    }
                });
            }
        } catch (IOException ioException) {
            Log.e(Constants.TAG, "[CLIENT THREAD] An exception has occurred: " + ioException.getMessage());
            if (Constants.DEBUG) {
                ioException.printStackTrace();
            }
        } finally {
            // Closing the socket also closes the reader/writer wrapped around it.
            if (socket != null) {
                try {
                    socket.close();
                } catch (IOException ioException) {
                    Log.e(Constants.TAG, "[CLIENT THREAD] An exception has occurred: " + ioException.getMessage());
                    if (Constants.DEBUG) {
                        ioException.printStackTrace();
                    }
                }
            }
        }
    }
}
|
/*
*
* * Copyright 2019-2020 the original author or authors.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * https://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package springfox.test.contract.oas.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Objects;
/**
* User
*/
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.SpringCodegen",
date = "2019-11-30T09:49:26.034469-01:00[Atlantic/Azores]")
/**
 * User model generated for the petstore-style API. Plain mutable POJO with
 * fluent setters, value-based equality and a pretty-printing {@code toString}.
 */
public class User {
    @JsonProperty("id")
    private Long id;
    @JsonProperty("username")
    private String username;
    @JsonProperty("firstName")
    private String firstName;
    @JsonProperty("lastName")
    private String lastName;
    @JsonProperty("email")
    private String email;
    @JsonProperty("password")
    private String password;
    @JsonProperty("phone")
    private String phone;
    @JsonProperty("userStatus")
    private Integer userStatus;

    /** Fluent setter for {@code id}; returns {@code this} for chaining. */
    public User id(Long id) {
        this.id = id;
        return this;
    }

    /** @return the user id */
    @io.swagger.v3.oas.annotations.media.Schema(example = "10", description = "")
    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    /** Fluent setter for {@code username}; returns {@code this} for chaining. */
    public User username(String username) {
        this.username = username;
        return this;
    }

    /** @return the username */
    @io.swagger.v3.oas.annotations.media.Schema(example = "theUser", description = "")
    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    /** Fluent setter for {@code firstName}; returns {@code this} for chaining. */
    public User firstName(String firstName) {
        this.firstName = firstName;
        return this;
    }

    /** @return the first name */
    @io.swagger.v3.oas.annotations.media.Schema(example = "John", description = "")
    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    /** Fluent setter for {@code lastName}; returns {@code this} for chaining. */
    public User lastName(String lastName) {
        this.lastName = lastName;
        return this;
    }

    /** @return the last name */
    @io.swagger.v3.oas.annotations.media.Schema(example = "James", description = "")
    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    /** Fluent setter for {@code email}; returns {@code this} for chaining. */
    public User email(String email) {
        this.email = email;
        return this;
    }

    /** @return the email address */
    @io.swagger.v3.oas.annotations.media.Schema(example = "john@email.com", description = "")
    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    /** Fluent setter for {@code password}; returns {@code this} for chaining. */
    public User password(String password) {
        this.password = password;
        return this;
    }

    /** @return the password */
    @io.swagger.v3.oas.annotations.media.Schema(example = "12345", description = "")
    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    /** Fluent setter for {@code phone}; returns {@code this} for chaining. */
    public User phone(String phone) {
        this.phone = phone;
        return this;
    }

    /** @return the phone number */
    @io.swagger.v3.oas.annotations.media.Schema(example = "12345", description = "")
    public String getPhone() {
        return phone;
    }

    public void setPhone(String phone) {
        this.phone = phone;
    }

    /** Fluent setter for {@code userStatus}; returns {@code this} for chaining. */
    public User userStatus(Integer userStatus) {
        this.userStatus = userStatus;
        return this;
    }

    /** @return the user status */
    @io.swagger.v3.oas.annotations.media.Schema(example = "1", description = "User Status")
    public Integer getUserStatus() {
        return userStatus;
    }

    public void setUserStatus(Integer userStatus) {
        this.userStatus = userStatus;
    }

    /** Value equality across every field; subclasses never compare equal. */
    @SuppressWarnings("CyclomaticComplexity")
    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || !getClass().equals(o.getClass())) {
            return false;
        }
        User other = (User) o;
        return Objects.equals(id, other.id)
            && Objects.equals(username, other.username)
            && Objects.equals(firstName, other.firstName)
            && Objects.equals(lastName, other.lastName)
            && Objects.equals(email, other.email)
            && Objects.equals(password, other.password)
            && Objects.equals(phone, other.phone)
            && Objects.equals(userStatus, other.userStatus);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, username, firstName, lastName, email, password, phone, userStatus);
    }

    /** Multi-line dump of all fields, one per line, nested values indented. */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder();
        out.append("class User {\n");
        appendField(out, "id", id);
        appendField(out, "username", username);
        appendField(out, "firstName", firstName);
        appendField(out, "lastName", lastName);
        appendField(out, "email", email);
        appendField(out, "password", password);
        appendField(out, "phone", phone);
        appendField(out, "userStatus", userStatus);
        out.append("}");
        return out.toString();
    }

    /** Appends one "label: value" line in the toString layout. */
    private void appendField(StringBuilder out, String label, Object fieldValue) {
        out.append(" ").append(label).append(": ").append(toIndentedString(fieldValue)).append("\n");
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(Object o) {
        return o == null ? "null" : o.toString().replace("\n", "\n ");
    }
}
|
package mage.cards.d;
import mage.MageInt;
import mage.abilities.Ability;
import mage.abilities.common.SimpleStaticAbility;
import mage.abilities.costs.mana.ManaCostsImpl;
import mage.abilities.effects.ContinuousEffectImpl;
import mage.abilities.keyword.UnearthAbility;
import mage.cards.Card;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.*;
import mage.game.Game;
import mage.players.Player;
import java.util.UUID;
/**
* @author TheElk801
*/
public final class DregscapeSliver extends CardImpl {

    public DregscapeSliver(UUID ownerId, CardSetInfo setInfo) {
        // 2/2 Sliver creature costing {1}{B}.
        super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{1}{B}");
        this.subtype.add(SubType.SLIVER);
        this.power = new MageInt(2);
        this.toughness = new MageInt(2);
        // Each Sliver creature card in your graveyard has unearth {2}.
        this.addAbility(new SimpleStaticAbility(new DregscapeSliverEffect()));
        // Unearth {2}
        this.addAbility(new UnearthAbility(new ManaCostsImpl("{2}")));
    }

    // Copy constructor used by copy(); required by the framework's cloning scheme.
    private DregscapeSliver(final DregscapeSliver card) {
        super(card);
    }

    @Override
    public DregscapeSliver copy() {
        return new DregscapeSliver(this);
    }
}
/**
 * Continuous effect that grants unearth {2} to every Sliver creature card in
 * the controller's graveyard while the source is on the battlefield.
 */
class DregscapeSliverEffect extends ContinuousEffectImpl {

    DregscapeSliverEffect() {
        super(Duration.WhileOnBattlefield, Layer.AbilityAddingRemovingEffects_6, SubLayer.NA, Outcome.AddAbility);
        staticText = "Each Sliver creature card in your graveyard has unearth {2}";
    }

    private DregscapeSliverEffect(final DregscapeSliverEffect effect) {
        super(effect);
    }

    @Override
    public boolean apply(Game game, Ability source) {
        final Player player = game.getPlayer(source.getControllerId());
        if (player == null) {
            // Controller has left the game; nothing to apply.
            return false;
        }
        for (UUID graveyardCardId : player.getGraveyard()) {
            Card graveyardCard = game.getCard(graveyardCardId);
            if (graveyardCard != null
                    && graveyardCard.isCreature(game)
                    && graveyardCard.hasSubtype(SubType.SLIVER, game)) {
                // Attach a fresh unearth {2} ability to the matching card.
                UnearthAbility unearth = new UnearthAbility(new ManaCostsImpl("{2}"));
                unearth.setSourceId(graveyardCardId);
                unearth.setControllerId(graveyardCard.getOwnerId());
                game.getState().addOtherAbility(graveyardCard, unearth);
            }
        }
        return true;
    }

    @Override
    public DregscapeSliverEffect copy() {
        return new DregscapeSliverEffect(this);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.common.utils;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.constants.RemotingConstants;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.apache.dubbo.common.constants.CommonConstants.ANY_VALUE;
import static org.apache.dubbo.common.constants.CommonConstants.CLASSIFIER_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.COMMA_SPLIT_PATTERN;
import static org.apache.dubbo.common.constants.CommonConstants.ENABLED_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.GROUP_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.INTERFACE_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.PATH_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.PROTOCOL_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.REGISTRY_SPLIT_PATTERN;
import static org.apache.dubbo.common.constants.CommonConstants.REMOVE_VALUE_PREFIX;
import static org.apache.dubbo.common.constants.CommonConstants.VERSION_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.DUBBO_PROTOCOL;
import static org.apache.dubbo.common.constants.CommonConstants.HOST_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.PASSWORD_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.PORT_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.USERNAME_KEY;
import static org.apache.dubbo.common.constants.RegistryConstants.CATEGORY_KEY;
import static org.apache.dubbo.common.constants.RegistryConstants.CONFIGURATORS_CATEGORY;
import static org.apache.dubbo.common.constants.RegistryConstants.DEFAULT_CATEGORY;
import static org.apache.dubbo.common.constants.RegistryConstants.EMPTY_PROTOCOL;
import static org.apache.dubbo.common.constants.RegistryConstants.OVERRIDE_PROTOCOL;
import static org.apache.dubbo.common.constants.RegistryConstants.PROVIDERS_CATEGORY;
import static org.apache.dubbo.common.constants.RegistryConstants.ROUTERS_CATEGORY;
import static org.apache.dubbo.common.constants.RegistryConstants.ROUTE_PROTOCOL;
/**
 * Utility methods for parsing, converting and matching Dubbo {@link URL}s,
 * including legacy register/subscribe/notify map conversions.
 */
public class UrlUtils {

    /**
     * In the url string, marks where the parameter section begins.
     */
    private final static String URL_PARAM_STARTING_SYMBOL = "?";

    /**
     * Parses a single address into a URL, filling in any missing protocol,
     * username, password, port, path and parameters from {@code defaults}.
     *
     * @param address  address string; may contain comma-separated backups
     * @param defaults default values keyed by standard URL keys; may be null
     * @return the parsed URL, or null when the address is empty
     */
    public static URL parseURL(String address, Map<String, String> defaults) {
        if (address == null || address.length() == 0) {
            return null;
        }
        String url;
        if (address.contains("://") || address.contains(URL_PARAM_STARTING_SYMBOL)) {
            url = address;
        } else {
            // Comma-separated list: first entry is the primary address, the
            // rest are folded into the "backup" parameter.
            String[] addresses = COMMA_SPLIT_PATTERN.split(address);
            url = addresses[0];
            if (addresses.length > 1) {
                StringBuilder backup = new StringBuilder();
                for (int i = 1; i < addresses.length; i++) {
                    if (i > 1) {
                        backup.append(",");
                    }
                    backup.append(addresses[i]);
                }
                url += URL_PARAM_STARTING_SYMBOL + RemotingConstants.BACKUP_KEY + "=" + backup.toString();
            }
        }
        String defaultProtocol = defaults == null ? null : defaults.get(PROTOCOL_KEY);
        if (defaultProtocol == null || defaultProtocol.length() == 0) {
            defaultProtocol = DUBBO_PROTOCOL;
        }
        String defaultUsername = defaults == null ? null : defaults.get(USERNAME_KEY);
        String defaultPassword = defaults == null ? null : defaults.get(PASSWORD_KEY);
        int defaultPort = StringUtils.parseInteger(defaults == null ? null : defaults.get(PORT_KEY));
        String defaultPath = defaults == null ? null : defaults.get(PATH_KEY);
        // Copy defaults and strip the keys that map to dedicated URL parts, so
        // only genuine parameters remain for the merge below.
        Map<String, String> defaultParameters = defaults == null ? null : new HashMap<String, String>(defaults);
        if (defaultParameters != null) {
            defaultParameters.remove(PROTOCOL_KEY);
            defaultParameters.remove(USERNAME_KEY);
            defaultParameters.remove(PASSWORD_KEY);
            defaultParameters.remove(HOST_KEY);
            defaultParameters.remove(PORT_KEY);
            defaultParameters.remove(PATH_KEY);
        }
        URL u = URL.valueOf(url);
        boolean changed = false;
        String protocol = u.getProtocol();
        String username = u.getUsername();
        String password = u.getPassword();
        String host = u.getHost();
        int port = u.getPort();
        String path = u.getPath();
        Map<String, String> parameters = new HashMap<String, String>(u.getParameters());
        // Fill in each missing URL part from the defaults; only rebuild the URL
        // when something actually changed.
        if ((protocol == null || protocol.length() == 0) && defaultProtocol != null && defaultProtocol.length() > 0) {
            changed = true;
            protocol = defaultProtocol;
        }
        if ((username == null || username.length() == 0) && defaultUsername != null && defaultUsername.length() > 0) {
            changed = true;
            username = defaultUsername;
        }
        if ((password == null || password.length() == 0) && defaultPassword != null && defaultPassword.length() > 0) {
            changed = true;
            password = defaultPassword;
        }
        /*if (u.isAnyHost() || u.isLocalHost()) {
            changed = true;
            host = NetUtils.getLocalHost();
        }*/
        if (port <= 0) {
            if (defaultPort > 0) {
                changed = true;
                port = defaultPort;
            } else {
                // No port anywhere: fall back to the hard-coded default 9090.
                changed = true;
                port = 9090;
            }
        }
        if (path == null || path.length() == 0) {
            if (defaultPath != null && defaultPath.length() > 0) {
                changed = true;
                path = defaultPath;
            }
        }
        if (defaultParameters != null && defaultParameters.size() > 0) {
            for (Map.Entry<String, String> entry : defaultParameters.entrySet()) {
                String key = entry.getKey();
                String defaultValue = entry.getValue();
                if (defaultValue != null && defaultValue.length() > 0) {
                    String value = parameters.get(key);
                    if (StringUtils.isEmpty(value)) {
                        changed = true;
                        parameters.put(key, defaultValue);
                    }
                }
            }
        }
        if (changed) {
            u = new URL(protocol, username, password, host, port, path, parameters);
        }
        return u;
    }

    /**
     * Parses a registry address list into URLs, applying {@code defaults} to each.
     *
     * @return the parsed URLs, or null when the address is empty
     */
    public static List<URL> parseURLs(String address, Map<String, String> defaults) {
        if (address == null || address.length() == 0) {
            return null;
        }
        // Split the address value by semicolon (;) or vertical bar (|).
        String[] addresses = REGISTRY_SPLIT_PATTERN.split(address);
        if (addresses == null || addresses.length == 0) {
            return null; //here won't be empty
        }
        List<URL> registries = new ArrayList<URL>();
        for (String addr : addresses) {
            registries.add(parseURL(addr, defaults));
        }
        return registries;
    }

    /**
     * Converts a legacy register map keyed by bare interface names into one keyed
     * by "group/interface:version", folding group/version query params into the key.
     * Keys that already contain ':' or '/' are passed through untouched.
     */
    public static Map<String, Map<String, String>> convertRegister(Map<String, Map<String, String>> register) {
        Map<String, Map<String, String>> newRegister = new HashMap<String, Map<String, String>>();
        for (Map.Entry<String, Map<String, String>> entry : register.entrySet()) {
            String serviceName = entry.getKey();
            Map<String, String> serviceUrls = entry.getValue();
            if (!serviceName.contains(":") && !serviceName.contains("/")) {
                for (Map.Entry<String, String> entry2 : serviceUrls.entrySet()) {
                    String serviceUrl = entry2.getKey();
                    String serviceQuery = entry2.getValue();
                    Map<String, String> params = StringUtils.parseQueryString(serviceQuery);
                    String group = params.get("group");
                    String version = params.get("version");
                    //params.remove("group");
                    //params.remove("version");
                    String name = serviceName;
                    if (group != null && group.length() > 0) {
                        name = group + "/" + name;
                    }
                    if (version != null && version.length() > 0) {
                        name = name + ":" + version;
                    }
                    Map<String, String> newUrls = newRegister.get(name);
                    if (newUrls == null) {
                        newUrls = new HashMap<String, String>();
                        newRegister.put(name, newUrls);
                    }
                    newUrls.put(serviceUrl, StringUtils.toQueryString(params));
                }
            } else {
                newRegister.put(serviceName, serviceUrls);
            }
        }
        return newRegister;
    }

    /**
     * Converts a legacy subscribe map keyed by bare interface names into one keyed
     * by "group/interface:version" (same keying rule as {@link #convertRegister}).
     */
    public static Map<String, String> convertSubscribe(Map<String, String> subscribe) {
        Map<String, String> newSubscribe = new HashMap<String, String>();
        for (Map.Entry<String, String> entry : subscribe.entrySet()) {
            String serviceName = entry.getKey();
            String serviceQuery = entry.getValue();
            if (!serviceName.contains(":") && !serviceName.contains("/")) {
                Map<String, String> params = StringUtils.parseQueryString(serviceQuery);
                String group = params.get("group");
                String version = params.get("version");
                //params.remove("group");
                //params.remove("version");
                String name = serviceName;
                if (group != null && group.length() > 0) {
                    name = group + "/" + name;
                }
                if (version != null && version.length() > 0) {
                    name = name + ":" + version;
                }
                newSubscribe.put(name, StringUtils.toQueryString(params));
            } else {
                newSubscribe.put(serviceName, serviceQuery);
            }
        }
        return newSubscribe;
    }

    /**
     * Inverse of {@link #convertRegister}: splits "group/interface:version" keys
     * back into a bare interface key plus group/version query parameters.
     */
    public static Map<String, Map<String, String>> revertRegister(Map<String, Map<String, String>> register) {
        Map<String, Map<String, String>> newRegister = new HashMap<String, Map<String, String>>();
        for (Map.Entry<String, Map<String, String>> entry : register.entrySet()) {
            String serviceName = entry.getKey();
            Map<String, String> serviceUrls = entry.getValue();
            if (serviceName.contains(":") || serviceName.contains("/")) {
                for (Map.Entry<String, String> entry2 : serviceUrls.entrySet()) {
                    String serviceUrl = entry2.getKey();
                    String serviceQuery = entry2.getValue();
                    Map<String, String> params = StringUtils.parseQueryString(serviceQuery);
                    String name = serviceName;
                    // Leading "group/" prefix becomes the "group" parameter.
                    int i = name.indexOf('/');
                    if (i >= 0) {
                        params.put("group", name.substring(0, i));
                        name = name.substring(i + 1);
                    }
                    // Trailing ":version" suffix becomes the "version" parameter.
                    i = name.lastIndexOf(':');
                    if (i >= 0) {
                        params.put("version", name.substring(i + 1));
                        name = name.substring(0, i);
                    }
                    Map<String, String> newUrls = newRegister.get(name);
                    if (newUrls == null) {
                        newUrls = new HashMap<String, String>();
                        newRegister.put(name, newUrls);
                    }
                    newUrls.put(serviceUrl, StringUtils.toQueryString(params));
                }
            } else {
                newRegister.put(serviceName, serviceUrls);
            }
        }
        return newRegister;
    }

    /**
     * Inverse of {@link #convertSubscribe}: splits "group/interface:version" keys
     * back into a bare interface key plus group/version query parameters.
     */
    public static Map<String, String> revertSubscribe(Map<String, String> subscribe) {
        Map<String, String> newSubscribe = new HashMap<String, String>();
        for (Map.Entry<String, String> entry : subscribe.entrySet()) {
            String serviceName = entry.getKey();
            String serviceQuery = entry.getValue();
            if (serviceName.contains(":") || serviceName.contains("/")) {
                Map<String, String> params = StringUtils.parseQueryString(serviceQuery);
                String name = serviceName;
                int i = name.indexOf('/');
                if (i >= 0) {
                    params.put("group", name.substring(0, i));
                    name = name.substring(i + 1);
                }
                i = name.lastIndexOf(':');
                if (i >= 0) {
                    params.put("version", name.substring(i + 1));
                    name = name.substring(0, i);
                }
                newSubscribe.put(name, StringUtils.toQueryString(params));
            } else {
                newSubscribe.put(serviceName, serviceQuery);
            }
        }
        return newSubscribe;
    }

    /**
     * Re-keys a notify map from bare interface names to "group/interface:version",
     * like {@link #convertRegister} but for notification payloads. Returns the
     * input unchanged when it is null or empty.
     */
    public static Map<String, Map<String, String>> revertNotify(Map<String, Map<String, String>> notify) {
        if (notify != null && notify.size() > 0) {
            Map<String, Map<String, String>> newNotify = new HashMap<String, Map<String, String>>();
            for (Map.Entry<String, Map<String, String>> entry : notify.entrySet()) {
                String serviceName = entry.getKey();
                Map<String, String> serviceUrls = entry.getValue();
                if (!serviceName.contains(":") && !serviceName.contains("/")) {
                    if (serviceUrls != null && serviceUrls.size() > 0) {
                        for (Map.Entry<String, String> entry2 : serviceUrls.entrySet()) {
                            String url = entry2.getKey();
                            String query = entry2.getValue();
                            Map<String, String> params = StringUtils.parseQueryString(query);
                            String group = params.get("group");
                            String version = params.get("version");
                            // params.remove("group");
                            // params.remove("version");
                            String name = serviceName;
                            if (group != null && group.length() > 0) {
                                name = group + "/" + name;
                            }
                            if (version != null && version.length() > 0) {
                                name = name + ":" + version;
                            }
                            Map<String, String> newUrls = newNotify.get(name);
                            if (newUrls == null) {
                                newUrls = new HashMap<String, String>();
                                newNotify.put(name, newUrls);
                            }
                            newUrls.put(url, StringUtils.toQueryString(params));
                        }
                    }
                } else {
                    newNotify.put(serviceName, serviceUrls);
                }
            }
            return newNotify;
        }
        return notify;
    }

    //compatible for dubbo-2.0.0
    /**
     * Expands bare interface names in a forbid list to full service keys by
     * matching them against the subscribed URLs. Names already containing
     * ':' or '/' are kept as-is.
     */
    public static List<String> revertForbid(List<String> forbid, Set<URL> subscribed) {
        if (CollectionUtils.isNotEmpty(forbid)) {
            List<String> newForbid = new ArrayList<String>();
            for (String serviceName : forbid) {
                if (!serviceName.contains(":") && !serviceName.contains("/")) {
                    for (URL url : subscribed) {
                        if (serviceName.equals(url.getServiceInterface())) {
                            newForbid.add(url.getServiceKey());
                            break;
                        }
                    }
                } else {
                    newForbid.add(serviceName);
                }
            }
            return newForbid;
        }
        return forbid;
    }

    /**
     * Builds an "empty://" URL for the given service key and category, splitting
     * any "group/" prefix and ":version" suffix out of the key into parameters.
     */
    public static URL getEmptyUrl(String service, String category) {
        String group = null;
        String version = null;
        int i = service.indexOf('/');
        if (i > 0) {
            group = service.substring(0, i);
            service = service.substring(i + 1);
        }
        i = service.lastIndexOf(':');
        if (i > 0) {
            version = service.substring(i + 1);
            service = service.substring(0, i);
        }
        return URL.valueOf(EMPTY_PROTOCOL + "://0.0.0.0/" + service + URL_PARAM_STARTING_SYMBOL
            + CATEGORY_KEY + "=" + category
            + (group == null ? "" : "&" + GROUP_KEY + "=" + group)
            + (version == null ? "" : "&" + VERSION_KEY + "=" + version));
    }

    /**
     * Checks whether {@code category} matches the consumer's category filter:
     * empty filter means "default category only"; "*" matches everything; a
     * "-"-prefixed filter is an exclusion list; otherwise plain containment.
     */
    public static boolean isMatchCategory(String category, String categories) {
        if (categories == null || categories.length() == 0) {
            return DEFAULT_CATEGORY.equals(category);
        } else if (categories.contains(ANY_VALUE)) {
            return true;
        } else if (categories.contains(REMOVE_VALUE_PREFIX)) {
            return !categories.contains(REMOVE_VALUE_PREFIX + category);
        } else {
            return categories.contains(category);
        }
    }

    /**
     * Decides whether a provider URL matches a consumer URL by interface,
     * category, enabled flag, group, version and classifier.
     */
    public static boolean isMatch(URL consumerUrl, URL providerUrl) {
        String consumerInterface = consumerUrl.getServiceInterface();
        String providerInterface = providerUrl.getServiceInterface();
        //FIXME accept providerUrl with '*' as interface name, after carefully thought about all possible scenarios I think it's ok to add this condition.
        if (!(ANY_VALUE.equals(consumerInterface)
                || ANY_VALUE.equals(providerInterface)
                || StringUtils.isEquals(consumerInterface, providerInterface))) {
            return false;
        }
        if (!isMatchCategory(providerUrl.getParameter(CATEGORY_KEY, DEFAULT_CATEGORY),
                consumerUrl.getParameter(CATEGORY_KEY, DEFAULT_CATEGORY))) {
            return false;
        }
        // A disabled provider only matches a consumer that explicitly asks for any.
        if (!providerUrl.getParameter(ENABLED_KEY, true)
                && !ANY_VALUE.equals(consumerUrl.getParameter(ENABLED_KEY))) {
            return false;
        }
        String consumerGroup = consumerUrl.getParameter(GROUP_KEY);
        String consumerVersion = consumerUrl.getParameter(VERSION_KEY);
        String consumerClassifier = consumerUrl.getParameter(CLASSIFIER_KEY, ANY_VALUE);
        String providerGroup = providerUrl.getParameter(GROUP_KEY);
        String providerVersion = providerUrl.getParameter(VERSION_KEY);
        String providerClassifier = providerUrl.getParameter(CLASSIFIER_KEY, ANY_VALUE);
        return (ANY_VALUE.equals(consumerGroup) || StringUtils.isEquals(consumerGroup, providerGroup) || StringUtils.isContains(consumerGroup, providerGroup))
            && (ANY_VALUE.equals(consumerVersion) || StringUtils.isEquals(consumerVersion, providerVersion))
            && (consumerClassifier == null || ANY_VALUE.equals(consumerClassifier) || StringUtils.isEquals(consumerClassifier, providerClassifier));
    }

    /**
     * Glob match where a "$"-prefixed pattern is first resolved to a parameter
     * value from {@code param}.
     */
    public static boolean isMatchGlobPattern(String pattern, String value, URL param) {
        if (param != null && pattern.startsWith("$")) {
            pattern = param.getRawParameter(pattern.substring(1));
        }
        return isMatchGlobPattern(pattern, value);
    }

    /**
     * Matches {@code value} against a pattern containing at most one "*"
     * wildcard (prefix, suffix, middle, or lone "*").
     */
    public static boolean isMatchGlobPattern(String pattern, String value) {
        if ("*".equals(pattern)) {
            return true;
        }
        if (StringUtils.isEmpty(pattern) && StringUtils.isEmpty(value)) {
            return true;
        }
        if (StringUtils.isEmpty(pattern) || StringUtils.isEmpty(value)) {
            return false;
        }
        int i = pattern.lastIndexOf('*');
        // doesn't find "*"
        if (i == -1) {
            return value.equals(pattern);
        }
        // "*" is at the end
        else if (i == pattern.length() - 1) {
            return value.startsWith(pattern.substring(0, i));
        }
        // "*" is at the beginning
        else if (i == 0) {
            return value.endsWith(pattern.substring(i + 1));
        }
        // "*" is in the middle
        else {
            String prefix = pattern.substring(0, i);
            String suffix = pattern.substring(i + 1);
            return value.startsWith(prefix) && value.endsWith(suffix);
        }
    }

    /**
     * Matches two URLs on interface (exact) plus group and version (wildcard-aware
     * via {@link #isItemMatch}).
     */
    public static boolean isServiceKeyMatch(URL pattern, URL value) {
        return pattern.getParameter(INTERFACE_KEY).equals(
                value.getParameter(INTERFACE_KEY))
            && isItemMatch(pattern.getParameter(GROUP_KEY),
                value.getParameter(GROUP_KEY))
            && isItemMatch(pattern.getParameter(VERSION_KEY),
                value.getParameter(VERSION_KEY));
    }

    /**
     * Returns the subset of {@code urls} accepted by {@code predicate}.
     */
    public static List<URL> classifyUrls(List<URL> urls, Predicate<URL> predicate) {
        return urls.stream().filter(predicate).collect(Collectors.toList());
    }

    /** True when the URL represents a configurator (override protocol or category). */
    public static boolean isConfigurator(URL url) {
        return OVERRIDE_PROTOCOL.equals(url.getProtocol()) ||
            CONFIGURATORS_CATEGORY.equals(url.getParameter(CATEGORY_KEY, DEFAULT_CATEGORY));
    }

    /** True when the URL represents a router rule (route protocol or category). */
    public static boolean isRoute(URL url) {
        return ROUTE_PROTOCOL.equals(url.getProtocol()) ||
            ROUTERS_CATEGORY.equals(url.getParameter(CATEGORY_KEY, DEFAULT_CATEGORY));
    }

    /** True when the URL represents a service provider (not override/route). */
    public static boolean isProvider(URL url) {
        return !OVERRIDE_PROTOCOL.equals(url.getProtocol()) &&
            !ROUTE_PROTOCOL.equals(url.getProtocol()) &&
            PROVIDERS_CATEGORY.equals(url.getParameter(CATEGORY_KEY, PROVIDERS_CATEGORY));
    }

    /**
     * Check if the given value matches the given pattern. The pattern supports wildcard "*".
     *
     * @param pattern pattern
     * @param value   value
     * @return true if match otherwise false
     */
    static boolean isItemMatch(String pattern, String value) {
        if (pattern == null) {
            // A null pattern only matches a null value.
            return value == null;
        } else {
            return "*".equals(pattern) || pattern.equals(value);
        }
    }
}
|
package io.openindoormap.domain.converter;
import io.openindoormap.domain.common.Search;
import lombok.*;
import org.springframework.format.annotation.DateTimeFormat;
import java.math.BigDecimal;
import java.time.LocalDateTime;
/**
 * F4D converter conversion job.
 * @author jeongdae
 *
 */
@ToString(callSuper = true)
@Builder
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class ConverterJob extends Search {
    /**
     * List of uploaded items targeted for F4D conversion; uploadDataId values joined with ','.
     */
    private String converterCheckIds;
    // Used for validation checks of a single file during conversion.
    private String dataType;
    // Number of conversion files included in this job.
    private Integer converterFileCount;
    private Long converterJobFileId;
    /****** validator ********/
    private String methodMode;
    // Primary Key
    private Long converterJobId;
    // Upload unique key.
    private Long uploadDataId;
    // [duplicated] admin : data group for administrators, user : data group for regular users
    private String dataGroupTarget;
    // User unique id.
    private String userId;
    // title
    private String title;
    // Conversion type. basic : default, building : building, extra-big-building : very large building, point-cloud : point cloud data
    private String converterTemplate;
    // Unit scale factor: the unit that 1 in the design file represents. Default 1 = 0.01m.
    private BigDecimal usf;
    // Height direction. Y when the y-axis points toward the building ceiling. default = N
    private String yAxisUp;

    // View accessor exposing yAxisUp under a different name (kept for existing templates).
    public String getViewYAxisUp() {
        return this.yAxisUp;
    }

    // Number of target files.
    private Integer fileCount;
    // Status. ready : prepared, success : succeeded, waiting : pending approval, fail : failed
    private String status;
    // Status as an enum.
    // NOTE(review): field name shadows its type name (ConverterJobStatus) and is
    // UpperCamelCase; renaming would change the Lombok-generated accessors, so it
    // is left as-is and only flagged here.
    private ConverterJobStatus ConverterJobStatus;
    // Aggregated count of conversion statuses.
    private Long statusCount;
    // Error code.
    private String errorCode;
    // Year
    private String year;
    // Month
    private String month;
    // Day
    private String day;
    // Week of the year
    private String yearWeek;
    // Week of the month
    private String week;
    // Hour
    private String hour;
    // Minute
    private String minute;
    // Last-modified timestamp.
    @DateTimeFormat(pattern="yyyy-MM-dd HH:mm:ss")
    private LocalDateTime updateDate;
    // Creation timestamp.
    @DateTimeFormat(pattern="yyyy-MM-dd HH:mm:ss")
    private LocalDateTime insertDate;
}
|
package com.mcupdater.mods.igideepresonance;
import net.minecraftforge.fml.common.FMLCommonHandler;
import net.minecraftforge.fml.common.Loader;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.ModMetadata;
import net.minecraftforge.fml.common.event.FMLPostInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import net.minecraftforge.fml.relauncher.Side;
@Mod(useMetadata = true, modid = "IGI|DeepResonanceIntegration")
public class IGIDeepResonance
{
    // Mod metadata captured during pre-init for later use.
    public static ModMetadata metadata;

    @Mod.EventHandler
    public void preInit(FMLPreInitializationEvent event) {
        metadata = event.getModMetadata();
    }

    @Mod.EventHandler
    public void postInit(FMLPostInitializationEvent event) {
        // Tooltip/tag registration is client-side only; skip on dedicated servers.
        if (FMLCommonHandler.instance().getEffectiveSide() != Side.CLIENT) {
            return;
        }
        // Only register integration when the Deep Resonance mod is present.
        if (Loader.isModLoaded("deepresonance")) {
            TagDeepResonance.register();
        }
    }
}
|
package me.aias.example.utils;
import ai.djl.Model;
import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDArrays;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;
import ai.djl.ndarray.types.Shape;
import ai.djl.paddlepaddle.engine.PpNDArray;
import ai.djl.translate.Batchifier;
import ai.djl.translate.Translator;
import ai.djl.translate.TranslatorContext;
import ai.djl.util.Utils;
import org.apache.commons.lang3.StringUtils;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
 * Translator for the PaddlePaddle LAC (Lexical Analysis of Chinese) model.
 *
 * <p>Encodes an input sentence into the word-id LoD tensor the model expects,
 * and decodes the model's tag ids back into a pair of arrays: segmented words
 * and their corresponding tags.
 */
public final class LacTranslator implements Translator<String, String[][]> {

    LacTranslator() {}

    // word -> id, loaded from lac/word.dic
    private Map<String, String> word2id_dict = new HashMap<String, String>();
    // id -> word (reverse of word2id_dict)
    private Map<String, String> id2word_dict = new HashMap<String, String>();
    // label -> id, loaded from lac/tag.dic
    private Map<String, String> label2id_dict = new HashMap<String, String>();
    // id -> label (reverse of label2id_dict)
    private Map<String, String> id2label_dict = new HashMap<String, String>();
    // character normalization table, loaded from lac/q2b.dic
    private Map<String, String> word_replace_dict = new HashMap<String, String>();
    // id used for out-of-vocabulary characters
    private String oov_id;
    // last input sentence, kept so processOutput can re-align tags to characters
    private String input;

    /**
     * Loads the word, tag, and normalization dictionaries from the model artifacts.
     *
     * @param ctx translator context giving access to the model's artifacts
     * @throws IOException if a dictionary file cannot be read
     */
    @Override
    public void prepare(TranslatorContext ctx) throws IOException {
        Model model = ctx.getModel();
        try (InputStream is = model.getArtifact("lac/word.dic").openStream()) {
            // Each line: "<id> <word>"; a line with only an id maps the empty string.
            for (String line : Utils.readLines(is, true)) {
                // Fixed: the original filter used `word != ""` (reference
                // comparison), which never excluded anything.
                if (line == null || line.isEmpty()) {
                    continue;
                }
                String[] ws = line.split(" ");
                if (ws.length == 1) {
                    word2id_dict.put("", ws[0]); // word is key, id is value
                    id2word_dict.put(ws[0], "");
                } else {
                    word2id_dict.put(ws[1], ws[0]); // word is key, id is value
                    id2word_dict.put(ws[0], ws[1]);
                }
            }
        }
        try (InputStream is = model.getArtifact("lac/tag.dic").openStream()) {
            // Each line: "<id> <label>". Skipping empty lines also prevents the
            // ArrayIndexOutOfBounds the original would hit on a blank line.
            for (String line : Utils.readLines(is, true)) {
                if (line == null || line.isEmpty()) {
                    continue;
                }
                String[] ws = line.split(" ");
                label2id_dict.put(ws[1], ws[0]); // label is key, id is value
                id2label_dict.put(ws[0], ws[1]);
            }
        }
        try (InputStream is = model.getArtifact("lac/q2b.dic").openStream()) {
            // Each line: "<from> <to>" character pair for normalization.
            for (String line : Utils.readLines(is, true)) {
                if (StringUtils.isBlank(line)) {
                    // Blank line encodes a space-to-space mapping.
                    // NOTE(review): the key may be intended as a full-width
                    // space — confirm against the original dictionary file.
                    word_replace_dict.put(" ", " ");
                    continue;
                }
                String[] ws = line.split(" ");
                if (ws.length == 1) {
                    // Only the source character present: normalize it to "".
                    // (The original `ws[0] != null` check could never fail; its
                    // else branch read ws[1] out of bounds and was dead code.)
                    word_replace_dict.put(ws[0], "");
                } else {
                    word_replace_dict.put(ws[0], ws[1]);
                }
            }
        }
        oov_id = word2id_dict.get("OOV");
    }

    /**
     * Encodes the sentence as a (length, 1) word-id tensor named "words" with
     * LoD information marking a single sequence spanning the whole input.
     */
    @Override
    public NDList processInput(TranslatorContext ctx, String input) {
        this.input = input;
        NDManager manager = ctx.getNDManager();
        List<Long> lodList = new ArrayList<>(2);
        lodList.add(0L); // was `new Long(0)` — deprecated boxing constructor
        List<Long> wordIds = tokenizeSingleString(manager, input, lodList);
        int size = lodList.get(lodList.size() - 1).intValue();
        long[] array = new long[size];
        for (int i = 0; i < size; i++) {
            array[i] = i < wordIds.size() ? wordIds.get(i) : 0L;
        }
        NDArray ndArray = manager.create(array, new Shape(lodList.get(lodList.size() - 1), 1));
        ndArray.setName("words");
        // One sequence covering [0, size).
        long[][] lod = new long[1][2];
        lod[0][0] = 0;
        lod[0][1] = lodList.get(lodList.size() - 1);
        ((PpNDArray) ndArray).setLoD(lod);
        return new NDList(ndArray);
    }

    /**
     * Decodes tag ids and merges single characters back into words.
     *
     * <p>Example: input '今天是个好日子' with decoded tags
     * ['TIME-B', 'TIME-I', 'v-B', 'q-B', 'n-B', 'n-I', 'n-I'] yields
     * words ['今天', '是', '个', '好日子'] and tags ['TIME', 'v', 'q', 'n'].
     *
     * @return a 2-row array: row 0 the segmented words, row 1 their tags
     */
    @Override
    public String[][] processOutput(TranslatorContext ctx, NDList list) {
        // Split the original sentence (spaces removed) into single characters.
        String[] chars = input.replace(" ", "").split("");
        List<String> sentOut = new ArrayList<>();
        List<String> tagsOut = new ArrayList<>();
        long[] ids = list.get(0).toLongArray();
        List<String> tags = new ArrayList<>(ids.length);
        for (long id : ids) {
            tags.add(id2label_dict.get(String.valueOf(id)));
        }
        // A "-B" (begin) or "-S" (single) tag starts a new word; anything else
        // extends the previous word. The 2-char suffix is stripped from tags.
        for (int i = 0; i < tags.size(); i++) {
            String tag = tags.get(i);
            if (sentOut.isEmpty() || tag.endsWith("B") || tag.endsWith("S")) {
                sentOut.add(chars[i]);
                tagsOut.add(tag.substring(0, tag.length() - 2));
            } else {
                sentOut.set(sentOut.size() - 1, sentOut.get(sentOut.size() - 1) + chars[i]);
                tagsOut.set(tagsOut.size() - 1, tag.substring(0, tag.length() - 2));
            }
        }
        String[][] result = new String[2][sentOut.size()];
        result[0] = sentOut.toArray(new String[0]);
        result[1] = tagsOut.toArray(new String[0]);
        return result;
    }

    /**
     * Converts one sentence into word ids (after character normalization),
     * appending the sequence length to {@code lod}.
     */
    private List<Long> tokenizeSingleString(NDManager manager, String input, List<Long> lod) {
        List<Long> wordIds = new ArrayList<>();
        for (String word : input.replace(" ", "").split("")) {
            String normalized = word_replace_dict.get(word);
            if (!StringUtils.isBlank(normalized)) {
                word = normalized;
            }
            String wordId = word2id_dict.get(word);
            wordIds.add(Long.valueOf(StringUtils.isBlank(wordId) ? oov_id : wordId));
        }
        lod.add((long) wordIds.size());
        return wordIds;
    }

    // Batch helper: stacks the index-th tensor of each tokenized input.
    // Currently unused by this translator.
    private NDArray stackInputs(List<NDList> tokenizedInputs, int index, String inputName) {
        NDArray stacked =
                NDArrays.stack(
                        tokenizedInputs.stream()
                                .map(list -> list.get(index).expandDims(0))
                                .collect(Collectors.toCollection(NDList::new)));
        stacked.setName(inputName);
        return stacked;
    }

    // Batch variant of tokenization. Currently unused by this translator.
    private NDArray tokenizeSingle(NDManager manager, String[] inputs, List<Integer> lod) {
        List<Integer> wordIds = new ArrayList<>();
        for (int i = 0; i < inputs.length; i++) {
            for (String word : inputs[i].replace(" ", "").split("")) {
                String normalized = word_replace_dict.get(word);
                if (!StringUtils.isBlank(normalized)) {
                    word = normalized;
                }
                String wordId = word2id_dict.get(word);
                wordIds.add(Integer.valueOf(StringUtils.isBlank(wordId) ? oov_id : wordId));
            }
            lod.add(wordIds.size() + lod.get(i));
        }
        return manager.create(wordIds.stream().mapToLong(Integer::longValue).toArray());
    }

    /** LoD-carrying inputs cannot be auto-batched, so batching is disabled. */
    @Override
    public Batchifier getBatchifier() {
        return null;
    }
}
|
/*
* Copyright 2004-2019 H2 Group. Multiple-Licensed under the MPL 2.0,
* and the EPL 1.0 (https://h2database.com/html/license.html).
* Initial Developer: H2 Group
*/
package org.h2.util;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.TreeMap;
import java.util.Vector;
import org.h2.store.fs.FileUtils;
/**
* Sorted properties file.
* This implementation requires that store() internally calls keys().
*/
public class SortedProperties extends Properties {

    private static final long serialVersionUID = 1L;

    /**
     * Enumerate the property keys in alphabetical order. store() relies on
     * this override to write the entries sorted.
     */
    @Override
    public synchronized Enumeration<Object> keys() {
        Vector<String> names = new Vector<>();
        for (Object key : keySet()) {
            names.add(key.toString());
        }
        Collections.sort(names);
        return new Vector<Object>(names).elements();
    }

    /**
     * Get a boolean property value from a properties object.
     *
     * @param prop the properties object
     * @param key the key
     * @param def the default value
     * @return the value if set, or the default value if not
     */
    public static boolean getBooleanProperty(Properties prop, String key,
            boolean def) {
        String raw = prop.getProperty(key, null);
        try {
            return Utils.parseBoolean(raw, def, true);
        } catch (IllegalArgumentException e) {
            // Best-effort: report and fall back to the default.
            e.printStackTrace();
            return def;
        }
    }

    /**
     * Get an int property value from a properties object.
     *
     * @param prop the properties object
     * @param key the key
     * @param def the default value
     * @return the value if set, or the default value if not
     */
    public static int getIntProperty(Properties prop, String key, int def) {
        try {
            return Integer.decode(prop.getProperty(key, Integer.toString(def)));
        } catch (Exception e) {
            // Best-effort: report and fall back to the default.
            e.printStackTrace();
            return def;
        }
    }

    /**
     * Get a string property value from a properties object.
     *
     * @param prop the properties object
     * @param key the key
     * @param def the default value
     * @return the value if set, or the default value if not
     */
    public static String getStringProperty(Properties prop, String key, String def) {
        return prop.getProperty(key, def);
    }

    /**
     * Load a properties object from a file; a missing file yields an empty
     * properties object.
     *
     * @param fileName the name of the properties file
     * @return the properties object
     */
    public static synchronized SortedProperties loadProperties(String fileName)
            throws IOException {
        SortedProperties result = new SortedProperties();
        if (!FileUtils.exists(fileName)) {
            return result;
        }
        try (InputStream in = FileUtils.newInputStream(fileName)) {
            result.load(in);
        }
        return result;
    }

    /**
     * Store a properties file. Lines starting with '#' (the header and date
     * stamp that Properties.store emits) are not written.
     *
     * @param fileName the target file name
     */
    public synchronized void store(String fileName) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        store(buffer, null);
        LineNumberReader reader = new LineNumberReader(new InputStreamReader(
                new ByteArrayInputStream(buffer.toByteArray()),
                StandardCharsets.ISO_8859_1));
        Writer target;
        try {
            target = new OutputStreamWriter(FileUtils.newOutputStream(fileName, false));
        } catch (Exception e) {
            throw new IOException(e.toString(), e);
        }
        try (PrintWriter writer = new PrintWriter(new BufferedWriter(target))) {
            for (String line = reader.readLine(); line != null; line = reader.readLine()) {
                if (!line.startsWith("#")) {
                    writer.print(line + "\n");
                }
            }
        }
    }

    /**
     * Convert the map to a list of lines in the form key=value, sorted by key.
     *
     * @return the lines
     */
    public synchronized String toLines() {
        StringBuilder result = new StringBuilder();
        new TreeMap<Object, Object>(this).forEach((key, value) ->
                result.append(key).append('=').append(value).append('\n'));
        return result.toString();
    }

    /**
     * Convert a String of key=value lines back into a properties object.
     *
     * @param s the string
     * @return the map
     */
    public static SortedProperties fromLines(String s) {
        SortedProperties result = new SortedProperties();
        for (String line : StringUtils.arraySplit(s, '\n', true)) {
            int separator = line.indexOf('=');
            if (separator > 0) {
                result.put(line.substring(0, separator), line.substring(separator + 1));
            }
        }
        return result;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.