text
stringlengths
7
1.01M
package com.recombee.api_client.api_requests;
/* This file is auto-generated, do not edit */

import java.util.Date;
import java.util.Map;
import java.util.HashMap;

import com.recombee.api_client.bindings.Logic;
import com.recombee.api_client.util.HTTPMethod;

/**
 * Adds a detail view of a given item made by a given user.
 */
public class AddDetailView extends Request {

    /** User who viewed the item. */
    protected String userId;
    /** Viewed item. */
    protected String itemId;
    /**
     * UTC timestamp of the view as ISO8601-1 pattern or UTC epoch time.
     * The default value is the current time.
     */
    protected Date timestamp;
    /** Duration of the view (optional — may be unset/null). */
    protected Long duration;
    /** Sets whether the given user/item should be created if not present in the database. */
    protected Boolean cascadeCreate;
    /**
     * If this detail view is based on a recommendation request, `recommId` is the id of the
     * clicked recommendation.
     */
    protected String recommId;
    /** A dictionary of additional data for the interaction. */
    protected Map<String, Object> additionalData;

    /**
     * Construct the request.
     * @param userId User who viewed the item
     * @param itemId Viewed item
     */
    public AddDetailView(String userId, String itemId) {
        this.userId = userId;
        this.itemId = itemId;
        // Default request timeout in milliseconds (inherited from Request).
        this.timeout = 1000;
    }

    /**
     * @param timestamp UTC timestamp of the view as ISO8601-1 pattern or UTC epoch time.
     *                  The default value is the current time.
     */
    public AddDetailView setTimestamp(Date timestamp) {
        this.timestamp = timestamp;
        return this;
    }

    /**
     * @param duration Duration of the view
     */
    public AddDetailView setDuration(long duration) {
        this.duration = duration;
        return this;
    }

    /**
     * @param cascadeCreate Sets whether the given user/item should be created if not present
     *                      in the database.
     */
    public AddDetailView setCascadeCreate(boolean cascadeCreate) {
        this.cascadeCreate = cascadeCreate;
        return this;
    }

    /**
     * @param recommId If this detail view is based on a recommendation request, `recommId` is
     *                 the id of the clicked recommendation.
     */
    public AddDetailView setRecommId(String recommId) {
        this.recommId = recommId;
        return this;
    }

    /**
     * @param additionalData A dictionary of additional data for the interaction.
     */
    public AddDetailView setAdditionalData(Map<String, Object> additionalData) {
        this.additionalData = additionalData;
        return this;
    }

    public String getUserId() {
        return this.userId;
    }

    public String getItemId() {
        return this.itemId;
    }

    public Date getTimestamp() {
        return this.timestamp;
    }

    /**
     * @return the duration of the view, or 0 if it was never set.
     *         (FIX: previously unboxed the nullable field directly, throwing a
     *         NullPointerException whenever {@link #setDuration(long)} had not been called;
     *         now null-guarded like {@link #getCascadeCreate()}.)
     */
    public long getDuration() {
        if (this.duration == null) return 0;
        return this.duration;
    }

    public boolean getCascadeCreate() {
        if (this.cascadeCreate == null) return false;
        return this.cascadeCreate;
    }

    public String getRecommId() {
        return this.recommId;
    }

    public Map<String, Object> getAdditionalData() {
        return this.additionalData;
    }

    /**
     * @return Used HTTP method
     */
    @Override
    public HTTPMethod getHTTPMethod() {
        return HTTPMethod.POST;
    }

    /**
     * @return URI to the endpoint including path parameters
     */
    @Override
    public String getPath() {
        return "/detailviews/";
    }

    /**
     * Get query parameters.
     * @return Values of query parameters (name of parameter: value of the parameter)
     */
    @Override
    public Map<String, Object> getQueryParameters() {
        HashMap<String, Object> params = new HashMap<String, Object>();
        return params;
    }

    /**
     * Get body parameters. Optional fields are only serialized when set.
     * @return Values of body parameters (name of parameter: value of the parameter)
     */
    @Override
    public Map<String, Object> getBodyParameters() {
        HashMap<String, Object> params = new HashMap<String, Object>();
        params.put("userId", this.userId);
        params.put("itemId", this.itemId);
        if (this.timestamp != null) {
            // API expects seconds (fractional) since epoch, not milliseconds.
            params.put("timestamp", this.timestamp.getTime() / 1000.0);
        }
        if (this.duration != null) {
            params.put("duration", this.duration);
        }
        if (this.cascadeCreate != null) {
            params.put("cascadeCreate", this.cascadeCreate);
        }
        if (this.recommId != null) {
            params.put("recommId", this.recommId);
        }
        if (this.additionalData != null) {
            params.put("additionalData", this.additionalData);
        }
        return params;
    }
}
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example.gsyvideoplayer.exosource;

import static java.lang.Math.max;
import static java.lang.Math.min;

import android.net.Uri;
import android.text.TextUtils;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlayerLibraryInfo;
import com.google.android.exoplayer2.upstream.BaseDataSource;
import com.google.android.exoplayer2.upstream.DataSourceException;
import com.google.android.exoplayer2.upstream.DataSpec;
import com.google.android.exoplayer2.upstream.DataSpec.HttpMethod;
import com.google.android.exoplayer2.upstream.HttpDataSource;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util;
import com.google.common.base.Predicate;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.net.HttpURLConnection;
import java.net.NoRouteToHostException;
import java.net.ProtocolException;
import java.net.URL;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;

/**
 * An {@link HttpDataSource} that uses Android's {@link HttpURLConnection}.
 *
 * <p>By default this implementation will not follow cross-protocol redirects (i.e. redirects from
 * HTTP to HTTPS or vice versa). Cross-protocol redirects can be enabled by using the {@link
 * #GSYDefaultHttpDataSource(String, int, int, boolean, RequestProperties)} constructor and passing
 * {@code true} for the {@code allowCrossProtocolRedirects} argument.
 *
 * <p>Note: HTTP request headers will be set using all parameters passed via (in order of decreasing
 * priority) the {@code dataSpec}, {@link #setRequestProperty} and the default parameters used to
 * construct the instance.
 *
 * <p><b>SECURITY NOTE(review):</b> for HTTPS URLs this class deliberately installs an
 * all-trusting {@link X509TrustManager} and {@link HostnameVerifier}, which disables TLS
 * certificate and hostname validation entirely (see {@link #makeConnection(URL, int, byte[],
 * long, long, boolean, boolean, Map)}). This makes connections vulnerable to
 * man-in-the-middle attacks and should only be used for media hosts with self-signed
 * certificates that are otherwise trusted.
 */
public class GSYDefaultHttpDataSource extends BaseDataSource implements HttpDataSource {

    /** The default connection timeout, in milliseconds. */
    public static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 8 * 1000;
    /** The default read timeout, in milliseconds. */
    public static final int DEFAULT_READ_TIMEOUT_MILLIS = 8 * 1000;

    private static final String TAG = "DefaultHttpDataSource";
    private static final int MAX_REDIRECTS = 20; // Same limit as okhttp.
    private static final int HTTP_STATUS_TEMPORARY_REDIRECT = 307;
    private static final int HTTP_STATUS_PERMANENT_REDIRECT = 308;
    private static final long MAX_BYTES_TO_DRAIN = 2048;
    private static final Pattern CONTENT_RANGE_HEADER =
            Pattern.compile("^bytes (\\d+)-(\\d+)/(\\d+)$");
    // Shared scratch buffer for skipInternal(), reused across instances to avoid reallocation.
    private static final AtomicReference<byte[]> skipBufferReference = new AtomicReference<>();

    private final boolean allowCrossProtocolRedirects;
    private final int connectTimeoutMillis;
    private final int readTimeoutMillis;
    private final String userAgent;
    @Nullable
    private final RequestProperties defaultRequestProperties;
    private final RequestProperties requestProperties;

    @Nullable
    private Predicate<String> contentTypePredicate;
    @Nullable
    private DataSpec dataSpec;
    @Nullable
    private HttpURLConnection connection;
    @Nullable
    private InputStream inputStream;
    private boolean opened;
    private int responseCode;
    private long bytesToSkip;
    private long bytesToRead;
    private long bytesSkipped;
    private long bytesRead;

    /** Creates an instance. */
    public GSYDefaultHttpDataSource() {
        this(
                ExoPlayerLibraryInfo.DEFAULT_USER_AGENT,
                DEFAULT_CONNECT_TIMEOUT_MILLIS,
                DEFAULT_READ_TIMEOUT_MILLIS);
    }

    /**
     * Creates an instance.
     *
     * @param userAgent The User-Agent string that should be used.
     */
    public GSYDefaultHttpDataSource(String userAgent) {
        this(userAgent, DEFAULT_CONNECT_TIMEOUT_MILLIS, DEFAULT_READ_TIMEOUT_MILLIS);
    }

    /**
     * Creates an instance.
     *
     * @param userAgent The User-Agent string that should be used.
     * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is
     *     interpreted as an infinite timeout.
     * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted
     *     as an infinite timeout.
     */
    public GSYDefaultHttpDataSource(String userAgent, int connectTimeoutMillis, int readTimeoutMillis) {
        this(
                userAgent,
                connectTimeoutMillis,
                readTimeoutMillis,
                /* allowCrossProtocolRedirects= */ false,
                /* defaultRequestProperties= */ null);
    }

    /**
     * Creates an instance.
     *
     * @param userAgent The User-Agent string that should be used.
     * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is
     *     interpreted as an infinite timeout. Pass {@link #DEFAULT_CONNECT_TIMEOUT_MILLIS} to use
     *     the default value.
     * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted
     *     as an infinite timeout. Pass {@link #DEFAULT_READ_TIMEOUT_MILLIS} to use the default
     *     value.
     * @param allowCrossProtocolRedirects Whether cross-protocol redirects (i.e. redirects from HTTP
     *     to HTTPS and vice versa) are enabled.
     * @param defaultRequestProperties The default request properties to be sent to the server as
     *     HTTP headers or {@code null} if not required.
     */
    public GSYDefaultHttpDataSource(
            String userAgent,
            int connectTimeoutMillis,
            int readTimeoutMillis,
            boolean allowCrossProtocolRedirects,
            @Nullable RequestProperties defaultRequestProperties) {
        super(/* isNetwork= */ true);
        this.userAgent = Assertions.checkNotEmpty(userAgent);
        this.requestProperties = new RequestProperties();
        this.connectTimeoutMillis = connectTimeoutMillis;
        this.readTimeoutMillis = readTimeoutMillis;
        this.allowCrossProtocolRedirects = allowCrossProtocolRedirects;
        this.defaultRequestProperties = defaultRequestProperties;
    }

    /**
     * Creates an instance.
     *
     * @param userAgent The User-Agent string that should be used.
     * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by
     *     the predicate then a {@link HttpDataSource.InvalidContentTypeException} is thrown from
     *     {@link #open(DataSpec)}.
     * @deprecated Use {@link #GSYDefaultHttpDataSource(String)} and {@link
     *     #setContentTypePredicate(Predicate)}.
     */
    @SuppressWarnings("deprecation")
    @Deprecated
    public GSYDefaultHttpDataSource(String userAgent, @Nullable Predicate<String> contentTypePredicate) {
        this(
                userAgent,
                contentTypePredicate,
                DEFAULT_CONNECT_TIMEOUT_MILLIS,
                DEFAULT_READ_TIMEOUT_MILLIS);
    }

    /**
     * Creates an instance.
     *
     * @param userAgent The User-Agent string that should be used.
     * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by
     *     the predicate then a {@link HttpDataSource.InvalidContentTypeException} is thrown from
     *     {@link #open(DataSpec)}.
     * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is
     *     interpreted as an infinite timeout.
     * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted
     *     as an infinite timeout.
     * @deprecated Use {@link #GSYDefaultHttpDataSource(String, int, int)} and {@link
     *     #setContentTypePredicate(Predicate)}.
     */
    @SuppressWarnings("deprecation")
    @Deprecated
    public GSYDefaultHttpDataSource(
            String userAgent,
            @Nullable Predicate<String> contentTypePredicate,
            int connectTimeoutMillis,
            int readTimeoutMillis) {
        this(
                userAgent,
                contentTypePredicate,
                connectTimeoutMillis,
                readTimeoutMillis,
                /* allowCrossProtocolRedirects= */ false,
                /* defaultRequestProperties= */ null);
    }

    /**
     * Creates an instance.
     *
     * @param userAgent The User-Agent string that should be used.
     * @param contentTypePredicate An optional {@link Predicate}. If a content type is rejected by
     *     the predicate then a {@link HttpDataSource.InvalidContentTypeException} is thrown from
     *     {@link #open(DataSpec)}.
     * @param connectTimeoutMillis The connection timeout, in milliseconds. A timeout of zero is
     *     interpreted as an infinite timeout. Pass {@link #DEFAULT_CONNECT_TIMEOUT_MILLIS} to use
     *     the default value.
     * @param readTimeoutMillis The read timeout, in milliseconds. A timeout of zero is interpreted
     *     as an infinite timeout. Pass {@link #DEFAULT_READ_TIMEOUT_MILLIS} to use the default
     *     value.
     * @param allowCrossProtocolRedirects Whether cross-protocol redirects (i.e. redirects from HTTP
     *     to HTTPS and vice versa) are enabled.
     * @param defaultRequestProperties The default request properties to be sent to the server as
     *     HTTP headers or {@code null} if not required.
     * @deprecated Use {@link #GSYDefaultHttpDataSource(String, int, int, boolean,
     *     RequestProperties)} and {@link #setContentTypePredicate(Predicate)}.
     */
    @Deprecated
    public GSYDefaultHttpDataSource(
            String userAgent,
            @Nullable Predicate<String> contentTypePredicate,
            int connectTimeoutMillis,
            int readTimeoutMillis,
            boolean allowCrossProtocolRedirects,
            @Nullable RequestProperties defaultRequestProperties) {
        super(/* isNetwork= */ true);
        this.userAgent = Assertions.checkNotEmpty(userAgent);
        this.contentTypePredicate = contentTypePredicate;
        this.requestProperties = new RequestProperties();
        this.connectTimeoutMillis = connectTimeoutMillis;
        this.readTimeoutMillis = readTimeoutMillis;
        this.allowCrossProtocolRedirects = allowCrossProtocolRedirects;
        this.defaultRequestProperties = defaultRequestProperties;
    }

    /**
     * Sets a content type {@link Predicate}. If a content type is rejected by the predicate then a
     * {@link HttpDataSource.InvalidContentTypeException} is thrown from {@link #open(DataSpec)}.
     *
     * @param contentTypePredicate The content type {@link Predicate}, or {@code null} to clear a
     *     predicate that was previously set.
     */
    public void setContentTypePredicate(@Nullable Predicate<String> contentTypePredicate) {
        this.contentTypePredicate = contentTypePredicate;
    }

    @Nullable
    @Override
    public Uri getUri() {
        return connection == null ? null : Uri.parse(connection.getURL().toString());
    }

    @Override
    public int getResponseCode() {
        return connection == null || responseCode <= 0 ? -1 : responseCode;
    }

    @NonNull
    @Override
    public Map<String, List<String>> getResponseHeaders() {
        return connection == null ? Collections.emptyMap() : connection.getHeaderFields();
    }

    @Override
    public void setRequestProperty(@NonNull String name, @NonNull String value) {
        Assertions.checkNotNull(name);
        Assertions.checkNotNull(value);
        requestProperties.set(name, value);
    }

    @Override
    public void clearRequestProperty(@NonNull String name) {
        Assertions.checkNotNull(name);
        requestProperties.remove(name);
    }

    @Override
    public void clearAllRequestProperties() {
        requestProperties.clear();
    }

    /**
     * Opens the source to read the specified data.
     *
     * @param dataSpec Defines the data to be read.
     * @return The number of bytes that can be read, or {@link C#LENGTH_UNSET} if unknown.
     * @throws HttpDataSourceException If the connection cannot be established or the response is
     *     invalid (non-2xx status, rejected content type).
     */
    @Override
    public long open(@NonNull DataSpec dataSpec) throws HttpDataSourceException {
        this.dataSpec = dataSpec;
        this.bytesRead = 0;
        this.bytesSkipped = 0;
        transferInitializing(dataSpec);
        try {
            connection = makeConnection(dataSpec);
        } catch (IOException e) {
            throw new HttpDataSourceException(
                    "Unable to connect", e, dataSpec, HttpDataSourceException.TYPE_OPEN);
        }
        String responseMessage;
        try {
            responseCode = connection.getResponseCode();
            responseMessage = connection.getResponseMessage();
        } catch (IOException e) {
            closeConnectionQuietly();
            throw new HttpDataSourceException(
                    "Unable to connect", e, dataSpec, HttpDataSourceException.TYPE_OPEN);
        }
        // Check for a valid response code.
        if (responseCode < 200 || responseCode > 299) {
            Map<String, List<String>> headers = connection.getHeaderFields();
            @Nullable InputStream errorStream = connection.getErrorStream();
            byte[] errorResponseBody;
            try {
                errorResponseBody =
                        errorStream != null ? Util.toByteArray(errorStream) : Util.EMPTY_BYTE_ARRAY;
            } catch (IOException e) {
                // FIX: previously the connection leaked on this path; close it before rethrowing.
                closeConnectionQuietly();
                throw new HttpDataSourceException(
                        "Error reading non-2xx response body",
                        e,
                        dataSpec,
                        HttpDataSourceException.TYPE_OPEN);
            }
            closeConnectionQuietly();
            InvalidResponseCodeException exception =
                    new InvalidResponseCodeException(
                            responseCode, responseMessage, headers, dataSpec, errorResponseBody);
            if (responseCode == 416) {
                exception.initCause(
                        new DataSourceException(DataSourceException.POSITION_OUT_OF_RANGE));
            }
            throw exception;
        }
        // Check for a valid content type.
        String contentType = connection.getContentType();
        if (contentTypePredicate != null && !contentTypePredicate.apply(contentType)) {
            closeConnectionQuietly();
            throw new InvalidContentTypeException(contentType, dataSpec);
        }
        // If we requested a range starting from a non-zero position and received a 200 rather than
        // a 206, then the server does not support partial requests. We'll need to manually skip to
        // the requested position.
        bytesToSkip = responseCode == 200 && dataSpec.position != 0 ? dataSpec.position : 0;
        // Determine the length of the data to be read, after skipping.
        boolean isCompressed = isCompressed(connection);
        if (!isCompressed) {
            if (dataSpec.length != C.LENGTH_UNSET) {
                bytesToRead = dataSpec.length;
            } else {
                long contentLength = getContentLength(connection);
                bytesToRead =
                        contentLength != C.LENGTH_UNSET
                                ? (contentLength - bytesToSkip)
                                : C.LENGTH_UNSET;
            }
        } else {
            // Gzip is enabled. If the server opts to use gzip then the content length in the
            // response will be that of the compressed data, which isn't what we want. Always use
            // the dataSpec length in this case.
            bytesToRead = dataSpec.length;
        }
        try {
            inputStream = connection.getInputStream();
            if (isCompressed) {
                inputStream = new GZIPInputStream(inputStream);
            }
        } catch (IOException e) {
            closeConnectionQuietly();
            throw new HttpDataSourceException(e, dataSpec, HttpDataSourceException.TYPE_OPEN);
        }
        opened = true;
        transferStarted(dataSpec);
        return bytesToRead;
    }

    @Override
    public int read(@NonNull byte[] buffer, int offset, int readLength)
            throws HttpDataSourceException {
        try {
            skipInternal();
            return readInternal(buffer, offset, readLength);
        } catch (IOException e) {
            throw new HttpDataSourceException(e, dataSpec, HttpDataSourceException.TYPE_READ);
        }
    }

    @Override
    public void close() throws HttpDataSourceException {
        try {
            if (inputStream != null) {
                maybeTerminateInputStream(connection, bytesRemaining());
                try {
                    inputStream.close();
                } catch (IOException e) {
                    throw new HttpDataSourceException(
                            e, dataSpec, HttpDataSourceException.TYPE_CLOSE);
                }
            }
        } finally {
            inputStream = null;
            closeConnectionQuietly();
            if (opened) {
                opened = false;
                transferEnded();
            }
        }
    }

    /**
     * Returns the current connection, or null if the source is not currently opened.
     *
     * @return The current open connection, or null.
     */
    @Nullable
    protected final HttpURLConnection getConnection() {
        return connection;
    }

    /**
     * Returns the number of bytes that have been skipped since the most recent call to
     * {@link #open(DataSpec)}.
     *
     * @return The number of bytes skipped.
     */
    protected final long bytesSkipped() {
        return bytesSkipped;
    }

    /**
     * Returns the number of bytes that have been read since the most recent call to
     * {@link #open(DataSpec)}.
     *
     * @return The number of bytes read.
     */
    protected final long bytesRead() {
        return bytesRead;
    }

    /**
     * Returns the number of bytes that are still to be read for the current {@link DataSpec}.
     * <p>
     * If the total length of the data being read is known, then this length minus
     * {@code bytesRead()} is returned. If the total length is unknown,
     * {@link C#LENGTH_UNSET} is returned.
     *
     * @return The remaining length, or {@link C#LENGTH_UNSET}.
     */
    protected final long bytesRemaining() {
        return bytesToRead == C.LENGTH_UNSET ? bytesToRead : bytesToRead - bytesRead;
    }

    /**
     * Establishes a connection, following redirects to do so where permitted.
     */
    private HttpURLConnection makeConnection(DataSpec dataSpec) throws IOException {
        URL url = new URL(dataSpec.uri.toString());
        @HttpMethod int httpMethod = dataSpec.httpMethod;
        @Nullable byte[] httpBody = dataSpec.httpBody;
        long position = dataSpec.position;
        long length = dataSpec.length;
        boolean allowGzip = dataSpec.isFlagSet(DataSpec.FLAG_ALLOW_GZIP);
        if (!allowCrossProtocolRedirects) {
            // HttpURLConnection disallows cross-protocol redirects, but otherwise performs
            // redirection automatically. This is the behavior we want, so use it.
            return makeConnection(
                    url,
                    httpMethod,
                    httpBody,
                    position,
                    length,
                    allowGzip,
                    /* followRedirects= */ true,
                    dataSpec.httpRequestHeaders);
        }
        // We need to handle redirects ourselves to allow cross-protocol redirects.
        int redirectCount = 0;
        while (redirectCount++ <= MAX_REDIRECTS) {
            HttpURLConnection connection =
                    makeConnection(
                            url,
                            httpMethod,
                            httpBody,
                            position,
                            length,
                            allowGzip,
                            /* followRedirects= */ false,
                            dataSpec.httpRequestHeaders);
            int responseCode = connection.getResponseCode();
            String location = connection.getHeaderField("Location");
            if ((httpMethod == DataSpec.HTTP_METHOD_GET || httpMethod == DataSpec.HTTP_METHOD_HEAD)
                    && (responseCode == HttpURLConnection.HTTP_MULT_CHOICE
                            || responseCode == HttpURLConnection.HTTP_MOVED_PERM
                            || responseCode == HttpURLConnection.HTTP_MOVED_TEMP
                            || responseCode == HttpURLConnection.HTTP_SEE_OTHER
                            || responseCode == HTTP_STATUS_TEMPORARY_REDIRECT
                            || responseCode == HTTP_STATUS_PERMANENT_REDIRECT)) {
                connection.disconnect();
                url = handleRedirect(url, location);
            } else if (httpMethod == DataSpec.HTTP_METHOD_POST
                    && (responseCode == HttpURLConnection.HTTP_MULT_CHOICE
                            || responseCode == HttpURLConnection.HTTP_MOVED_PERM
                            || responseCode == HttpURLConnection.HTTP_MOVED_TEMP
                            || responseCode == HttpURLConnection.HTTP_SEE_OTHER)) {
                // POST request follows the redirect and is transformed into a GET request.
                connection.disconnect();
                httpMethod = DataSpec.HTTP_METHOD_GET;
                httpBody = null;
                url = handleRedirect(url, location);
            } else {
                return connection;
            }
        }
        // If we get here we've been redirected more times than are permitted.
        throw new NoRouteToHostException("Too many redirects: " + redirectCount);
    }

    /**
     * Configures a connection and opens it.
     *
     * @param url The url to connect to.
     * @param httpMethod The http method.
     * @param httpBody The body data, or {@code null} if not required.
     * @param position The byte offset of the requested data.
     * @param length The length of the requested data, or {@link C#LENGTH_UNSET}.
     * @param allowGzip Whether to allow the use of gzip.
     * @param followRedirects Whether to follow redirects.
     * @param requestParameters parameters (HTTP headers) to include in request.
     */
    private HttpURLConnection makeConnection(
            URL url,
            @HttpMethod int httpMethod,
            @Nullable byte[] httpBody,
            long position,
            long length,
            boolean allowGzip,
            boolean followRedirects,
            Map<String, String> requestParameters)
            throws IOException {
        HttpURLConnection connection;
        // FIX: exact protocol match instead of endsWith("https") (equivalent for http/https but
        // precise about intent).
        if ("https".equals(url.getProtocol())) {
            // SECURITY WARNING: certificate-chain validation and hostname verification are
            // deliberately disabled below (all-trusting TrustManager + HostnameVerifier).
            // This exposes connections to man-in-the-middle attacks; it exists only to support
            // hosts with self-signed certificates.
            HttpsURLConnection httpsConnection = (HttpsURLConnection) url.openConnection();
            // Accept any hostname. (FIX: the original installed this verifier twice; once
            // suffices.)
            httpsConnection.setHostnameVerifier(new HostnameVerifier() {
                @Override
                public boolean verify(String hostname, SSLSession session) {
                    return true;
                }
            });
            // A trust manager that does not validate certificate chains.
            final TrustManager[] trustAllCerts = new TrustManager[]{
                    new X509TrustManager() {
                        @Override
                        public void checkClientTrusted(
                                java.security.cert.X509Certificate[] chain, String authType)
                                throws CertificateException {
                        }

                        @Override
                        public void checkServerTrusted(
                                java.security.cert.X509Certificate[] chain, String authType)
                                throws CertificateException {
                        }

                        @Override
                        public java.security.cert.X509Certificate[] getAcceptedIssuers() {
                            // FIX: the X509TrustManager contract requires a non-null (possibly
                            // empty) array; returning null can NPE in some TLS stacks.
                            return new java.security.cert.X509Certificate[0];
                        }
                    }
            };
            try {
                // FIX: "TLS" is the recommended standard algorithm name; "SSL" is obsolete.
                SSLContext sslContext = SSLContext.getInstance("TLS");
                sslContext.init(null, trustAllCerts, new java.security.SecureRandom());
                SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory();
                httpsConnection.setSSLSocketFactory(sslSocketFactory);
            } catch (NoSuchAlgorithmException | KeyManagementException e) {
                // Best-effort, matching the original behavior: fall back to the platform default
                // socket factory. FIX: log through the file's Log convention instead of
                // printStackTrace().
                Log.e(TAG, "Failed to install the all-trusting SSL socket factory", e);
            }
            connection = httpsConnection;
        } else {
            connection = (HttpURLConnection) url.openConnection();
        }
        connection.setConnectTimeout(connectTimeoutMillis);
        connection.setReadTimeout(readTimeoutMillis);
        // Merge headers in order of increasing priority: defaults, per-source, per-dataSpec.
        Map<String, String> requestHeaders = new HashMap<>();
        if (defaultRequestProperties != null) {
            requestHeaders.putAll(defaultRequestProperties.getSnapshot());
        }
        requestHeaders.putAll(requestProperties.getSnapshot());
        requestHeaders.putAll(requestParameters);
        for (Map.Entry<String, String> property : requestHeaders.entrySet()) {
            connection.setRequestProperty(property.getKey(), property.getValue());
        }
        if (!(position == 0 && length == C.LENGTH_UNSET)) {
            String rangeRequest = "bytes=" + position + "-";
            if (length != C.LENGTH_UNSET) {
                rangeRequest += (position + length - 1);
            }
            connection.setRequestProperty("Range", rangeRequest);
        }
        connection.setRequestProperty("User-Agent", userAgent);
        connection.setRequestProperty("Accept-Encoding", allowGzip ? "gzip" : "identity");
        connection.setInstanceFollowRedirects(followRedirects);
        connection.setDoOutput(httpBody != null);
        connection.setRequestMethod(DataSpec.getStringForHttpMethod(httpMethod));
        if (httpBody != null) {
            connection.setFixedLengthStreamingMode(httpBody.length);
            connection.connect();
            OutputStream os = connection.getOutputStream();
            os.write(httpBody);
            os.close();
        } else {
            connection.connect();
        }
        return connection;
    }

    /**
     * Creates an {@link HttpURLConnection} that is connected with the {@code url}.
     */
    @VisibleForTesting
    /* package */ HttpURLConnection openConnection(URL url) throws IOException {
        return (HttpURLConnection) url.openConnection();
    }

    /**
     * Handles a redirect.
     *
     * @param originalUrl The original URL.
     * @param location The Location header in the response. May be {@code null}.
     * @return The next URL.
     * @throws IOException If redirection isn't possible.
     */
    private static URL handleRedirect(URL originalUrl, @Nullable String location)
            throws IOException {
        if (location == null) {
            throw new ProtocolException("Null location redirect");
        }
        // Form the new url.
        URL url = new URL(originalUrl, location);
        // Check that the protocol of the new url is supported.
        String protocol = url.getProtocol();
        if (!"https".equals(protocol) && !"http".equals(protocol)) {
            throw new ProtocolException("Unsupported protocol redirect: " + protocol);
        }
        // Currently this method is only called if allowCrossProtocolRedirects is true, and so the
        // code below isn't required. If we ever decide to handle redirects ourselves when
        // cross-protocol redirects are disabled, we'll need to uncomment this block of code.
        // if (!allowCrossProtocolRedirects && !protocol.equals(originalUrl.getProtocol())) {
        //   throw new ProtocolException("Disallowed cross-protocol redirect ("
        //       + originalUrl.getProtocol() + " to " + protocol + ")");
        // }
        return url;
    }

    /**
     * Attempts to extract the length of the content from the response headers of an open
     * connection.
     *
     * @param connection The open connection.
     * @return The extracted length, or {@link C#LENGTH_UNSET}.
     */
    private static long getContentLength(HttpURLConnection connection) {
        long contentLength = C.LENGTH_UNSET;
        String contentLengthHeader = connection.getHeaderField("Content-Length");
        if (!TextUtils.isEmpty(contentLengthHeader)) {
            try {
                contentLength = Long.parseLong(contentLengthHeader);
            } catch (NumberFormatException e) {
                Log.e(TAG, "Unexpected Content-Length [" + contentLengthHeader + "]");
            }
        }
        String contentRangeHeader = connection.getHeaderField("Content-Range");
        if (!TextUtils.isEmpty(contentRangeHeader)) {
            Matcher matcher = CONTENT_RANGE_HEADER.matcher(contentRangeHeader);
            if (matcher.find()) {
                try {
                    long contentLengthFromRange =
                            Long.parseLong(matcher.group(2)) - Long.parseLong(matcher.group(1)) + 1;
                    if (contentLength < 0) {
                        // Some proxy servers strip the Content-Length header. Fall back to the
                        // length calculated here in this case.
                        contentLength = contentLengthFromRange;
                    } else if (contentLength != contentLengthFromRange) {
                        // If there is a discrepancy between the Content-Length and Content-Range
                        // headers, assume the one with the larger value is correct. We have seen
                        // cases where carriers change one of them to reduce the size of a request,
                        // but it is unlikely anybody would increase it.
                        Log.w(
                                TAG,
                                "Inconsistent headers ["
                                        + contentLengthHeader
                                        + "] ["
                                        + contentRangeHeader
                                        + "]");
                        contentLength = max(contentLength, contentLengthFromRange);
                    }
                } catch (NumberFormatException e) {
                    Log.e(TAG, "Unexpected Content-Range [" + contentRangeHeader + "]");
                }
            }
        }
        return contentLength;
    }

    /**
     * Skips any bytes that need skipping. Else does nothing.
     * <p>
     * This implementation is based roughly on {@code libcore.io.Streams.skipByReading()}.
     *
     * @throws InterruptedIOException If the thread is interrupted during the operation.
     * @throws EOFException If the end of the input stream is reached before the bytes are skipped.
     */
    private void skipInternal() throws IOException {
        if (bytesSkipped == bytesToSkip) {
            return;
        }
        // Acquire the shared skip buffer.
        byte[] skipBuffer = skipBufferReference.getAndSet(null);
        if (skipBuffer == null) {
            skipBuffer = new byte[4096];
        }
        while (bytesSkipped != bytesToSkip) {
            int readLength = (int) min(bytesToSkip - bytesSkipped, skipBuffer.length);
            int read = inputStream.read(skipBuffer, 0, readLength);
            if (Thread.currentThread().isInterrupted()) {
                throw new InterruptedIOException();
            }
            if (read == -1) {
                throw new EOFException();
            }
            bytesSkipped += read;
            bytesTransferred(read);
        }
        // Release the shared skip buffer.
        skipBufferReference.set(skipBuffer);
    }

    /**
     * Reads up to {@code length} bytes of data and stores them into {@code buffer}, starting at
     * index {@code offset}.
     * <p>
     * This method blocks until at least one byte of data can be read, the end of the opened range
     * is detected, or an exception is thrown.
     *
     * @param buffer The buffer into which the read data should be stored.
     * @param offset The start offset into {@code buffer} at which data should be written.
     * @param readLength The maximum number of bytes to read.
     * @return The number of bytes read, or {@link C#RESULT_END_OF_INPUT} if the end of the opened
     *     range is reached.
     * @throws IOException If an error occurs reading from the source.
     */
    private int readInternal(byte[] buffer, int offset, int readLength) throws IOException {
        if (readLength == 0) {
            return 0;
        }
        if (bytesToRead != C.LENGTH_UNSET) {
            long bytesRemaining = bytesToRead - bytesRead;
            if (bytesRemaining == 0) {
                return C.RESULT_END_OF_INPUT;
            }
            readLength = (int) min(readLength, bytesRemaining);
        }
        int read = inputStream.read(buffer, offset, readLength);
        if (read == -1) {
            if (bytesToRead != C.LENGTH_UNSET) {
                // End of stream reached having not read sufficient data.
                throw new EOFException();
            }
            return C.RESULT_END_OF_INPUT;
        }
        bytesRead += read;
        bytesTransferred(read);
        return read;
    }

    /**
     * On platform API levels 19 and 20, okhttp's implementation of {@link InputStream#close} can
     * block for a long time if the stream has a lot of data remaining. Call this method before
     * closing the input stream to make a best effort to cause the input stream to encounter an
     * unexpected end of input, working around this issue. On other platform API levels, the method
     * does nothing.
     *
     * @param connection The connection whose {@link InputStream} should be terminated.
     * @param bytesRemaining The number of bytes remaining to be read from the input stream if its
     *     length is known. {@link C#LENGTH_UNSET} otherwise.
     */
    private static void maybeTerminateInputStream(
            HttpURLConnection connection, long bytesRemaining) {
        if (Util.SDK_INT != 19 && Util.SDK_INT != 20) {
            return;
        }
        try {
            InputStream inputStream = connection.getInputStream();
            if (bytesRemaining == C.LENGTH_UNSET) {
                // If the input stream has already ended, do nothing. The socket may be re-used.
                if (inputStream.read() == -1) {
                    return;
                }
            } else if (bytesRemaining <= MAX_BYTES_TO_DRAIN) {
                // There isn't much data left. Prefer to allow it to drain, which may allow the
                // socket to be re-used.
                return;
            }
            String className = inputStream.getClass().getName();
            if ("com.android.okhttp.internal.http.HttpTransport$ChunkedInputStream"
                            .equals(className)
                    || "com.android.okhttp.internal.http.HttpTransport$FixedLengthInputStream"
                            .equals(className)) {
                // Invoke okhttp's package-private unexpectedEndOfInput() via reflection to abort
                // the stream without draining it.
                Class<?> superclass = inputStream.getClass().getSuperclass();
                Method unexpectedEndOfInput = superclass.getDeclaredMethod("unexpectedEndOfInput");
                unexpectedEndOfInput.setAccessible(true);
                unexpectedEndOfInput.invoke(inputStream);
            }
        } catch (Exception e) {
            // If an IOException then the connection didn't ever have an input stream, or it was
            // closed already. If another type of exception then something went wrong, most likely
            // the device isn't using okhttp.
        }
    }

    /**
     * Closes the current connection quietly, if there is one.
     */
    private void closeConnectionQuietly() {
        if (connection != null) {
            try {
                connection.disconnect();
            } catch (Exception e) {
                Log.e(TAG, "Unexpected error while disconnecting", e);
            }
            connection = null;
        }
    }

    /** Returns whether the response is gzip-compressed, per its Content-Encoding header. */
    private static boolean isCompressed(HttpURLConnection connection) {
        String contentEncoding = connection.getHeaderField("Content-Encoding");
        return "gzip".equalsIgnoreCase(contentEncoding);
    }
}
/* * Copyright [2020] [MaxKey of copyright http://www.maxkey.top] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.maxkey.authz.oauth2.provider.code; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Types; import javax.sql.DataSource; import org.maxkey.authz.oauth2.provider.OAuth2Authentication; import org.maxkey.util.SerializationUtils; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.RowMapper; import org.springframework.jdbc.core.support.SqlLobValue; import org.springframework.util.Assert; /** * Implementation of authorization code services that stores the codes and * authentication in a database. 
 *
 * @author Crystal.Sea
 */
public class JdbcAuthorizationCodeServices extends RandomValueAuthorizationCodeServices {

    // Default SQL for loading, storing and deleting (code, authentication) rows.
    // Each statement can be overridden via the corresponding setter below.
    private static final String DEFAULT_SELECT_STATEMENT = "select code, authentication from oauth_code where code = ?";

    private static final String DEFAULT_INSERT_STATEMENT = "insert into oauth_code (code, authentication) values (?, ?)";

    private static final String DEFAULT_DELETE_STATEMENT = "delete from oauth_code where code = ?";

    private String selectAuthenticationSql = DEFAULT_SELECT_STATEMENT;

    private String insertAuthenticationSql = DEFAULT_INSERT_STATEMENT;

    private String deleteAuthenticationSql = DEFAULT_DELETE_STATEMENT;

    private final JdbcTemplate jdbcTemplate;

    public JdbcAuthorizationCodeServices(DataSource dataSource) {
        Assert.notNull(dataSource, "DataSource required");
        this.jdbcTemplate = new JdbcTemplate(dataSource);
    }

    public JdbcAuthorizationCodeServices(JdbcTemplate jdbcTemplate) {
        Assert.notNull(jdbcTemplate, "jdbcTemplate required");
        this.jdbcTemplate = jdbcTemplate;
    }

    /**
     * Persists the authorization code together with the Java-serialized
     * authentication, stored as a BLOB column.
     */
    @Override
    protected void store(String code, OAuth2Authentication authentication) {
        jdbcTemplate.update(insertAuthenticationSql,
                new Object[] { code, new SqlLobValue(SerializationUtils.serialize(authentication)) }, new int[] {
                        Types.VARCHAR, Types.BLOB });
    }

    /**
     * Loads the authentication for the given code and, if found, deletes the row
     * so the code is single-use. Returns {@code null} when the code is unknown.
     * <p>
     * NOTE(review): the column is read back through Java native deserialization;
     * this is only safe as long as the oauth_code table contains exclusively data
     * written by {@link #store} — confirm no other writer exists.
     */
    public OAuth2Authentication remove(String code) {
        OAuth2Authentication authentication;

        try {
            authentication = jdbcTemplate.queryForObject(selectAuthenticationSql,
                    new RowMapper<OAuth2Authentication>() {
                        public OAuth2Authentication mapRow(ResultSet rs, int rowNum) throws SQLException {
                            return SerializationUtils.deserialize(rs.getBytes("authentication"));
                        }
                    }, code);
        } catch (EmptyResultDataAccessException e) {
            // No row for this code: treat "not found" as null rather than an error.
            return null;
        }

        if (authentication != null) {
            jdbcTemplate.update(deleteAuthenticationSql, code);
        }

        return authentication;
    }

    public void setSelectAuthenticationSql(String selectAuthenticationSql) {
        this.selectAuthenticationSql = selectAuthenticationSql;
    }

    public void setInsertAuthenticationSql(String insertAuthenticationSql) {
        this.insertAuthenticationSql = insertAuthenticationSql;
    }

    public void setDeleteAuthenticationSql(String deleteAuthenticationSql) {
        this.deleteAuthenticationSql = deleteAuthenticationSql;
    }
}
///////////////////////////////////////////////////////////////////////// // CIS 303 Algorithm Analysis and Design // Spring 2017 // Assignment 2, Chapter 3 // Client code for modified binary search ///////////////////////////////////////////////////////////////////////// import java.util.*; public class BinTest { public static int SEQ_ITS; public static void main (String [] args) { int [] a = new int[1000000]; fillRandom(a); System.out.println("\n*** Random array, size 1,000,000, find 9"); test (a, 9); System.out.println("\n*** Array, size 1,000,000, all 9s, find 9"); for(int i =0; i < 1000000; i++) a[i] = 9; test (a, 9); System.out.println("\n*** Array, size 1,000,000, all 2s, find 9"); for(int i =0; i < 1000000; i++) a[i] = 2; test (a, 9); System.out.println("\n*** Array, size 1,000,000, all 2s except for one final 9, find 9"); a[999999] = 9; test (a, 9); System.out.println("\n*** Array, size 1,000,000, all 9s except for one 2 at the beginnng, find 9"); for(int i =0; i < 1000000; i++) a[i] = 9; a[0] = 2; test (a, 9); } public static void test (int [] a, int target) { try { System.out.println("\nUsing binSearch, smallest position of " + target +": " + BinSearch.binSearch(a, target)); } catch (Exception e) { System.out.println (e); } catch (Error e) { System.out.println (e); } System.out.println("VERIFY: Using seqSearch, smallest position of " + target + ": " + seqSearch(a, target)); System.out.println("BIN_CALLS " + BinSearch.BIN_CALLS); System.out.println("SEQ_ITS " + SEQ_ITS); } public static void fillRandom (int [] a) { Random rand = new Random(); rand.setSeed(13); int i = 0; int value = 1; while (i < a.length) { int times = rand.nextInt(100000); int count = 0; while (i < a.length && count < times) { a[i++] = value; count++; } value++; } } public static int seqSearch(int [] a, int target) { SEQ_ITS = 0; for (int i = 0; i < a.length; i++) { SEQ_ITS++; if (a[i] == target) return i; } return -1; } }
/*
 * Copyright (C) 2016 Singular Studios (a.k.a Atom Tecnologia) - www.opensingular.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.opensingular.lib.commons.canvas;

import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.opensingular.lib.commons.canvas.builder.RawHtmlBuilder;
import org.opensingular.lib.commons.canvas.table.HtmlTableCanvas;
import org.opensingular.lib.commons.canvas.table.TableCanvas;

import javax.annotation.Nonnull;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;

/**
 * {@link DocumentCanvas} implementation that renders the document into nested
 * HTML elements through {@link RawHtmlBuilder}. Optionally prefixes section
 * titles with hierarchical numbering ("1 ", "1.2 ", ...).
 */
public class HtmlCanvas implements DocumentCanvas {

    // Whether subtitles get a hierarchical numeric prefix.
    private final boolean showTitleLevel;
    // Root of the generated HTML tree; build() serializes from here.
    private final RawHtmlBuilder rootHtmlBuilder;
    // Builder new content is currently appended to (may point into a child div).
    private RawHtmlBuilder currentHtmlBuilder;

    private String titlePrefix;
    // 1-based counter of subtitles emitted at this level (0 until the first one).
    private int index;
    // HTML heading level used for the next subtitle (h1..h4).
    private int headerTagLevel;
    // Child canvases keyed by the subtitle index they belong to, so repeated
    // addChild() calls for the same section return the same canvas.
    private Map<Integer, HtmlCanvas> indexChildMap;

    public HtmlCanvas(boolean showTitleLevel) {
        this(new RawHtmlBuilder("div"), showTitleLevel);
    }

    protected HtmlCanvas(RawHtmlBuilder rootHtmlBuilder, boolean showTitleLevel) {
        this.rootHtmlBuilder = rootHtmlBuilder;
        this.showTitleLevel = showTitleLevel;
        this.titlePrefix = "";
        this.index = 0;
        this.indexChildMap = new HashMap<>();
        this.headerTagLevel = 1;
        this.currentHtmlBuilder = rootHtmlBuilder;
    }

    /**
     * Adds a section title. The first title becomes the page header (h1);
     * subsequent titles are rendered as sub-headers at the current level.
     */
    @Override
    public void addSubtitle(String title) {
        String prefix = "";
        if (showTitleLevel) {
            if (index > 0) {
                prefix = titlePrefix + index + " ";
            }
            index++;
        }
        if (headerTagLevel == 1) {
            addPageHeader(prefix, title);
            headerTagLevel++;
        } else {
            RawHtmlBuilder header = createSubheaderTag(headerTagLevel);
            header.appendText(prefix);
            header.appendText(ObjectUtils.defaultIfNull(title, ""));
        }
    }

    /** Creates an hN element for a sub-header at the given level. */
    protected RawHtmlBuilder createSubheaderTag(Integer headerTagLevel) {
        return currentHtmlBuilder.newChild("h" + headerTagLevel);
    }

    /** Renders the top-level page header as an h1 element. */
    protected void addPageHeader(String prefix, String title) {
        RawHtmlBuilder header = currentHtmlBuilder.newChild("h1");
        header.appendText(prefix);
        header.appendText(ObjectUtils.defaultIfNull(title, ""));
    }

    /**
     * Returns the child canvas for the most recently added subtitle, creating
     * (and caching) it on first access. The child inherits numbering state.
     */
    @Override
    public DocumentCanvas addChild() {
        int titleIndex = (index - 1);
        if (!indexChildMap.containsKey(titleIndex)) {
            HtmlCanvas newChild = newHtmlChildCanvas(currentHtmlBuilder.newChild("div"), showTitleLevel);
            if (showTitleLevel) {
                newChild.index = 1;
                newChild.headerTagLevel = childHeaderTagLevel();
                newChild.titlePrefix = titlePrefix + titleIndex + ".";
            }
            indexChildMap.put(titleIndex, newChild);
        }
        return indexChildMap.get(titleIndex);
    }

    @Nonnull
    protected HtmlCanvas newHtmlChildCanvas(RawHtmlBuilder child, boolean showTitleLevel) {
        return new HtmlCanvas(child, showTitleLevel);
    }

    // Children use the next heading level, capped at h4.
    private int childHeaderTagLevel() {
        int newHeaderTagLevel = headerTagLevel + 1;
        if (newHeaderTagLevel > 4) {
            return 4;
        }
        return newHeaderTagLevel;
    }

    /** Renders a "Label: value" pair as an inline span; the label is optional. */
    @Override
    public void addFormItem(FormItem formItem) {
        RawHtmlBuilder span = this.currentHtmlBuilder.newChild("span");
        span.putAttribute("style", "margin-right:25px;");
        if (!StringUtils.isEmpty(formItem.getLabel())) {
            RawHtmlBuilder labelComp = span.newChild("label");
            labelComp.putAttribute("style", "font-weight:bold;");
            labelComp.appendText(formItem.getLabel());
            labelComp.appendText(": ");
        }
        span.appendText(defaultIfNull(formItem.getValue(), ""));
    }

    @Override
    public void addLineBreak() {
        currentHtmlBuilder.newChild("br");
    }

    /** Renders the values as an unordered (ul/li) list. */
    @Override
    public void addList(List<String> values) {
        RawHtmlBuilder ul = this.currentHtmlBuilder.newChild("ul");
        for (String v : values) {
            RawHtmlBuilder li = ul.newChild("li");
            li.appendText(v);
        }
    }

    /**
     * Starts a new table. NOTE(review): the table element is attached to the
     * ROOT builder, not to currentHtmlBuilder like the other add* methods —
     * confirm this asymmetry is intentional.
     */
    @Override
    public TableCanvas addTable() {
        addLineBreak();
        return new HtmlTableCanvas(getRootHtmlBuilder().newChild("table"));
    }

    /** Embeds raw CSS; the text is appended unescaped inside a style element. */
    public void stylesheet(String css) {
        RawHtmlBuilder style = currentHtmlBuilder.newChild("style");
        style.appendTextWithoutEscape(css);
    }

    /** Serializes the whole HTML tree to a string. */
    public String build() {
        return rootHtmlBuilder.build();
    }

    // NOTE(review): accessor names below break the lowerCamelCase convention
    // (getcurrentHtmlBuilder / setcurrentHtmlBuilder) — rename in a follow-up.
    protected RawHtmlBuilder getcurrentHtmlBuilder() {
        return currentHtmlBuilder;
    }

    public void setcurrentHtmlBuilder(RawHtmlBuilder currentHtmlBuilder) {
        this.currentHtmlBuilder = currentHtmlBuilder;
    }

    public RawHtmlBuilder getRootHtmlBuilder() {
        return rootHtmlBuilder;
    }
}
package io.quarkus.deployment;

import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

/**
 * Verifies that {@code Capability} enum constants expose the expected
 * dotted capability names.
 */
public class CapabilityNameTest {

    @Test
    public void testName() {
        // Grouped with assertAll so every mismatch is reported in one run.
        assertAll(
                () -> assertEquals("io.quarkus.agroal", Capability.AGROAL.getName()),
                () -> assertEquals("io.quarkus.security.jpa", Capability.SECURITY_JPA.getName()),
                () -> assertEquals("io.quarkus.container.image.docker", Capability.CONTAINER_IMAGE_DOCKER.getName()));
    }
}
/*
 * Copyright (c) 2017 Martin Pfeffer
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.celox.androiddemos.wificonnection;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {

    @Test
    public void addition_isCorrect() throws Exception {
        // Sanity check that the local JVM test harness is wired up.
        final int expectedSum = 4;
        final int actualSum = 2 + 2;
        assertEquals(expectedSum, actualSum);
    }
}
package com.prowidesoftware.swift.model.mx; import com.prowidesoftware.swift.model.mx.dic.*; import com.prowidesoftware.swift.model.mx.AbstractMX; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import com.prowidesoftware.swift.model.MxSwiftMessage; import com.prowidesoftware.swift.model.mx.AbstractMX; import com.prowidesoftware.swift.model.mx.MxRead; import com.prowidesoftware.swift.model.mx.MxReadImpl; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; /** * Class for seev.031.002.01 ISO 20022 message. * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "Document", propOrder = { "corpActnNtfctn" }) @XmlRootElement(name = "Document", namespace = "urn:swift:xsd:seev.031.002.01") public class MxSeev03100201 extends AbstractMX { @XmlElement(name = "CorpActnNtfctn", required = true) protected CorporateActionNotification002V01 corpActnNtfctn; public final static transient String BUSINESS_PROCESS = "seev"; public final static transient int FUNCTIONALITY = 31; public final static transient int VARIANT = 2; public final static transient int VERSION = 1; @SuppressWarnings("rawtypes") public final static transient Class[] _classes = new Class[] {AccountAndBalance7 .class, AccountIdentification10 .class, AccountIdentification8Choice.class, AdditionalBusinessProcess1Code.class, AdditionalBusinessProcessFormat4Choice.class, AlternateIdentification2 .class, AmountAndQuantityRatio3 .class, AmountPrice4 .class, AmountPrice5 .class, AmountPricePerAmount3 .class, AmountPricePerFinancialInstrumentQuantity4 .class, AmountPriceType1Code.class, AmountPriceType2Code.class, AmountToAmountRatio3 .class, 
BalanceFormat2Choice.class, BeneficiaryCertificationType2Code.class, BeneficiaryCertificationType3Choice.class, CapitalGainFormat2Choice.class, CashAccountIdentification6Choice.class, CashOption8 .class, CertificationFormatType1Code.class, CertificationTypeFormat2Choice.class, ClassificationType3Choice.class, ConversionType1Code.class, ConversionTypeFormat2Choice.class, CopyDuplicate1Code.class, CorporateAction4 .class, CorporateActionAmounts7 .class, CorporateActionBalanceDetails7 .class, CorporateActionChangeType1Code.class, CorporateActionChangeTypeFormat3Choice.class, CorporateActionDate11 .class, CorporateActionDate12 .class, CorporateActionDate13 .class, CorporateActionEventProcessingType1Choice.class, CorporateActionEventProcessingType1Code.class, CorporateActionEventReference2 .class, CorporateActionEventReference2Choice.class, CorporateActionEventStage1Code.class, CorporateActionEventStageFormat2Choice.class, CorporateActionEventStatus1 .class, CorporateActionEventType4Choice.class, CorporateActionEventType6Code.class, CorporateActionFrequencyType1Code.class, CorporateActionGeneralInformation19 .class, CorporateActionMandatoryVoluntary1Code.class, CorporateActionMandatoryVoluntary2Choice.class, CorporateActionNarrative15 .class, CorporateActionNarrative17 .class, CorporateActionNarrative18 .class, CorporateActionNotification002V01 .class, CorporateActionNotification2 .class, CorporateActionNotificationType1Code.class, CorporateActionOption17 .class, CorporateActionOption2Code.class, CorporateActionOption9Choice.class, CorporateActionPeriod3 .class, CorporateActionPeriod5 .class, CorporateActionPrice13 .class, CorporateActionPrice14 .class, CorporateActionPrice15 .class, CorporateActionProcessingStatus1Choice.class, CorporateActionQuantity2 .class, CorporateActionRate12 .class, CorporateActionRate13 .class, CorporateActionRate14 .class, CorporateActionTaxableIncomePerShareCalculated1Code.class, CreditDebitCode.class, DateAndDateTimeChoice.class, 
DateCode6Choice.class, DateCode7Choice.class, DateCode8Choice.class, DateCodeAndTimeFormat2 .class, DateFormat10Choice.class, DateFormat12Choice.class, DateFormat13Choice.class, DateFormat8Choice.class, DateFormat9Choice.class, DateType1Code.class, DateType6Code.class, DateType7Code.class, DefaultProcessingOrStandingInstruction1Choice.class, DistributionType1Code.class, DistributionTypeFormat2Choice.class, DividendTypeFormat2Choice.class, DocumentIdentification16 .class, DocumentIdentification17 .class, DocumentIdentification19 .class, DocumentIdentification20 .class, DocumentIdentification2Choice.class, DocumentNumber2Choice.class, EUCapitalGain2Code.class, ElectionMovementType2Code.class, ElectionTypeFormat2Choice.class, EventCompletenessStatus1Code.class, EventConfirmationStatus1Code.class, Extension2 .class, ExtensionEnvelope1 .class, FinancialInstrumentAttributes10 .class, FinancialInstrumentAttributes11 .class, FinancialInstrumentAttributes12 .class, FinancialInstrumentQuantity14Choice.class, FinancialInstrumentQuantity15Choice.class, ForeignExchangeTerms12 .class, FractionDispositionType2Code.class, FractionDispositionType4Code.class, FractionDispositionType5Choice.class, FractionDispositionType6Choice.class, GenericIdentification23 .class, GenericIdentification24 .class, GenericIdentification25 .class, GenericIdentification26 .class, GrossDividendRateFormat4Choice.class, GrossDividendRateType1Code.class, IdentificationFormat2Choice.class, IdentificationSource2Choice.class, IndicativeOrMarketPrice3Choice.class, InterestComputationMethod2Code.class, InterestComputationMethodFormat2Choice.class, InterestRateUsedForPaymentFormat4Choice.class, IntermediateSecuritiesDistributionTypeFormat3Choice.class, IntermediateSecuritiesDistributionTypeFormat4Choice.class, IntermediateSecurityDistributionType2Code.class, IntermediateSecurityDistributionType3Code.class, LotteryType1Code.class, LotteryTypeFormat2Choice.class, MarketIdentification2Choice.class, 
MarketIdentification7 .class, MarketType3Code.class, MarketTypeFormat2Choice.class, MxSeev03100201 .class, NameAndAddress12 .class, NetDividendRateFormat4Choice.class, NetDividendRateType1Code.class, NonEligibleProceedsIndicator1Code.class, NonEligibleProceedsIndicator2Choice.class, OfferType1Code.class, OfferTypeFormat2Choice.class, OptionAvailabilityStatus1Code.class, OptionAvailabilityStatus2Choice.class, OptionFeatures2Code.class, OptionFeaturesFormat4Choice.class, OptionStyle2Code.class, OptionStyle5Choice.class, OriginalAndCurrentQuantities3 .class, OriginalAndCurrentQuantities4 .class, PartyIdentification16Choice.class, PartyIdentification18Choice.class, PercentagePrice1 .class, Period1Choice.class, Period3 .class, PriceFormat14Choice.class, PriceFormat15Choice.class, PriceFormat16Choice.class, PriceFormat18Choice.class, PriceRateType3Code.class, PriceValueType4Code.class, PriceValueType5Code.class, ProcessingPosition3Choice.class, ProcessingPosition3Code.class, ProprietaryQuantity4 .class, ProprietaryQuantity5 .class, Quantity10Choice.class, Quantity2Code.class, Quantity7Choice.class, Quantity8Choice.class, Quantity9Choice.class, QuantityToQuantityRatio2 .class, RateAndAmountFormat7Choice.class, RateAndAmountFormat8Choice.class, RateFormat2Choice.class, RateFormat3Choice.class, RateFormat5Choice.class, RateStatus1Code.class, RateStatus2Choice.class, RateType12Choice.class, RateType12Code.class, RateType14Choice.class, RateType15Choice.class, RateType16Choice.class, RateType3Code.class, RateType5Code.class, RateType7Code.class, RateType9Choice.class, RateType9Code.class, RateTypeAndAmountAndStatus10 .class, RateTypeAndAmountAndStatus2 .class, RateTypeAndAmountAndStatus7 .class, RateTypeAndAmountAndStatus8 .class, RateTypeAndAmountAndStatus9 .class, RateValueType6Code.class, RatioFormat10Choice.class, RatioFormat9Choice.class, RenounceableEntitlementStatusTypeFormat2Choice.class, RenounceableStatus1Code.class, 
RestrictedFINActiveCurrencyAnd13DecimalAmount.class, RestrictedFINActiveCurrencyAndAmount.class, SafekeepingAccountIdentification1Code.class, SafekeepingPlace1Code.class, SafekeepingPlace2Code.class, SafekeepingPlaceFormat5Choice.class, SafekeepingPlaceTypeAndAnyBICIdentifier1 .class, SafekeepingPlaceTypeAndText5 .class, SecuritiesOption12 .class, SecuritiesOption9 .class, SecurityDate4 .class, SecurityIdentification12 .class, SecurityIdentification12Choice.class, ShortLong1Code.class, SignedQuantityFormat3 .class, SignedQuantityFormat4 .class, SolicitationFeeRateFormat2Choice.class, TaxCreditRateFormat4Choice.class, TaxType4Code.class, TaxableIncomePerShareCalculatedFormat2Choice.class, TemporaryFinancialInstrumentIndicator2Choice.class, UpdatedAdditionalInformation4 .class, UpdatedAdditionalInformation5 .class, UpdatedAdditionalInformation6 .class, UpdatedURLlnformation1 .class }; public final static transient String NAMESPACE = "urn:swift:xsd:seev.031.002.01"; public MxSeev03100201() { super(); } /** * Creates the MX object parsing the parameter String with the XML content * */ public MxSeev03100201(final String xml) { this(); MxSeev03100201 tmp = parse(xml); corpActnNtfctn = tmp.getCorpActnNtfctn(); } /** * Creates the MX object parsing the raw content from the parameter MxSwiftMessage * */ public MxSeev03100201(final MxSwiftMessage mxSwiftMessage) { this(mxSwiftMessage.message()); } /** * Gets the value of the corpActnNtfctn property. * * @return * possible object is * {@link CorporateActionNotification002V01 } * */ public CorporateActionNotification002V01 getCorpActnNtfctn() { return corpActnNtfctn; } /** * Sets the value of the corpActnNtfctn property. 
 *
 * @param value
 *     allowed object is
 *     {@link CorporateActionNotification002V01 }
 *
 */
public MxSeev03100201 setCorpActnNtfctn(CorporateActionNotification002V01 value) {
    this.corpActnNtfctn = value;
    return this;
}

@Override
public String toString() {
    // Generated code: reflection-based rendering of all fields.
    return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
}

@Override
public boolean equals(Object that) {
    // Generated code: reflective field-by-field equality (kept consistent with hashCode below).
    return EqualsBuilder.reflectionEquals(this, that);
}

@Override
public int hashCode() {
    return HashCodeBuilder.reflectionHashCode(this);
}

@Override
public String getBusinessProcess() {
    return BUSINESS_PROCESS;
}

@Override
public int getFunctionality() {
    return FUNCTIONALITY;
}

@Override
public int getVariant() {
    return VARIANT;
}

@Override
public int getVersion() {
    return VERSION;
}

/**
 * Creates the MX object parsing the raw content from the parameter XML
 *
 */
public static MxSeev03100201 parse(String xml) {
    return ((MxSeev03100201) MxReadImpl.parse(MxSeev03100201 .class, xml, _classes));
}

/**
 * Creates the MX object parsing the raw content from the parameter XML with injected read implementation
 * @since 9.0.1
 *
 * @param parserImpl an MX unmarshall implementation
 */
public static MxSeev03100201 parse(String xml, MxRead parserImpl) {
    return ((MxSeev03100201) parserImpl.read(MxSeev03100201 .class, xml, _classes));
}

@Override
public String getNamespace() {
    return NAMESPACE;
}

@Override
@SuppressWarnings("rawtypes")
public Class[] getClasses() {
    return _classes;
}

/**
 * Creates an MxSeev03100201 messages from its JSON representation.
 * <p>
 * For generic conversion of JSON into the corresponding MX instance
 * see {@link AbstractMX#fromJson(String)}
 *
 * @since 7.10.2
 *
 * @param json a JSON representation of an MxSeev03100201 message
 * @return
 *     a new instance of MxSeev03100201
 */
public final static MxSeev03100201 fromJson(String json) {
    return AbstractMX.fromJson(json, MxSeev03100201 .class);
}

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.transport.nio;

import java.io.EOFException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.WritableByteChannel;

import org.apache.activemq.transport.tcp.TimeStampStream;

/**
 * An optimized buffered outputstream for Tcp
 *
 * @version $Revision: 1.1.1.1 $
 */
public class NIOOutputStream extends OutputStream implements TimeStampStream {

    private static final int BUFFER_SIZE = 8192;

    private final WritableByteChannel out;
    // Internal buffer; byteBuffer wraps the same array so flush() can hand it
    // to the channel without copying.
    private final byte[] buffer;
    private final ByteBuffer byteBuffer;

    // Number of valid bytes currently held in buffer.
    private int count;
    private boolean closed;
    private volatile long writeTimestamp = -1; // concurrent reads of this value

    /**
     * Constructor
     *
     * @param out
     */
    public NIOOutputStream(WritableByteChannel out) {
        this(out, BUFFER_SIZE);
    }

    /**
     * Creates a new buffered output stream to write data to the specified
     * underlying output stream with the specified buffer size.
     *
     * @param out the underlying output stream.
     * @param size the buffer size.
     * @throws IllegalArgumentException if size &lt;= 0.
     */
    public NIOOutputStream(WritableByteChannel out, int size) {
        this.out = out;
        if (size <= 0) {
            throw new IllegalArgumentException("Buffer size <= 0");
        }
        buffer = new byte[size];
        byteBuffer = ByteBuffer.wrap(buffer);
    }

    /**
     * write a byte on to the stream
     *
     * @param b - byte to write
     * @throws IOException
     */
    public void write(int b) throws IOException {
        checkClosed();
        if (availableBufferToWrite() < 1) {
            flush();
        }
        buffer[count++] = (byte)b;
    }

    /**
     * write a byte array to the stream
     *
     * @param b the byte buffer
     * @param off the offset into the buffer
     * @param len the length of data to write
     * @throws IOException
     */
    public void write(byte b[], int off, int len) throws IOException {
        checkClosed();
        if (availableBufferToWrite() < len) {
            flush();
        }
        if (buffer.length >= len) {
            // Small enough to buffer.
            System.arraycopy(b, off, buffer, count, len);
            count += len;
        } else {
            // Larger than the whole buffer: bypass it and write directly.
            write(ByteBuffer.wrap(b, off, len));
        }
    }

    /**
     * flush the data to the output stream. This doesn't call flush on the
     * underlying outputstream, because Tcp is particularly efficient at doing
     * this itself ....
     *
     * @throws IOException
     */
    public void flush() throws IOException {
        if (count > 0 && out != null) {
            byteBuffer.position(0);
            byteBuffer.limit(count);
            write(byteBuffer);
            count = 0;
        }
    }

    /**
     * close this stream
     * <p>
     * NOTE(review): this does not flush remaining buffered bytes
     * (OutputStream.close() is a no-op); callers appear expected to flush
     * first — confirm.
     *
     * @throws IOException
     */
    public void close() throws IOException {
        super.close();
        closed = true;
    }

    /**
     * Checks that the stream has not been closed
     *
     * @throws IOException
     */
    protected void checkClosed() throws IOException {
        if (closed) {
            throw new EOFException("Cannot write to the stream any more it has already been closed");
        }
    }

    /**
     * @return the amount free space in the buffer
     */
    private int availableBufferToWrite() {
        return buffer.length - count;
    }

    protected void write(ByteBuffer data) throws IOException {
        int remaining = data.remaining();
        int lastRemaining = remaining - 1;
        long delay = 1;
        try {
            // Expose the write start time so isWriting()/getWriteTimestamp() can
            // detect a stalled write from another thread.
            writeTimestamp = System.currentTimeMillis();
            while (remaining > 0) {
                // We may need to do a little bit of sleeping to avoid a busy loop.
                // Slow down if no data was written out..
                if (remaining == lastRemaining) {
                    try {
                        // Use exponential rollback to increase sleep time.
                        Thread.sleep(delay);
                        delay *= 2;
                        if (delay > 1000) {
                            delay = 1000;
                        }
                    } catch (InterruptedException e) {
                        throw new InterruptedIOException();
                    }
                } else {
                    delay = 1;
                }
                lastRemaining = remaining;

                // Since the write is non-blocking, all the data may not have been
                // written.
                out.write(data);
                remaining = data.remaining();
            }
        } finally {
            writeTimestamp = -1;
        }
    }

    /* (non-Javadoc)
     * @see org.apache.activemq.transport.tcp.TimeStampStream#isWriting()
     */
    public boolean isWriting() {
        return writeTimestamp > 0;
    }

    /* (non-Javadoc)
     * @see org.apache.activemq.transport.tcp.TimeStampStream#getWriteTimestamp()
     */
    public long getWriteTimestamp() {
        return writeTimestamp;
    }
}
/*
 * Copyright 2019-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.vividus.selenium.mobileapp.screenshot;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.stream.Stream;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.Mock;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.Point;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.vividus.selenium.mobileapp.MobileAppWebDriverManager;
import org.vividus.ui.context.UiContext;

import ru.yandex.qatools.ashot.coordinates.Coords;

/**
 * Unit tests for {@code MobileAppCoordsProvider}: element coordinate
 * calculation relative to the native status bar, the current search context,
 * and the device pixel ratio (DPR).
 */
@ExtendWith(MockitoExtension.class)
class MobileAppCoordsProviderTests {

    private static final Point POINT = new Point(10, 10);
    private static final Dimension DIMENSION = new Dimension(1, 1);

    @Mock private MobileAppWebDriverManager driverManager;
    @Mock private WebElement webElement;
    @Spy private UiContext uiContext;

    // NOTE(review): method name typo "shoud" -> "should"; rename in a follow-up.
    @Test
    void shoudProvideAdjustedWithNativeHeaderHeightCoordinates() {
        MobileAppCoordsProvider coordsProvider = new MobileAppCoordsProvider(true, driverManager, uiContext);
        // Element at y=234 with a 100px status bar is expected at y=134.
        when(driverManager.getStatusBarSize()).thenReturn(100);
        when(webElement.getLocation()).thenReturn(new Point(0, 234));
        when(webElement.getSize()).thenReturn(DIMENSION);
        doReturn(null).when(uiContext).getSearchContext();
        Coords coords = coordsProvider.ofElement(null, webElement);
        Assertions.assertAll(() -> assertEquals(0, coords.getX()),
                () -> assertEquals(134, coords.getY()),
                () -> assertEquals(1, coords.getWidth()),
                () -> assertEquals(1, coords.getHeight()));
    }

    @Test
    void shouldAdjustElementCoordsToTheCurrentSearchContext() {
        MobileAppCoordsProvider coordsProvider = new MobileAppCoordsProvider(true, driverManager, uiContext);
        // Element coords are made relative to the context element and clipped
        // to the context's bounds.
        WebElement contextElement = mock(WebElement.class);
        when(contextElement.getLocation()).thenReturn(POINT);
        when(contextElement.getSize()).thenReturn(new Dimension(100, 50));
        when(webElement.getLocation()).thenReturn(new Point(5, 15));
        when(webElement.getSize()).thenReturn(new Dimension(150, 30));
        doReturn(contextElement).when(uiContext).getSearchContext();
        Coords coords = coordsProvider.ofElement(null, webElement);
        Assertions.assertAll(() -> assertEquals(0, coords.getX()),
                () -> assertEquals(5, coords.getY()),
                () -> assertEquals(100, coords.getWidth()),
                () -> assertEquals(30, coords.getHeight()));
    }

    @Test
    void shouldNotAdjustCoordsForTheCurrentSearchContext() {
        MobileAppCoordsProvider coordsProvider = new MobileAppCoordsProvider(true, driverManager, uiContext);
        // When the target element IS the search context, coords are unchanged.
        WebElement contextElement = mock(WebElement.class);
        when(contextElement.getLocation()).thenReturn(POINT);
        when(contextElement.getSize()).thenReturn(new Dimension(100, 50));
        doReturn(contextElement).when(uiContext).getSearchContext();
        Coords coords = coordsProvider.ofElement(null, contextElement);
        Assertions.assertAll(
                () -> assertEquals(10, coords.getX()),
                () -> assertEquals(10, coords.getY()),
                () -> assertEquals(100, coords.getWidth()),
                () -> assertEquals(50, coords.getHeight()));
    }

    @ParameterizedTest
    @MethodSource("coordsSource")
    void testCoordsIsMultipliedWithDpr(boolean downscale, Coords expectedCoords) {
        MobileAppCoordsProvider coordsDecorator = new MobileAppCoordsProvider(downscale, driverManager, uiContext);
        WebElement element = mock(WebElement.class);
        when(element.getLocation()).thenReturn(POINT);
        when(element.getSize()).thenReturn(DIMENSION);
        doReturn(element).when(uiContext).getSearchContext();
        // lenient: the DPR stub is only consulted when downscale == false.
        lenient().when(driverManager.getDpr()).thenReturn(2.0);
        WebDriver driver = mock(WebDriver.class);
        assertEquals(expectedCoords, coordsDecorator.ofElement(driver, element));
    }

    // downscale=true leaves coords untouched; downscale=false multiplies by DPR (2.0).
    static Stream<Arguments> coordsSource() {
        return Stream.of(Arguments.of(true, new Coords(10, 10, 1, 1)),
                Arguments.of(false, new Coords(20, 20, 2, 2))
        );
    }
}
package com.loserico.cache.redis.cache.interfaze;

import java.time.LocalDateTime;
import java.util.concurrent.TimeUnit;

/**
 * Supports setting an expiration time on a cached object and performing TTL
 * (time-to-live) operations on it.
 * <p>
 * Copyright: Copyright (c) 2018-05-19 21:18
 * <p>
 * Company: DataSense
 * <p>
 * @author Rico Yu ricoyu520@gmail.com
 * @version 1.0
 * @on
 */
public interface Expirable {

	/**
	 * Set a timeout for object. After the timeout has expired,
	 * the key will automatically be deleted.
	 *
	 * @param timeToLive - timeout before object will be deleted
	 * @param timeUnit - timeout time unit
	 * @return <code>true</code> if the timeout was set and <code>false</code> if not
	 */
	boolean expire(long timeToLive, TimeUnit timeUnit);

	/**
	 * Set an expire date for object. When expire date comes
	 * the key will automatically be deleted.
	 *
	 * @param timestamp - expire date in milliseconds (Unix timestamp)
	 * @return <code>true</code> if the timeout was set and <code>false</code> if not
	 */
	boolean expireAt(long timestamp);

	/**
	 * Set an expire date for object. When expire date comes
	 * the key will automatically be deleted.
	 *
	 * @param localDateTime - expire date
	 * @return <code>true</code> if the timeout was set and <code>false</code> if not
	 */
	boolean expireAt(LocalDateTime localDateTime);

	/**
	 * Clear an expire timeout or expire date for object.
	 *
	 * @return <code>true</code> if timeout was removed
	 *         <code>false</code> if object does not exist or does not have an associated timeout
	 */
	boolean clearExpire();

	/**
	 * Remaining time to live of Redisson object that has a timeout.
	 *
	 * @return time in milliseconds
	 *         -2 if the key does not exist.
	 *         -1 if the key exists but has no associated expire.
	 */
	long remainTimeToLive();
}
package com.example.practice.common;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import java.util.*;

/**
 * Order-sensitive, element-wise equality check for collections, together with
 * its unit tests.
 */
public class ListUtil2 {

    /**
     * Returns {@code true} when both collections contain equal elements in the same
     * iteration order. Two {@code null} references are considered equal to each other;
     * a {@code null} and a non-null collection are not.
     */
    public static boolean isEqualList(final Collection<?> list1, final Collection<?> list2) {
        if (list1 == list2) {
            return true;
        }
        // Both-null was handled above, so this triggers only when exactly one is null.
        if (list1 == null || list2 == null) {
            return false;
        }
        if (list1.size() != list2.size()) {
            return false;
        }
        final Iterator<?> first = list1.iterator();
        final Iterator<?> second = list2.iterator();
        while (first.hasNext() && second.hasNext()) {
            // Objects.equals treats (null, null) as equal and (null, x) as unequal.
            if (!Objects.equals(first.next(), second.next())) {
                return false;
            }
        }
        return !first.hasNext() && !second.hasNext();
    }

    @Test
    public void test0() {
        // Equal nested lists
        List<List<Integer>> expected = Arrays.asList(Arrays.asList(1, 2, 3), Arrays.asList(1, 5));
        List<List<Integer>> actual = Arrays.asList(Arrays.asList(1, 2, 3), Arrays.asList(1, 5));
        Assertions.assertTrue(ListUtil2.isEqualList(expected, actual));
    }

    @Test
    public void test00() {
        // Nested lists differing in one inner element
        List<List<Integer>> expected = Arrays.asList(Arrays.asList(1, 2, 3), Arrays.asList(1, 5));
        List<List<Integer>> actual = Arrays.asList(Arrays.asList(1, 2, 3), Arrays.asList(1, 6));
        Assertions.assertFalse(ListUtil2.isEqualList(expected, actual));
    }

    @Test
    public void test111() {
        // Triply nested equal lists
        List<List<List<Integer>>> expected =
                Arrays.asList(Arrays.asList(Arrays.asList(1, 2, 3), Arrays.asList(1, 5)));
        List<List<List<Integer>>> actual =
                Arrays.asList(Arrays.asList(Arrays.asList(1, 2, 3), Arrays.asList(1, 5)));
        Assertions.assertTrue(ListUtil2.isEqualList(expected, actual));
    }

    @Test
    public void test1() {
        // Two null references are considered equal
        Assertions.assertTrue(ListUtil2.isEqualList(null, null));
    }

    @Test
    public void test2() {
        // Non-null vs null
        Assertions.assertFalse(ListUtil2.isEqualList(new ArrayList<>(), null));
    }

    @Test
    public void test3() {
        // Null vs non-null
        Assertions.assertFalse(ListUtil2.isEqualList(null, new ArrayList<>()));
    }

    @Test
    public void test4() {
        // Different sizes
        Assertions.assertFalse(ListUtil2.isEqualList(Arrays.asList(1, 2, 3, 4), Arrays.asList(1, 2, 3)));
    }

    @Test
    public void test5() {
        Assertions.assertTrue(ListUtil2.isEqualList(Arrays.asList(1, 2, 3, 4), Arrays.asList(1, 2, 3, 4)));
    }

    @Test
    public void test6() {
        // Same elements, different order
        Assertions.assertFalse(ListUtil2.isEqualList(Arrays.asList(1, 2, 3, 4), Arrays.asList(1, 3, 2, 4)));
    }

    @Test
    public void test7() {
        // Matching null positions but one differing non-null value
        Assertions.assertFalse(ListUtil2.isEqualList(Arrays.asList(1, null, 3, 4), Arrays.asList(1, null, 2, 4)));
    }

    @Test
    public void test8() {
        Assertions.assertTrue(
                ListUtil2.isEqualList(Arrays.asList(null, null, null, 4), Arrays.asList(null, null, null, 4)));
    }

    @Test
    public void test9() {
        // All-null lists
        Assertions.assertTrue(
                ListUtil2.isEqualList(Arrays.asList(null, null, null, null), Arrays.asList(null, null, null, null)));
    }

    @Test
    public void test10() {
        List<Float> a = Arrays.asList(1.0f, null, null, null);
        List<Float> b = Arrays.asList(1.0f, null, null, null);
        Assertions.assertTrue(ListUtil2.isEqualList(a, b));
    }

    @Test
    public void test11() {
        List<Double> a = Arrays.asList(1.0d, null, null, null);
        List<Double> b = Arrays.asList(1.0d, null, null, null);
        Assertions.assertTrue(ListUtil2.isEqualList(a, b));
    }

    @Test
    public void test12() {
        // Element types differ, so no pair is equal
        Assertions.assertFalse(ListUtil2.isEqualList(Arrays.asList("1", "2"), Arrays.asList(1.0d, 2.0d)));
    }
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * Created on Feb 8, 2005
 *
 */
package ghidra.app.plugin.processors.sleigh.symbol;

import java.util.ArrayList;

import ghidra.app.plugin.processors.sleigh.FixedHandle;
import ghidra.app.plugin.processors.sleigh.ParserWalker;
import ghidra.app.plugin.processors.sleigh.SleighLanguage;
import ghidra.app.plugin.processors.sleigh.expression.OperandValue;
import ghidra.app.plugin.processors.sleigh.expression.PatternExpression;
import ghidra.program.model.mem.MemoryAccessException;
import ghidra.util.xml.SpecXmlUtils;
import ghidra.xml.XmlElement;
import ghidra.xml.XmlPullParser;

/**
 * Variable representing an operand to a specific Constructor.
 * <p>
 * An operand is defined either by a {@link TripleSymbol} (field {@code triple})
 * or by a {@link PatternExpression} (field {@code defexp}) — exactly one of the
 * two is non-null after {@link #restoreXml}.
 */
public class OperandSymbol extends SpecificSymbol {

	private int reloffset; // Relative offset
	// NOTE: Additional offset, specified in bytes (to the right)
	private int offsetbase; // Base operand to which offset is relative
	// NOTE: Relative to the end of the specified operand, given by index
	private int minimumlength; // Minimum size of operand (within tokens)
	private int hand; // Index of this operand in constructor
	private OperandValue localexp; // Expression giving this operand's value
	private TripleSymbol triple; // Defining symbol
	private PatternExpression defexp; // OR defining expression
	private boolean codeaddress; // true if the operand is used as an address

	/** @return additional byte offset of this operand relative to its base operand */
	public int getRelativeOffset() {
		return reloffset;
	}

	/** @return index of the operand this one's offset is relative to */
	public int getOffsetBase() {
		return offsetbase;
	}

	/** @return minimum size of the operand within tokens */
	public int getMinimumLength() {
		return minimumlength;
	}

	/** @return the defining pattern expression, or null if defined by a symbol */
	public PatternExpression getDefiningExpression() {
		return defexp;
	}

	/** @return the defining symbol, or null if defined by an expression */
	public TripleSymbol getDefiningSymbol() {
		return triple;
	}

	/** @return the index of this operand within its constructor */
	public int getIndex() {
		return hand;
	}

	/** @return true if this operand is used as a code address */
	public boolean isCodeAddress() {
		return codeaddress;
	}

	/* (non-Javadoc)
	 * @see ghidra.app.plugin.processors.sleigh.symbol.TripleSymbol#getPatternExpression()
	 */
	@Override
	public PatternExpression getPatternExpression() {
		return localexp;
	}

	/* (non-Javadoc)
	 * @see ghidra.app.plugin.processors.sleigh.symbol.TripleSymbol#getFixedHandle(ghidra.app.plugin.processors.sleigh.FixedHandle, ghidra.app.plugin.processors.sleigh.ParserWalker)
	 */
	// Copies the handle already resolved for this operand (by index) out of the
	// walker's state, field by field, into the caller-supplied handle.
	@Override
	public void getFixedHandle(FixedHandle hnd, ParserWalker walker) {
		FixedHandle h = walker.getFixedHandle(hand);
		hnd.space = h.space;
		hnd.offset_space = h.offset_space;
		hnd.offset_offset = h.offset_offset;
		hnd.offset_size = h.offset_size;
		hnd.size = h.size;
		hnd.temp_space = h.temp_space;
		hnd.temp_offset = h.temp_offset;
	}

	/* (non-Javadoc)
	 * @see ghidra.app.plugin.processors.sleigh.symbol.TripleSymbol#print(ghidra.app.plugin.processors.sleigh.ParserWalker)
	 */
	// Renders this operand as text: delegates to the defining symbol when there is
	// one, otherwise evaluates the defining expression to a signed hex constant.
	@Override
	public String print(ParserWalker walker) throws MemoryAccessException {
		String res;
		walker.pushOperand(hand); // descend into this operand's parse state
		if (triple != null) {
			if (triple instanceof SubtableSymbol)
				res = walker.getConstructor().print(walker);
			else
				res = triple.print(walker);
		}
		else { // Must be expression resulting in a constant
			long val = defexp.getValue(walker);
			if (val >= 0)
				res = "0x" + Long.toHexString(val);
			else
				res = "-0x" + Long.toHexString(-val);
		}
		walker.popOperand();
		return res;
	}

	// Appends this operand's printable pieces to the list. For an expression-defined
	// operand the parent handle is added; its offset_size is defaulted to the
	// current parse length when unset.
	@Override
	public void printList(ParserWalker walker, ArrayList<Object> list) throws MemoryAccessException {
		walker.pushOperand(hand);
		if (triple != null) {
			if (triple instanceof SubtableSymbol)
				walker.getConstructor().printList(walker, list);
			else
				triple.printList(walker, list);
		}
		else {
			FixedHandle handle = walker.getParentHandle();
			if (handle.offset_size == 0) {
				handle.offset_size = walker.getCurrentLength();
			}
			list.add(handle);
		}
		walker.popOperand();
	}

	/* (non-Javadoc)
	 * @see ghidra.app.plugin.processors.sleigh.symbol.Symbol#restoreXml(org.jdom.Element, ghidra.app.plugin.processors.sleigh.SleighLanguage)
	 */
	// Restores this operand from an <operand_sym> element: scalar attributes first,
	// then the optional defining symbol reference, then the local value expression,
	// and finally — only if another child element remains — the defining expression.
	@Override
	public void restoreXml(XmlPullParser parser, SleighLanguage lang) {
		XmlElement el = parser.start("operand_sym");
		defexp = null;
		triple = null;
		codeaddress = false;
		hand = SpecXmlUtils.decodeInt(el.getAttribute("index"));
		reloffset = SpecXmlUtils.decodeInt(el.getAttribute("off"));
		offsetbase = SpecXmlUtils.decodeInt(el.getAttribute("base"));
		minimumlength = SpecXmlUtils.decodeInt(el.getAttribute("minlen"));
		String attrstr = el.getAttribute("subsym");
		if (attrstr != null) {
			// Operand defined by a symbol: look it up in the language's symbol table
			int id = SpecXmlUtils.decodeInt(attrstr);
			triple = (TripleSymbol) lang.getSymbolTable().findSymbol(id);
		}
		codeaddress = SpecXmlUtils.decodeBoolean(el.getAttribute("code"));
		localexp = (OperandValue) PatternExpression.restoreExpression(parser, lang);
		if (!parser.peek().isEnd())
			defexp = PatternExpression.restoreExpression(parser, lang);
		parser.end(el);
	}

	@Override
	public String toString() {
		return this.getName() + " : " + this.getId();
	}
}
/*
 * Copyright (C) 2018 Seoul National University
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.snu.nemo.compiler.optimizer.pass.compiletime.reshaping;

import edu.snu.nemo.common.dag.DAG;
import edu.snu.nemo.common.dag.DAGBuilder;
import edu.snu.nemo.common.ir.edge.IREdge;
import edu.snu.nemo.common.ir.edge.executionproperty.CommunicationPatternProperty;
import edu.snu.nemo.common.ir.edge.executionproperty.DecoderProperty;
import edu.snu.nemo.common.ir.edge.executionproperty.EncoderProperty;
import edu.snu.nemo.common.ir.vertex.IRVertex;
import edu.snu.nemo.common.ir.vertex.MetricCollectionBarrierVertex;
import edu.snu.nemo.common.ir.vertex.OperatorVertex;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Pass to modify the DAG for a job to perform data skew.
 * It adds a {@link MetricCollectionBarrierVertex} before Shuffle edges, to make a barrier before it,
 * and to use the metrics to repartition the skewed data.
 * NOTE: we currently put the SkewCompositePass at the end of the list for each policies, as it needs to take
 * a snapshot at the end of the pass. This could be prevented by modifying other passes to take the snapshot of the
 * DAG at the end of each passes for metricCollectionVertices.
 */
public final class SkewReshapingPass extends ReshapingPass {

  /**
   * Default constructor.
   */
  public SkewReshapingPass() {
    super(Collections.singleton(CommunicationPatternProperty.class));
  }

  /**
   * Rebuilds the DAG, inserting a {@link MetricCollectionBarrierVertex} in front of every
   * Shuffle edge into an {@link OperatorVertex}, then takes a snapshot of the new DAG
   * for each inserted barrier vertex.
   *
   * @param dag the DAG to reshape.
   * @return the reshaped DAG.
   */
  @Override
  public DAG<IRVertex, IREdge> apply(final DAG<IRVertex, IREdge> dag) {
    final DAGBuilder<IRVertex, IREdge> builder = new DAGBuilder<>();
    final List<MetricCollectionBarrierVertex> metricCollectionVertices = new ArrayList<>();

    dag.topologicalDo(v -> {
      // We care about OperatorVertices that have any incoming edges that are of type Shuffle.
      if (v instanceof OperatorVertex && dag.getIncomingEdgesOf(v).stream().anyMatch(irEdge ->
          CommunicationPatternProperty.Value.Shuffle
          .equals(irEdge.getPropertyValue(CommunicationPatternProperty.class).get()))) {
        final MetricCollectionBarrierVertex<Integer, Long> metricCollectionBarrierVertex
            = new MetricCollectionBarrierVertex<>();
        metricCollectionVertices.add(metricCollectionBarrierVertex);
        builder.addVertex(v);
        builder.addVertex(metricCollectionBarrierVertex);
        dag.getIncomingEdgesOf(v).forEach(edge -> {
          // we insert the metric collection vertex when we meet a shuffle edge
          if (CommunicationPatternProperty.Value.Shuffle
              .equals(edge.getPropertyValue(CommunicationPatternProperty.class).get())) {
            // We then insert the dynamicOptimizationVertex between the vertex and incoming vertices.
            // src --(OneToOne, same coders)--> barrier --(original pattern + copied properties)--> v
            final IREdge newEdge = new IREdge(CommunicationPatternProperty.Value.OneToOne,
                edge.getSrc(), metricCollectionBarrierVertex);
            newEdge.setProperty(EncoderProperty.of(edge.getPropertyValue(EncoderProperty.class).get()));
            newEdge.setProperty(DecoderProperty.of(edge.getPropertyValue(DecoderProperty.class).get()));

            final IREdge edgeToGbK = new IREdge(edge.getPropertyValue(CommunicationPatternProperty.class).get(),
                metricCollectionBarrierVertex, v, edge.isSideInput());
            edge.copyExecutionPropertiesTo(edgeToGbK);
            builder.connectVertices(newEdge);
            builder.connectVertices(edgeToGbK);
          } else {
            // Non-shuffle incoming edges are kept as they are.
            builder.connectVertices(edge);
          }
        });
      } else { // Others are simply added to the builder, unless it comes from an updated vertex
        builder.addVertex(v);
        dag.getIncomingEdgesOf(v).forEach(builder::connectVertices);
      }
    });
    final DAG<IRVertex, IREdge> newDAG = builder.build();
    // Each barrier vertex keeps a snapshot of the finished DAG for later dynamic optimization.
    metricCollectionVertices.forEach(v -> v.setDAGSnapshot(newDAG));
    return newDAG;
  }
}
package org.cipango.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.Random;

import org.cipango.util.TimerQueue.Node;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for {@link TimerQueue}: ordering on poll, removal and rescheduling of nodes.
 */
public class TimerQueueTest
{
    private Random _random = new Random();
    private TimerQueue<Node> _queue;

    @Before
    public void setUp()
    {
        _queue = new TimerQueue<Node>();
    }

    // Offers nb nodes with random non-negative values and checks the resulting size.
    void fillQueue(int nb)
    {
        int added = 0;
        while (added < nb)
        {
            _queue.offer(new Node(Math.abs(_random.nextLong())));
            added++;
        }
        assertEquals(nb, _queue.getSize());
    }

    @Test
    public void testPriority()
    {
        fillQueue(10000);
        checkPriority();
    }

    // Drains the queue and asserts that values come out in non-decreasing order.
    void checkPriority()
    {
        long previous = -1;
        while (_queue.getSize() > 0)
        {
            Node head = _queue.poll();
            assertTrue(previous <= head.getValue());
            previous = head.getValue();
        }
    }

    @Test
    public void testFailed()
    {
        // Removing the only offered node must not fail
        Node node = new Node(1);
        _queue.offer(node);
        _queue.remove(node);
    }

    @Test
    public void testRemove()
    {
        for (int iteration = 0; iteration < 1000; iteration++)
        {
            fillQueue(100);
            Node[] snapshot = _queue.toArray();
            // Remove up to 500 randomly chosen nodes (duplicates are skipped via null marker)
            for (int k = 0; k < 500; k++)
            {
                int index = _random.nextInt(snapshot.length);
                Node victim = snapshot[index];
                snapshot[index] = null;
                if (victim != null)
                    _queue.remove(victim);
            }
            checkPriority();
            while (_queue.getSize() > 0)
                _queue.poll();
        }
    }

    @Test
    public void testReschedule()
    {
        for (int iteration = 0; iteration < 1000; iteration++)
        {
            fillQueue(100);
            Node[] snapshot = _queue.toArray();
            // Re-offer randomly chosen nodes with new random values
            for (int k = 0; k < 500; k++)
            {
                int index = _random.nextInt(snapshot.length);
                _queue.offer(snapshot[index], Math.abs(_random.nextLong()));
            }
            checkPriority();
            while (_queue.getSize() > 0)
                _queue.poll();
        }
    }
}
package mb.statix.scopegraph.diff;

import java.util.Deque;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.PriorityQueue;
import java.util.Queue;

import org.metaborg.util.functions.Function1;

import com.google.common.collect.Lists;
import com.google.common.collect.Streams;

import io.usethesource.capsule.Map;
import io.usethesource.capsule.Map.Transient;
import io.usethesource.capsule.Set;
import io.usethesource.capsule.util.stream.CapsuleCollectors;

import mb.statix.scopegraph.IScopeGraph;

/**
 * Computes a diff between two scope graphs ({@code current} and {@code previous}) that
 * share a common root scope {@code s0}: matched scopes/edges, plus the scopes and edges
 * that were added or removed.
 */
public class ScopeGraphDiffer<S, L, D> {

    private final S s0; // common root scope, matched to itself to seed the diff
    private final IScopeGraph.Immutable<S, L, D> current;
    private final IScopeGraph.Immutable<S, L, D> previous;
    private final Set.Immutable<L> labels; // union of edge labels from both graphs
    private final ScopeGraphDifferOps<S, D> diffOps;

    // Accumulated bidirectional matches: current -> previous.
    private final BiMap.Transient<S> matchedScopes = BiMap.Transient.of();
    private final BiMap.Transient<Edge<S, L>> matchedEdges = BiMap.Transient.of();

    // Everything visited during matching; unmatched leftovers become added/removed.
    private final Set.Transient<S> seenCurrentScopes = Set.Transient.of();
    private final Set.Transient<Edge<S, L>> seenCurrentEdges = Set.Transient.of();
    private final Set.Transient<S> seenPreviousScopes = Set.Transient.of();
    private final Set.Transient<Edge<S, L>> seenPreviousEdges = Set.Transient.of();

    // Edge matches ordered by number of candidate previous edges (fewest first;
    // see EdgeMatch.compareTo), so the most constrained matches are decided first.
    private final Queue<EdgeMatch> worklist = new PriorityQueue<>();

    private ScopeGraphDiffer(S s0, IScopeGraph.Immutable<S, L, D> current, IScopeGraph.Immutable<S, L, D> previous,
            ScopeGraphDifferOps<S, D> diffOps) {
        this.s0 = s0;
        this.current = current;
        this.previous = previous;
        this.labels = Set.Immutable.union(current.getEdgeLabels(), previous.getEdgeLabels());
        this.diffOps = diffOps;
    }

    /**
     * Runs the diff: seed with the root match, process the edge-match worklist to a
     * fixpoint, then sweep both graphs for seen-but-unmatched scopes/edges, which are
     * reported as added (current side) or removed (previous side).
     */
    private ScopeGraphDiff<S, L, D> doDiff() {
        if(!matchScopes(BiMap.Immutable.of(s0, s0))) {
            // The root must always match itself.
            throw new IllegalStateException();
        }
        while(!worklist.isEmpty()) {
            final EdgeMatch m = worklist.remove();
            matchEdge(m.currentEdge, m.previousEdges);
        }
        final Map.Transient<S, D> addedScopes = Map.Transient.of();
        final Set.Transient<Edge<S, L>> addedEdges = Set.Transient.of();
        finishDiff(current, diffOps::getCurrentScopes, seenCurrentScopes, seenCurrentEdges, matchedScopes.keySet(),
                matchedEdges.keySet(), addedScopes, addedEdges);
        final Map.Transient<S, D> removedScopes = Map.Transient.of();
        final Set.Transient<Edge<S, L>> removedEdges = Set.Transient.of();
        finishDiff(previous, diffOps::getPreviousScopes, seenPreviousScopes, seenPreviousEdges,
                matchedScopes.valueSet(), matchedEdges.valueSet(), removedScopes, removedEdges);
        final ScopeGraphDiff<S, L, D> diff = new ScopeGraphDiff<>(matchedScopes.freeze(), matchedEdges.freeze(),
                addedScopes.freeze(), addedEdges.freeze(), removedScopes.freeze(), removedEdges.freeze());
        return diff;
    }

    /**
     * Collects all seen-but-unmatched scopes and edges of one graph into
     * {@code missedScopes}/{@code missedEdges}, transitively following scopes reachable
     * through scope data and outgoing edges.
     */
    private static <S, L, D> void finishDiff(IScopeGraph<S, L, D> scopeGraph,
            Function1<D, java.util.Set<S>> getScopes, Iterable<S> seenScopes, Iterable<Edge<S, L>> seenEdges,
            java.util.Set<S> matchedScopes, java.util.Set<Edge<S, L>> matchedEdges, Transient<S, D> missedScopes,
            Set.Transient<Edge<S, L>> missedEdges) {
        final Deque<S> scopeList = Lists.newLinkedList(seenScopes);
        final Deque<Edge<S, L>> edgeList = Lists.newLinkedList(seenEdges);
        while(!scopeList.isEmpty() || !edgeList.isEmpty()) {
            while(!scopeList.isEmpty()) {
                final S scope = scopeList.pop();
                if(matchedScopes.contains(scope) || missedScopes.containsKey(scope)) {
                    // already matched, or already recorded as missed
                    continue;
                }
                final Optional<D> datum = scopeGraph.getData(scope);
                missedScopes.__put(scope, datum.orElse(null));
                if(datum.isPresent()) {
                    // scopes embedded in the datum may also be missed
                    scopeList.addAll(getScopes.apply(datum.get()));
                }
                for(L label : scopeGraph.getEdgeLabels()) {
                    for(S target : scopeGraph.getEdges(scope, label)) {
                        final Edge<S, L> edge = new Edge<>(scope, label, target);
                        edgeList.add(edge);
                    }
                }
            }
            while(!edgeList.isEmpty()) {
                final Edge<S, L> edge = edgeList.pop();
                if(matchedEdges.contains(edge) || missedEdges.contains(edge)) {
                    continue;
                }
                missedEdges.__insert(edge);
                // the edge target may itself be a missed scope
                scopeList.add(edge.target);
            }
        }
    }

    /**
     * Match the scopes and schedule resulting edge matches.
     *
     * @return false when the proposed matches are inconsistent with the matches so far.
     */
    private boolean matchScopes(BiMap.Immutable<S> scopes) {
        seenCurrentScopes.__insertAll(scopes.keySet());
        seenPreviousScopes.__insertAll(scopes.valueSet());
        final BiMap<S> newMatches;
        if((newMatches = canScopesMatch(scopes).orElse(null)) == null) {
            return false;
        }
        matchedScopes.putAll(newMatches);
        for(Map.Entry<S, S> entry : newMatches.entrySet()) {
            final S currentScope = entry.getKey();
            final S previousScope = entry.getValue();
            // Scopes embedded in either datum have now been seen as well.
            final Optional<D> currentData = current.getData(currentScope);
            currentData.ifPresent(d -> seenCurrentScopes.__insertAll(diffOps.getCurrentScopes(d)));
            final Optional<D> previousData = previous.getData(previousScope);
            previousData.ifPresent(d -> seenPreviousScopes.__insertAll(diffOps.getPreviousScopes(d)));
            for(L label : labels) {
                scheduleEdgeMatches(currentScope, previousScope, label);
            }
        }
        return true;
    }

    /**
     * Check if scopes can match under the current match, and return the new matches it would introduce.
     */
    private Optional<BiMap.Immutable<S>> canScopesMatch(BiMap.Immutable<S> scopes) {
        final BiMap.Transient<S> newMatches = BiMap.Transient.of();
        for(Map.Entry<S, S> entry : scopes.entrySet()) {
            final S currentScope = entry.getKey();
            final S previousScope = entry.getValue();
            if(!diffOps.isMatchAllowed(currentScope, previousScope)) {
                return Optional.empty();
            } else if(!matchedScopes.canPut(currentScope, previousScope)) {
                // one of the scopes is already matched to a different partner
                return Optional.empty();
            } else if(matchedScopes.containsEntry(currentScope, previousScope)) {
                // skip this pair as it was already matched
            } else {
                newMatches.put(currentScope, previousScope);
            }
        }
        return Optional.of(newMatches.freeze());
    }

    /**
     * Schedule edge matches from the given source scopes.
     */
    private void scheduleEdgeMatches(S currentSource, S previousSource, L label) {
        final Set.Immutable<Edge<S, L>> currentEdges = Streams.stream(current.getEdges(currentSource, label))
                .map(currentTarget -> new Edge<>(currentSource, label, currentTarget))
                .collect(CapsuleCollectors.toSet());
        this.seenCurrentEdges.__insertAll(currentEdges);
        final Set.Immutable<Edge<S, L>> previousEdges = Streams.stream(previous.getEdges(previousSource, label))
                .map(previousTarget -> new Edge<>(previousSource, label, previousTarget))
                .collect(CapsuleCollectors.toSet());
        this.seenPreviousEdges.__insertAll(previousEdges);
        for(Edge<S, L> currentEdge : currentEdges) {
            // For each current edge, collect every previous edge whose targets could
            // match, together with the scope matches that match would require.
            final Map.Transient<Edge<S, L>, BiMap.Immutable<S>> matchingPreviousEdges = Map.Transient.of();
            for(Edge<S, L> previousEdge : previousEdges) {
                final BiMap.Immutable<S> req;
                if((req = matchScopes(currentEdge.target, previousEdge.target).orElse(null)) != null) {
                    if(!canScopesMatch(req).isPresent()) {
                        // already discard options that are not possible under current matches
                        continue;
                    }
                    matchingPreviousEdges.__put(previousEdge, req);
                }
            }
            final EdgeMatch match = new EdgeMatch(currentEdge, matchingPreviousEdges.freeze());
            worklist.add(match);
        }
    }

    // Commits the first candidate previous edge whose required scope matches still
    // hold. If none applies, the edge stays unmatched and is later reported by
    // finishDiff as added/removed.
    private void matchEdge(Edge<S, L> currentEdge, Map.Immutable<Edge<S, L>, BiMap.Immutable<S>> previousEdges) {
        for(Entry<Edge<S, L>, BiMap.Immutable<S>> previousEdge : previousEdges.entrySet()) {
            if(matchScopes(previousEdge.getValue())) {
                matchedEdges.put(currentEdge, previousEdge.getKey());
                return;
            }
        }
    }

    /**
     * Compute the patch required to match two scopes and their data.
     */
    private Optional<BiMap.Immutable<S>> matchScopes(S currentScope, S previousScope) {
        final BiMap.Transient<S> _matches = BiMap.Transient.of();
        if(!scopeMatch(currentScope, previousScope, _matches)) {
            return Optional.empty();
        }
        final BiMap.Immutable<S> matches = _matches.freeze();
        return Optional.of(matches);
    }

    // Recursive check whether two scopes can match, accumulating all scope pairs the
    // match would require (including pairs demanded by matching their data) in req.
    private boolean scopeMatch(S currentScope, S previousScope, BiMap.Transient<S> req) {
        if(!diffOps.isMatchAllowed(currentScope, previousScope)) {
            return false;
        }
        if(!req.canPut(currentScope, previousScope)) {
            return false;
        }
        if(req.containsEntry(currentScope, previousScope)) {
            return true;
        }
        req.put(currentScope, previousScope);
        final Optional<D> currentData = current.getData(currentScope);
        final Optional<D> previousData = previous.getData(previousScope);
        if(currentData.isPresent() != previousData.isPresent()) {
            // one scope has data, the other does not
            return false;
        }
        if(currentData.isPresent() && previousData.isPresent()) {
            if(!diffOps.matchDatums(currentData.get(), previousData.get(),
                    (leftScope, rightScope) -> scopeMatch(leftScope, rightScope, req))) {
                // data cannot be matched
                return false;
            }
        }
        return true;
    }

    /**
     * Entry point: diff two scope graphs rooted at the shared scope {@code s0}.
     */
    public static <S, L, D> ScopeGraphDiff<S, L, D> diff(S s0, IScopeGraph.Immutable<S, L, D> current,
            IScopeGraph.Immutable<S, L, D> previous, ScopeGraphDifferOps<S, D> diffOps) {
        return new ScopeGraphDiffer<>(s0, current, previous, diffOps).doDiff();
    }

    // Worklist entry: one current edge and its candidate previous edges, each paired
    // with the scope matches its selection would require.
    private class EdgeMatch implements Comparable<EdgeMatch> {

        public final Edge<S, L> currentEdge;
        public final Map.Immutable<Edge<S, L>, BiMap.Immutable<S>> previousEdges;

        public EdgeMatch(Edge<S, L> currentEdge, Map.Immutable<Edge<S, L>, BiMap.Immutable<S>> previousEdges) {
            this.currentEdge = currentEdge;
            this.previousEdges = previousEdges;
        }

        // Fewer candidates sort first. NOTE(review): int subtraction is safe here only
        // because sizes are non-negative; Integer.compare would be the defensive form.
        @Override public int compareTo(EdgeMatch that) {
            return this.previousEdges.size() - that.previousEdges.size();
        }

        @Override public String toString() {
            return currentEdge + " ~ " + previousEdges;
        }

    }

}
package cn.elwy.eplus.framework.support;

import java.util.Map;

/**
 * A named group of {@link Item} entries, keyed by item name.
 *
 * @author huangsq
 * @version 1.0, 2018-02-19
 */
public class Group {

	private String name;
	private Map<String, Item> items;

	/**
	 * Creates a group with the given name and item map.
	 *
	 * @param name  the group name
	 * @param items the items of the group, keyed by item name
	 */
	public Group(String name, Map<String, Item> items) {
		this.name = name;
		this.items = items;
	}

	/** @return the group name */
	public String getName() {
		return name;
	}

	/** @param name the new group name */
	public void setName(String name) {
		this.name = name;
	}

	/** @return the items of this group, keyed by item name */
	public Map<String, Item> getItems() {
		return items;
	}

	/** @param items the new item map */
	public void setItems(Map<String, Item> items) {
		this.items = items;
	}
}
/**
 * Copyright (c) 2016, 2021, Oracle and/or its affiliates.  All rights reserved.
 * This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
 */
package com.oracle.bmc.cloudguard.requests;

import com.oracle.bmc.cloudguard.model.*;

/**
 * Request object for summarized security scores.
 * <p>
 * NOTE: generated code — Lombok {@code @Builder}/{@code @Getter} generate the fluent
 * field setters and the getters referenced below; avoid hand-editing.
 * <p>
 * <b>Example: </b>Click <a href="https://docs.cloud.oracle.com/en-us/iaas/tools/java-sdk-examples/latest/cloudguard/RequestSummarizedSecurityScoresExample.java.html" target="_blank" rel="noopener noreferrer">here</a> to see how to use RequestSummarizedSecurityScoresRequest.
 */
@javax.annotation.Generated(value = "OracleSDKGenerator", comments = "API Version: 20200131")
@lombok.Builder(builderClassName = "Builder", buildMethodName = "buildWithoutInvocationCallback")
@lombok.Getter
public class RequestSummarizedSecurityScoresRequest
        extends com.oracle.bmc.requests.BmcRequest<java.lang.Void> {

    /**
     * The ID of the compartment in which to list resources.
     */
    private String compartmentId;

    /**
     * The maximum number of items to return.
     */
    private Integer limit;

    /**
     * The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
     */
    private String page;

    /**
     * The client request ID for tracing.
     */
    private String opcRequestId;

    public static class Builder
            implements com.oracle.bmc.requests.BmcRequest.Builder<
                    RequestSummarizedSecurityScoresRequest, java.lang.Void> {
        private com.oracle.bmc.util.internal.Consumer<javax.ws.rs.client.Invocation.Builder>
                invocationCallback = null;
        private com.oracle.bmc.retrier.RetryConfiguration retryConfiguration = null;

        /**
         * Set the invocation callback for the request to be built.
         * @param invocationCallback the invocation callback to be set for the request
         * @return this builder instance
         */
        public Builder invocationCallback(
                com.oracle.bmc.util.internal.Consumer<javax.ws.rs.client.Invocation.Builder>
                        invocationCallback) {
            this.invocationCallback = invocationCallback;
            return this;
        }

        /**
         * Set the retry configuration for the request to be built.
         * @param retryConfiguration the retry configuration to be used for the request
         * @return this builder instance
         */
        public Builder retryConfiguration(
                com.oracle.bmc.retrier.RetryConfiguration retryConfiguration) {
            this.retryConfiguration = retryConfiguration;
            return this;
        }

        /**
         * Copy method to populate the builder with values from the given instance.
         * @param o the request instance to copy values from
         * @return this builder instance
         */
        public Builder copy(RequestSummarizedSecurityScoresRequest o) {
            // Field setters below are generated by Lombok's @Builder.
            compartmentId(o.getCompartmentId());
            limit(o.getLimit());
            page(o.getPage());
            opcRequestId(o.getOpcRequestId());
            invocationCallback(o.getInvocationCallback());
            retryConfiguration(o.getRetryConfiguration());
            return this;
        }

        /**
         * Build the instance of RequestSummarizedSecurityScoresRequest as configured by this builder
         *
         * Note that this method takes calls to {@link Builder#invocationCallback(com.oracle.bmc.util.internal.Consumer)} into account,
         * while the method {@link Builder#buildWithoutInvocationCallback} does not.
         *
         * This is the preferred method to build an instance.
         *
         * @return instance of RequestSummarizedSecurityScoresRequest
         */
        public RequestSummarizedSecurityScoresRequest build() {
            RequestSummarizedSecurityScoresRequest request = buildWithoutInvocationCallback();
            request.setInvocationCallback(invocationCallback);
            request.setRetryConfiguration(retryConfiguration);
            return request;
        }
    }
}
/*
 * Copyright 2002-2022 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.web.service.invoker;

import java.net.URI;

import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;

import org.springframework.http.HttpMethod;
import org.springframework.http.MediaType;
import org.springframework.web.util.UriComponentsBuilder;

import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for {@link HttpRequestValues}.
 *
 * @author Rossen Stoyanchev
 */
public class HttpRequestValuesTests {

	@Test
	void defaultUri() {
		// A freshly built instance has no URI and an empty URI template
		HttpRequestValues values = HttpRequestValues.builder(HttpMethod.GET).build();

		assertThat(values.getUri()).isNull();
		assertThat(values.getUriTemplate()).isEqualTo("");
	}

	@ParameterizedTest
	@ValueSource(strings = {"POST", "PUT", "PATCH"})
	void requestParamAsFormData(String httpMethod) {
		// With form content type, request parameters are encoded into the body
		HttpRequestValues values = HttpRequestValues.builder(HttpMethod.valueOf(httpMethod))
				.setContentType(MediaType.APPLICATION_FORM_URLENCODED)
				.addRequestParameter("param1", "1st value")
				.addRequestParameter("param2", "2nd value A", "2nd value B")
				.build();

		Object bodyValue = values.getBodyValue();
		assertThat(bodyValue).isNotNull().isInstanceOf(byte[].class);
		assertThat(new String((byte[]) bodyValue, UTF_8))
				.isEqualTo("param1=1st+value&param2=2nd+value+A&param2=2nd+value+B");
	}

	@Test
	void requestParamAsQueryParamsInUriTemplate() {
		// Without form content type, request parameters are appended to the URI
		// template as indexed template variables
		HttpRequestValues values = HttpRequestValues.builder(HttpMethod.POST)
				.setUriTemplate("/path")
				.addRequestParameter("param1", "1st value")
				.addRequestParameter("param2", "2nd value A", "2nd value B")
				.build();

		String template = values.getUriTemplate();
		assertThat(template).isNotNull();
		assertThat(template).isEqualTo("/path?"
				+ "{queryParam0}={queryParam0[0]}&"
				+ "{queryParam1}={queryParam1[0]}&"
				+ "{queryParam1}={queryParam1[1]}");

		assertThat(values.getUriVariables())
				.containsOnlyKeys("queryParam0", "queryParam1", "queryParam0[0]", "queryParam1[0]", "queryParam1[1]")
				.containsEntry("queryParam0", "param1")
				.containsEntry("queryParam1", "param2")
				.containsEntry("queryParam0[0]", "1st value")
				.containsEntry("queryParam1[0]", "2nd value A")
				.containsEntry("queryParam1[1]", "2nd value B");

		// Expanding the template produces a fully encoded query string
		URI expanded = UriComponentsBuilder.fromUriString(template)
				.encode()
				.build(values.getUriVariables());

		assertThat(expanded.toString())
				.isEqualTo("/path?param1=1st%20value&param2=2nd%20value%20A&param2=2nd%20value%20B");
	}

	@Test
	void requestParamAsQueryParamsInUri() {
		// With a concrete URI, request parameters are appended already encoded
		HttpRequestValues values = HttpRequestValues.builder(HttpMethod.POST)
				.setUri(URI.create("/path"))
				.addRequestParameter("param1", "1st value")
				.addRequestParameter("param2", "2nd value A", "2nd value B")
				.build();

		assertThat(values.getUri().toString())
				.isEqualTo("/path?param1=1st%20value&param2=2nd%20value%20A&param2=2nd%20value%20B");
	}

}
/*
 * Copyright 2015-2017 Alexandr Evstigneev
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package editor;

import com.perl5.lang.perl.idea.codeInsight.Perl5CodeInsightSettings;

/**
 * Typed-handler tests for Perl smart keys: smart comma sequences in hashes
 * and automatic double-colon insertion in {@code use} statements. Each test
 * toggles the relevant {@link Perl5CodeInsightSettings} flag before typing.
 */
public class PerlTypedHandlerTest extends PerlTypingTestCase {
  @Override
  protected String getTestDataPath() {
    return "testData/smartKeys/perl";
  }

  public void testSmartHashLonger() {
    runSmartCommaSequence(true);
  }

  public void testSmartHashShorter() {
    runSmartCommaSequence(true);
  }

  public void testSmartHashDisabled() {
    runSmartCommaSequence(false);
  }

  // Sets the smart-comma flag, then types a space and compares against
  // the test-data file named after the test.
  private void runSmartCommaSequence(boolean smartTypingEnabled) {
    Perl5CodeInsightSettings.getInstance().SMART_COMMA_SEQUENCE_TYPING = smartTypingEnabled;
    doTest(" ");
  }

  public void testDoubleColonInUseEnabled() {
    Perl5CodeInsightSettings.getInstance().AUTO_INSERT_COLON = true;
    doTest("use Mojolicious<caret>", ":", "use Mojolicious::<caret>");
  }

  public void testDoubleColonInUseDisabled() {
    Perl5CodeInsightSettings.getInstance().AUTO_INSERT_COLON = false;
    doTest("use Mojolicious<caret>", ":", "use Mojolicious:<caret>");
  }
}
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.stream.impl.reducers; import com.hazelcast.core.IList; import com.hazelcast.jet.core.DAG; import com.hazelcast.jet.core.Vertex; import com.hazelcast.jet.core.processor.SinkProcessors; import com.hazelcast.jet.stream.DistributedCollector.Reducer; import com.hazelcast.jet.stream.impl.pipeline.Pipe; import com.hazelcast.jet.stream.impl.pipeline.StreamContext; import com.hazelcast.jet.stream.impl.processor.AnyMatchP; import java.util.function.Predicate; import static com.hazelcast.jet.core.Edge.between; import static com.hazelcast.jet.stream.impl.StreamUtil.executeJob; import static com.hazelcast.jet.stream.impl.StreamUtil.uniqueListName; public class AnyMatchReducer<T> implements Reducer<T, Boolean> { private final Predicate<? super T> predicate; public AnyMatchReducer(Predicate<? super T> predicate) { this.predicate = predicate; } @Override public Boolean reduce(StreamContext context, Pipe<? 
extends T> upstream) { String listName = uniqueListName(); DAG dag = new DAG(); Vertex previous = upstream.buildDAG(dag); Vertex anyMatch = dag.newVertex("any-match", () -> new AnyMatchP<>(predicate)); Vertex writer = dag.newVertex("write-" + listName, SinkProcessors.writeListP(listName)); dag.edge(between(previous, anyMatch)) .edge(between(anyMatch, writer)); executeJob(context, dag); IList<Boolean> results = context.getJetInstance().getList(listName); boolean result = anyMatch(results); results.destroy(); return result; } private static boolean anyMatch(IList<Boolean> results) { for (Boolean result : results) { if (result) { return true; } } return false; } }
package in.srain.cube.image.iface;

import in.srain.cube.image.ImageLoadRequest;

/**
 * Strategy interface for deriving an identity string from an image load
 * request. The identity is presumably used to key the request (e.g. for
 * caching/de-duplication) — confirm against callers.
 */
public interface NameGenerator {

    /**
     * Generates the identity string for the given request.
     *
     * @param request the image load request to derive an identity for; must
     *                not be {@code null}
     * @return the identity string for {@code request}
     */
    String generateIdentityUrlFor(ImageLoadRequest request);
}
package com.example.forjwebapp.module.user.service;

import com.example.forjwebapp.module.user.dto.SignIn;
import com.example.forjwebapp.module.user.dto.SignUp;

/**
 * User account operations: registration, existence checks, login and lookup.
 */
public interface UserService {

    /**
     * Persists the sign-up data for a new user.
     *
     * @param signUpRequestDto the registration payload
     * @return a result string (presumably an identifier or status message —
     *         confirm against the implementation)
     */
    String saveUserData(SignUp.Request signUpRequestDto);

    /**
     * Checks whether a user with the given e-mail address already exists.
     *
     * @param email the e-mail address to look up
     * @return {@code true} if a matching user exists
     */
    boolean isExistsUserByEmail(String email);

    /**
     * Attempts to authenticate a user with the supplied credentials.
     *
     * @param signInRequestDto the login payload
     * @return {@code true} if authentication succeeded
     */
    boolean userLogin(SignIn.Request signInRequestDto);

    /**
     * Retrieves the stored sign-up data for the given username.
     *
     * @param username the username to look up
     * @return the user's sign-up data
     */
    SignUp.Request getUserData(String username);
}
/* Copyright (c) 2001-2009, The HSQL Development Group
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of the HSQL Development Group nor the names of its
 * contributors may be used to endorse or promote products derived from this
 * software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
 * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.hsqldb_voltpatches;

import org.hsqldb_voltpatches.result.Result;
import org.hsqldb_voltpatches.types.Type;
import org.hsqldb_voltpatches.result.ResultConstants;
import org.hsqldb_voltpatches.persist.PersistentStore;
import org.hsqldb_voltpatches.lib.HashMappedList;
import org.hsqldb_voltpatches.lib.ArrayUtil;
import org.hsqldb_voltpatches.navigator.RowSetNavigator;
import org.hsqldb_voltpatches.navigator.RowSetNavigatorClient;
import org.hsqldb_voltpatches.navigator.RowSetNavigatorLinkedList;

/**
 * DML statement that applies a cursor-positioned UPDATE, DELETE or INSERT
 * against the base table of an open result set. The cursor operation kind is
 * carried in {@code actionType}; the affected row is located via a row id
 * passed as the last dynamic argument.
 */
public class StatementResultUpdate extends StatementDML {

    // One of ResultConstants.UPDATE_CURSOR / DELETE_CURSOR / INSERT_CURSOR.
    int actionType;
    // Column types for the incoming dynamic arguments; SQL_ALL_TYPES marks
    // positions that carry no new value (see getResult).
    Type[] types;

    StatementResultUpdate() {

        super();

        // Cursor updates always participate in the transaction.
        isTransactionStatement = true;
    }

    public String describe(Session session) {
        return "";
    }

    /**
     * Executes the cursor operation, converting any HsqlException into an
     * error Result instead of propagating it.
     */
    public Result execute(Session session) {

        try {
            return getResult(session);
        } catch (HsqlException e) {
            return Result.newErrorResult(e);
        }
    }

    /**
     * Performs the actual cursor UPDATE / DELETE / INSERT.
     *
     * <p>For UPDATE and DELETE the last dynamic argument is the target row id;
     * the remaining arguments are the new column values (UPDATE only), mapped
     * onto base-table columns through {@code baseColumnMap}.
     */
    Result getResult(Session session) {

        checkAccessRights(session);

        Object[] args = session.sessionContext.dynamicArguments;

        switch (actionType) {

            case ResultConstants.UPDATE_CURSOR : {

                // Row id travels as the last dynamic argument.
                Long id = (Long) args[args.length - 1];
                PersistentStore store =
                    session.sessionData.getRowStore(baseTable);
                Row row = (Row) store.get((int) id.longValue(), false);
                HashMappedList list = new HashMappedList();
                Object[] data =
                    (Object[]) ArrayUtil.duplicateArray(row.getData());

                for (int i = 0; i < baseColumnMap.length; i++) {

                    // SQL_ALL_TYPES marks columns the cursor did not assign.
                    if (types[i] == Type.SQL_ALL_TYPES) {
                        continue;
                    }

                    data[baseColumnMap[i]] = args[i];
                }

                list.add(row, data);
                update(session, baseTable, list);
                break;
            }
            case ResultConstants.DELETE_CURSOR : {

                Long id = (Long) args[args.length - 1];
                PersistentStore store =
                    session.sessionData.getRowStore(baseTable);
                Row row = (Row) store.get((int) id.longValue(), false);
                RowSetNavigator navigator = new RowSetNavigatorLinkedList();

                navigator.add(row);
                delete(session, baseTable, navigator);
                break;
            }
            case ResultConstants.INSERT_CURSOR : {

                Object[] data = baseTable.getNewRowData(session);

                // NOTE(review): iterates over data.length while indexing
                // baseColumnMap[i] — assumes the column map covers every base
                // column for inserts; confirm against the map's construction.
                for (int i = 0; i < data.length; i++) {
                    data[baseColumnMap[i]] = args[i];
                }

                PersistentStore store =
                    session.sessionData.getRowStore(baseTable);

                baseTable.insertRow(session, store, data);
            }
        }

        return Result.updateOneResult;
    }

    /**
     * Configures this statement for a specific cursor operation.
     *
     * @param action    one of the ResultConstants *_CURSOR codes
     * @param table     base table of the result set
     * @param types     argument types (SQL_ALL_TYPES = column untouched)
     * @param columnMap mapping from argument position to base-table column
     */
    void setRowActionProperties(int action, Table table, Type[] types,
                                int[] columnMap) {

        this.actionType    = action;
        this.baseTable     = table;
        this.types         = types;
        this.baseColumnMap = columnMap;
    }

/*
        Result result = getAccessRightsResult(session);

        if (result != null) {
            return result;
        }

        if (this.isExplain) {
            return Result.newSingleColumnStringResult("OPERATION",
                    describe(session));
        }

        try {
            materializeSubQueries(session, args);

            result = getResult(session);
        } catch (Throwable t) {
            String commandString = sql;

            if (session.database.getProperties().getErrorLevel()
                    == HsqlDatabaseProperties.NO_MESSAGE) {
                commandString = null;
            }

            result = Result.newErrorResult(t, commandString);

            if (result.isError()) {
                result.getException().setStatementType(group, type);
            }
        }

        session.sessionContext.clearStructures(this);

        return result;
*/

/*
        long   id         = cmd.getResultId();
        int    actionType = cmd.getActionType();
        Result result     = sessionData.getDataResult(id);
        Object[] pvals    = cmd.getParameterData();
        Type[] types      = cmd.metaData.columnTypes;
        StatementQuery statement = (StatementQuery) result.getValueObject();
        QueryExpression qe = statement.queryExpression;
        Table baseTable    = qe.getBaseTable();
        int[] columnMap    = qe.getBaseTableColumnMap();

        switch (actionType) {
            case ResultConstants.UPDATE_CURSOR :
            case ResultConstants.DELETE_CURSOR :
            case ResultConstants.INSERT_CURSOR :
        }

        return Result.updateZeroResult;
*/

    /**
     * Verifies the session's grantee has the privileges required by this
     * statement's type (insert/update/delete checks on the target table).
     * Statement types without a case (and CALL / SELECT_CURSOR) need no check.
     */
    void checkAccessRights(Session session) {

        switch (type) {

            case StatementTypes.CALL : {
                break;
            }
            case StatementTypes.INSERT : {
                session.getGrantee().checkInsert(targetTable,
                                                 insertCheckColumns);

                break;
            }
            case StatementTypes.SELECT_CURSOR :
                break;

            case StatementTypes.DELETE_WHERE : {
                session.getGrantee().checkDelete(targetTable);

                break;
            }
            case StatementTypes.UPDATE_WHERE : {
                session.getGrantee().checkUpdate(targetTable,
                                                 updateCheckColumns);

                break;
            }
            case StatementTypes.MERGE : {
                session.getGrantee().checkInsert(targetTable,
                                                 insertCheckColumns);
                session.getGrantee().checkUpdate(targetTable,
                                                 updateCheckColumns);

                break;
            }
        }
    }
}
/**
 * iBizSys 5.0 machine-generated code (do not modify this file directly).
 * http://www.ibizsys.net
 */
package com.sa.unip.srv.ywsp.demodel;

import net.ibizsys.paas.core.DEDataQuery;
import net.ibizsys.paas.core.DEDataQueryCodes;
import net.ibizsys.paas.core.DEDataQueryCode;
import net.ibizsys.paas.core.DEDataQueryCodeExp;
import net.ibizsys.paas.core.DEDataQueryCodeCond;

@DEDataQuery(id="2869E093-3A95-4439-BE52-76F7E0E2194F",name="YCDJ")
@DEDataQueryCodes({
    @DEDataQueryCode(querycode="SELECT t1.`CHSJ`, t1.`CHZT`, t1.`CREATEDATE`, t1.`CREATEMAN`, t1.`OA_CHRYID`, t1.`OA_CHRYNAME`, t21.`ORGSECUSERTYPENAME`, t1.`ORGUSERID`, t21.`ORGUSERNAME`, t1.`UPDATEDATE`, t1.`UPDATEMAN`, t1.`YBHYSQID`, t11.`OA_YBHYSQNAME` AS `YBHYSQNAME`, t1.`ZWH`, t1.`ZWHMS` FROM `T_OA_CHRY` t1 LEFT JOIN T_OA_YBHYSQ t11 ON t1.YBHYSQID = t11.OA_YBHYSQID LEFT JOIN T_SRFORGUSER t21 ON t1.ORGUSERID = t21.ORGUSERID ",querycodetemp="",declarecode="",dbtype="MYSQL5",
        fieldexps={
            @DEDataQueryCodeExp(name="CHSJ",expression="t1.`CHSJ`")
            ,@DEDataQueryCodeExp(name="CHZT",expression="t1.`CHZT`")
            ,@DEDataQueryCodeExp(name="CREATEDATE",expression="t1.`CREATEDATE`")
            ,@DEDataQueryCodeExp(name="CREATEMAN",expression="t1.`CREATEMAN`")
            ,@DEDataQueryCodeExp(name="OA_CHRYID",expression="t1.`OA_CHRYID`")
            ,@DEDataQueryCodeExp(name="OA_CHRYNAME",expression="t1.`OA_CHRYNAME`")
            ,@DEDataQueryCodeExp(name="ORGSECUSERTYPENAME",expression="t21.`ORGSECUSERTYPENAME`")
            ,@DEDataQueryCodeExp(name="ORGUSERID",expression="t1.`ORGUSERID`")
            ,@DEDataQueryCodeExp(name="ORGUSERNAME",expression="t21.`ORGUSERNAME`")
            ,@DEDataQueryCodeExp(name="UPDATEDATE",expression="t1.`UPDATEDATE`")
            ,@DEDataQueryCodeExp(name="UPDATEMAN",expression="t1.`UPDATEMAN`")
            ,@DEDataQueryCodeExp(name="YBHYSQID",expression="t1.`YBHYSQID`")
            ,@DEDataQueryCodeExp(name="YBHYSQNAME",expression="t11.`OA_YBHYSQNAME`")
            ,@DEDataQueryCodeExp(name="ZWH",expression="t1.`ZWH`")
            ,@DEDataQueryCodeExp(name="ZWHMS",expression="t1.`ZWHMS`")
        },
        conds={
            @DEDataQueryCodeCond(condition="( t1.`CHZT` = '40' )")
        })
})
/**
 * Entity data query [late-arrival headcount statistics] model base class.
 * (Generated; the query, its field expressions and conditions are declared
 * entirely through the annotations above.)
 */
public class OA_CHRYYCDJDQModel extends net.ibizsys.paas.demodel.DEDataQueryModelBase {

    public OA_CHRYYCDJDQModel() {
        super();
        // Registers this class's annotation metadata with the base model.
        this.initAnnotation(OA_CHRYYCDJDQModel.class);
    }
}
/* * Copyright ConsenSys AG. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * SPDX-License-Identifier: Apache-2.0 */ package org.hyperledger.besu.ethereum.core; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static org.hyperledger.besu.crypto.Hash.keccak256; import org.hyperledger.besu.crypto.KeyPair; import org.hyperledger.besu.crypto.SECPPublicKey; import org.hyperledger.besu.crypto.SECPSignature; import org.hyperledger.besu.crypto.SignatureAlgorithm; import org.hyperledger.besu.crypto.SignatureAlgorithmFactory; import org.hyperledger.besu.datatypes.Address; import org.hyperledger.besu.datatypes.Hash; import org.hyperledger.besu.datatypes.Wei; import org.hyperledger.besu.ethereum.core.encoding.TransactionDecoder; import org.hyperledger.besu.ethereum.core.encoding.TransactionEncoder; import org.hyperledger.besu.ethereum.rlp.RLP; import org.hyperledger.besu.ethereum.rlp.RLPInput; import org.hyperledger.besu.ethereum.rlp.RLPOutput; import org.hyperledger.besu.ethereum.transaction.GoQuorumPrivateTransactionDetector; import org.hyperledger.besu.evm.AccessListEntry; import org.hyperledger.besu.plugin.data.Quantity; import org.hyperledger.besu.plugin.data.TransactionType; import java.math.BigInteger; import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.stream.Stream; import org.apache.tuweni.bytes.Bytes; import 
org.apache.tuweni.bytes.Bytes32; import org.apache.tuweni.units.bigints.UInt256; /** An operation submitted by an external actor to be applied to the system. */ public class Transaction implements org.hyperledger.besu.plugin.data.Transaction, org.hyperledger.besu.plugin.data.UnsignedPrivateMarkerTransaction { // Used for transactions that are not tied to a specific chain // (e.g. does not have a chain id associated with it). public static final BigInteger REPLAY_UNPROTECTED_V_BASE = BigInteger.valueOf(27); public static final BigInteger REPLAY_UNPROTECTED_V_BASE_PLUS_1 = BigInteger.valueOf(28); public static final BigInteger REPLAY_PROTECTED_V_BASE = BigInteger.valueOf(35); public static final BigInteger GO_QUORUM_PRIVATE_TRANSACTION_V_VALUE_MIN = BigInteger.valueOf(37); public static final BigInteger GO_QUORUM_PRIVATE_TRANSACTION_V_VALUE_MAX = BigInteger.valueOf(38); // The v signature parameter starts at 36 because 1 is the first valid chainId so: // chainId > 1 implies that 2 * chainId + V_BASE > 36. public static final BigInteger REPLAY_PROTECTED_V_MIN = BigInteger.valueOf(36); public static final BigInteger TWO = BigInteger.valueOf(2); private final long nonce; private final Optional<Wei> gasPrice; private final Optional<Wei> maxPriorityFeePerGas; private final Optional<Wei> maxFeePerGas; private final long gasLimit; private final Optional<Address> to; private final Wei value; private final SECPSignature signature; private final Bytes payload; private final Optional<List<AccessListEntry>> maybeAccessList; private final Optional<BigInteger> chainId; private final Optional<BigInteger> v; // Caches a "hash" of a portion of the transaction used for sender recovery. // Note that this hash does not include the transaction signature so it does not // fully identify the transaction (use the result of the {@code hash()} for that). // It is only used to compute said signature and recover the sender from it. 
private volatile Bytes32 hashNoSignature; // Caches the transaction sender. protected volatile Address sender; // Caches the hash used to uniquely identify the transaction. protected volatile Hash hash; private final TransactionType transactionType; private final SignatureAlgorithm signatureAlgorithm = SignatureAlgorithmFactory.getInstance(); public static Builder builder() { return new Builder(); } public static Transaction readFrom(final Bytes rlpBytes) { return readFrom(RLP.input(rlpBytes)); } public static Transaction readFrom(final RLPInput rlpInput) { return TransactionDecoder.decodeForWire(rlpInput); } /** * Instantiates a transaction instance. * * @param transactionType the transaction type * @param nonce the nonce * @param gasPrice the gas price * @param maxPriorityFeePerGas the max priority fee per gas * @param maxFeePerGas the max fee per gas * @param gasLimit the gas limit * @param to the transaction recipient * @param value the value being transferred to the recipient * @param signature the signature * @param payload the payload * @param maybeAccessList the optional list of addresses/storage slots this transaction intends to * preload * @param sender the transaction sender * @param chainId the chain id to apply the transaction to * @param v the v value. This is only passed in directly for GoQuorum private transactions * (v=37|38). For all other transactions, the v value is derived from the signature. If v is * provided here, the chain id must be empty. * <p>The {@code to} will be an {@code Optional.empty()} for a contract creation transaction; * otherwise it should contain an address. * <p>The {@code chainId} must be greater than 0 to be applied to a specific chain; otherwise * it will default to any chain. 
   */
  public Transaction(
      final TransactionType transactionType,
      final long nonce,
      final Optional<Wei> gasPrice,
      final Optional<Wei> maxPriorityFeePerGas,
      final Optional<Wei> maxFeePerGas,
      final long gasLimit,
      final Optional<Address> to,
      final Wei value,
      final SECPSignature signature,
      final Bytes payload,
      final Optional<List<AccessListEntry>> maybeAccessList,
      final Address sender,
      final Optional<BigInteger> chainId,
      final Optional<BigInteger> v) {
    // Per the contract above: an explicit v is only used for GoQuorum private
    // transactions, which must not carry a chain id — the two are mutually
    // exclusive.
    if (v.isPresent() && chainId.isPresent()) {
      throw new IllegalArgumentException(
          String.format("chainId '%s' and v '%s' cannot both be provided", chainId.get(), v.get()));
    }
    if (transactionType.requiresChainId()) {
      checkArgument(
          chainId.isPresent(), "Chain id must be present for transaction type %s", transactionType);
    }
    // Access lists are only valid on types that support them, and mandatory on
    // the ACCESS_LIST type itself.
    if (maybeAccessList.isPresent()) {
      checkArgument(
          transactionType.supportsAccessList(),
          "Must not specify access list for transaction not supporting it");
    }
    if (Objects.equals(transactionType, TransactionType.ACCESS_LIST)) {
      checkArgument(
          maybeAccessList.isPresent(), "Must specify access list for access list transaction");
    }
    this.transactionType = transactionType;
    this.nonce = nonce;
    this.gasPrice = gasPrice;
    this.maxPriorityFeePerGas = maxPriorityFeePerGas;
    this.maxFeePerGas = maxFeePerGas;
    this.gasLimit = gasLimit;
    this.to = to;
    this.value = value;
    this.signature = signature;
    this.payload = payload;
    this.maybeAccessList = maybeAccessList;
    this.sender = sender;
    this.chainId = chainId;
    this.v = v;
  }

  /**
   * Convenience constructor for legacy (pre-typed) transactions: delegates to
   * the full constructor with {@link TransactionType#FRONTIER} and no access
   * list.
   */
  public Transaction(
      final long nonce,
      final Optional<Wei> gasPrice,
      final Optional<Wei> maxPriorityFeePerGas,
      final Optional<Wei> maxFeePerGas,
      final long gasLimit,
      final Optional<Address> to,
      final Wei value,
      final SECPSignature signature,
      final Bytes payload,
      final Address sender,
      final Optional<BigInteger> chainId,
      final Optional<BigInteger> v) {
    this(
        TransactionType.FRONTIER,
        nonce,
        gasPrice,
        maxPriorityFeePerGas,
        maxFeePerGas,
        gasLimit,
        to,
        value,
        signature,
        payload,
        Optional.empty(),
        sender,
        chainId,
        v);
  }
public Transaction( final long nonce, final Wei gasPrice, final long gasLimit, final Address to, final Wei value, final SECPSignature signature, final Bytes payload, final Optional<BigInteger> chainId, final Optional<BigInteger> v) { this( TransactionType.FRONTIER, nonce, Optional.of(gasPrice), Optional.empty(), Optional.empty(), gasLimit, Optional.of(to), value, signature, payload, Optional.empty(), null, chainId, v); } /** * Instantiates a transaction instance. * * @param nonce the nonce * @param gasPrice the gas price * @param gasLimit the gas limit * @param to the transaction recipient * @param value the value being transferred to the recipient * @param signature the signature * @param payload the payload * @param sender the transaction sender * @param chainId the chain id to apply the transaction to * <p>The {@code to} will be an {@code Optional.empty()} for a contract creation transaction; * otherwise it should contain an address. * <p>The {@code chainId} must be greater than 0 to be applied to a specific chain; otherwise * it will default to any chain. */ public Transaction( final long nonce, final Wei gasPrice, final long gasLimit, final Optional<Address> to, final Wei value, final SECPSignature signature, final Bytes payload, final Address sender, final Optional<BigInteger> chainId) { this( nonce, Optional.of(gasPrice), Optional.empty(), Optional.empty(), gasLimit, to, value, signature, payload, sender, chainId, Optional.empty()); } /** * Instantiates a transaction instance. 
* * @param nonce the nonce * @param gasPrice the gas price * @param gasLimit the gas limit * @param to the transaction recipient * @param value the value being transferred to the recipient * @param signature the signature * @param payload the payload * @param sender the transaction sender * @param chainId the chain id to apply the transaction to * @param v the v value (only passed in directly for GoQuorum private transactions) * <p>The {@code to} will be an {@code Optional.empty()} for a contract creation transaction; * otherwise it should contain an address. * <p>The {@code chainId} must be greater than 0 to be applied to a specific chain; otherwise * it will default to any chain. */ public Transaction( final long nonce, final Wei gasPrice, final long gasLimit, final Optional<Address> to, final Wei value, final SECPSignature signature, final Bytes payload, final Address sender, final Optional<BigInteger> chainId, final Optional<BigInteger> v) { this( nonce, Optional.of(gasPrice), Optional.empty(), Optional.empty(), gasLimit, to, value, signature, payload, sender, chainId, v); } /** * Returns the transaction nonce. * * @return the transaction nonce */ @Override public long getNonce() { return nonce; } /** * Return the transaction gas price. * * @return the transaction gas price */ @Override public Optional<Wei> getGasPrice() { return gasPrice; } /** * Return the transaction max priority per gas. * * @return the transaction max priority per gas */ @Override public Optional<Wei> getMaxPriorityFeePerGas() { return maxPriorityFeePerGas; } /** * Return the transaction max fee per gas. * * @return the transaction max fee per gas */ @Override public Optional<Wei> getMaxFeePerGas() { return maxFeePerGas; } /** * Boolean which indicates the transaction has associated cost data, whether gas price or 1559 fee * market parameters. 
* * @return whether cost params are presetn */ public boolean hasCostParams() { return Arrays.asList(getGasPrice(), getMaxFeePerGas(), getMaxPriorityFeePerGas()).stream() .flatMap(Optional::stream) .map(Quantity::getAsBigInteger) .anyMatch(q -> q.longValue() > 0L); } public long getEffectivePriorityFeePerGas(final Optional<Long> maybeBaseFee) { return maybeBaseFee .map( baseFee -> { if (getType().supports1559FeeMarket()) { return Math.min( getMaxPriorityFeePerGas().get().getAsBigInteger().longValue(), getMaxFeePerGas().get().getAsBigInteger().longValue() - baseFee); } else { return getGasPrice().get().getValue().longValue() - baseFee; } }) .map( maybeNegativeEffectivePriorityFeePerGas -> Math.max(0, maybeNegativeEffectivePriorityFeePerGas)) .orElseGet(() -> getGasPrice().map(Wei::getValue).map(Number::longValue).orElse(0L)); } /** * Returns the transaction gas limit. * * @return the transaction gas limit */ @Override public long getGasLimit() { return gasLimit; } /** * Returns the transaction recipient. * * <p>The {@code Optional<Address>} will be {@code Optional.empty()} if the transaction is a * contract creation; otherwise it will contain the message call transaction recipient. * * @return the transaction recipient if a message call; otherwise {@code Optional.empty()} */ @Override public Optional<Address> getTo() { return to; } /** * Returns the value transferred in the transaction. * * @return the value transferred in the transaction */ @Override public Wei getValue() { return value; } /** * Returns the signature used to sign the transaction. * * @return the signature used to sign the transaction */ public SECPSignature getSignature() { return signature; } /** * Returns the transaction payload. * * @return the transaction payload */ @Override public Bytes getPayload() { return payload; } /** * Returns the payload if this is a contract creation transaction. 
* * @return if present the init code */ @Override public Optional<Bytes> getInit() { return getTo().isPresent() ? Optional.empty() : Optional.of(payload); } /** * Returns the payload if this is a message call transaction. * * @return if present the init code */ @Override public Optional<Bytes> getData() { return getTo().isPresent() ? Optional.of(payload) : Optional.empty(); } public Optional<List<AccessListEntry>> getAccessList() { return maybeAccessList; } /** * Return the transaction chain id (if it exists) * * <p>The {@code OptionalInt} will be {@code OptionalInt.empty()} if the transaction is not tied * to a specific chain. * * @return the transaction chain id if it exists; otherwise {@code OptionalInt.empty()} */ @Override public Optional<BigInteger> getChainId() { return chainId; } /** * Returns the transaction sender. * * @return the transaction sender */ @Override public Address getSender() { if (sender == null) { final SECPPublicKey publicKey = signatureAlgorithm .recoverPublicKeyFromSignature(getOrComputeSenderRecoveryHash(), signature) .orElseThrow( () -> new IllegalStateException( "Cannot recover public key from signature for " + this)); sender = Address.extract(Hash.hash(publicKey.getEncodedBytes())); } return sender; } /** * Returns the public key extracted from the signature. 
* * @return the public key */ public Optional<String> getPublicKey() { return signatureAlgorithm .recoverPublicKeyFromSignature(getOrComputeSenderRecoveryHash(), signature) .map(SECPPublicKey::toString); } private Bytes32 getOrComputeSenderRecoveryHash() { if (hashNoSignature == null) { hashNoSignature = computeSenderRecoveryHash( transactionType, nonce, gasPrice.orElse(null), maxPriorityFeePerGas.orElse(null), maxFeePerGas.orElse(null), gasLimit, to, value, payload, maybeAccessList, chainId); } return hashNoSignature; } /** * Writes the transaction to RLP * * @param out the output to write the transaction to */ public void writeTo(final RLPOutput out) { TransactionEncoder.encodeForWire(this, out); } @Override public BigInteger getR() { return signature.getR(); } @Override public BigInteger getS() { return signature.getS(); } @Override public BigInteger getV() { if (this.v.isPresent()) { return this.v.get(); } final BigInteger recId = BigInteger.valueOf(signature.getRecId()); if (transactionType != null && transactionType != TransactionType.FRONTIER) { // EIP-2718 typed transaction, return yParity: return recId; } else { if (chainId.isEmpty()) { return recId.add(REPLAY_UNPROTECTED_V_BASE); } else { return recId.add(REPLAY_PROTECTED_V_BASE).add(TWO.multiply(chainId.get())); } } } /** * Returns the transaction hash. * * @return the transaction hash */ @Override public Hash getHash() { if (hash == null) { hash = Hash.hash(TransactionEncoder.encodeOpaqueBytes(this)); } return hash; } /** * Returns whether the transaction is a contract creation * * @return {@code true} if this is a contract-creation transaction; otherwise {@code false} */ public boolean isContractCreation() { return getTo().isEmpty(); } /** * Calculates the up-front cost for the gas the transaction can use. * * @return the up-front cost for the gas the transaction can use. 
*/ public Wei getUpfrontGasCost() { return getUpfrontGasCost( Stream.concat(maxFeePerGas.stream(), gasPrice.stream()) .findFirst() .orElseThrow( () -> new IllegalStateException( String.format("Transaction requires either gasPrice or maxFeePerGas")))); } /** * Calculates the up-front cost for the gas the transaction can use. * * @param gasPrice the gas price to use * @return the up-front cost for the gas the transaction can use. */ public Wei getUpfrontGasCost(final Wei gasPrice) { if (gasPrice == null || gasPrice.isZero()) { return Wei.ZERO; } return Wei.of(getGasLimit()).multiply(gasPrice); } /** * Calculates the up-front cost for the transaction. * * <p>The up-front cost is paid by the sender account before the transaction is executed. The * sender must have the amount in its account balance to execute and some of this amount may be * refunded after the transaction has executed. * * @return the up-front gas cost for the transaction */ public Wei getUpfrontCost() { return getUpfrontGasCost().add(getValue()); } @Override public TransactionType getType() { return this.transactionType; } /** * Returns whether or not the transaction is a GoQuorum private transaction. <br> * <br> * A GoQuorum private transaction has its <i>v</i> value equal to 37 or 38, and does not contain a * chainId. 
* * @param goQuorumCompatibilityMode true if GoQuorum compatbility mode is set * @return true if GoQuorum private transaction, false otherwise */ public boolean isGoQuorumPrivateTransaction(final boolean goQuorumCompatibilityMode) { if (!goQuorumCompatibilityMode) { return false; } if (chainId.isPresent()) { return false; } if (!v.isPresent()) { return false; } return GoQuorumPrivateTransactionDetector.isGoQuorumPrivateTransactionV(v.get()); } private static Bytes32 computeSenderRecoveryHash( final TransactionType transactionType, final long nonce, final Wei gasPrice, final Wei maxPriorityFeePerGas, final Wei maxFeePerGas, final long gasLimit, final Optional<Address> to, final Wei value, final Bytes payload, final Optional<List<AccessListEntry>> accessList, final Optional<BigInteger> chainId) { if (transactionType.requiresChainId()) { checkArgument(chainId.isPresent(), "Transaction type %s requires chainId", transactionType); } final Bytes preimage; switch (transactionType) { case FRONTIER: preimage = frontierPreimage(nonce, gasPrice, gasLimit, to, value, payload, chainId); break; case EIP1559: preimage = eip1559Preimage( nonce, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, payload, chainId, accessList); break; case ACCESS_LIST: preimage = accessListPreimage( nonce, gasPrice, gasLimit, to, value, payload, accessList.orElseThrow( () -> new IllegalStateException( "Developer error: the transaction should be guaranteed to have an access list here")), chainId); break; default: throw new IllegalStateException( "Developer error. 
Didn't specify signing hash preimage computation"); } return keccak256(preimage); } private static Bytes frontierPreimage( final long nonce, final Wei gasPrice, final long gasLimit, final Optional<Address> to, final Wei value, final Bytes payload, final Optional<BigInteger> chainId) { return RLP.encode( rlpOutput -> { rlpOutput.startList(); rlpOutput.writeLongScalar(nonce); rlpOutput.writeUInt256Scalar(gasPrice); rlpOutput.writeLongScalar(gasLimit); rlpOutput.writeBytes(to.map(Bytes::copy).orElse(Bytes.EMPTY)); rlpOutput.writeUInt256Scalar(value); rlpOutput.writeBytes(payload); if (chainId.isPresent()) { rlpOutput.writeBigIntegerScalar(chainId.get()); rlpOutput.writeUInt256Scalar(UInt256.ZERO); rlpOutput.writeUInt256Scalar(UInt256.ZERO); } rlpOutput.endList(); }); } private static Bytes eip1559Preimage( final long nonce, final Wei maxPriorityFeePerGas, final Wei maxFeePerGas, final long gasLimit, final Optional<Address> to, final Wei value, final Bytes payload, final Optional<BigInteger> chainId, final Optional<List<AccessListEntry>> accessList) { final Bytes encoded = RLP.encode( rlpOutput -> { rlpOutput.startList(); rlpOutput.writeBigIntegerScalar(chainId.orElseThrow()); rlpOutput.writeLongScalar(nonce); rlpOutput.writeUInt256Scalar(maxPriorityFeePerGas); rlpOutput.writeUInt256Scalar(maxFeePerGas); rlpOutput.writeLongScalar(gasLimit); rlpOutput.writeBytes(to.map(Bytes::copy).orElse(Bytes.EMPTY)); rlpOutput.writeUInt256Scalar(value); rlpOutput.writeBytes(payload); TransactionEncoder.writeAccessList(rlpOutput, accessList); rlpOutput.endList(); }); return Bytes.concatenate(Bytes.of(TransactionType.EIP1559.getSerializedType()), encoded); } private static Bytes accessListPreimage( final long nonce, final Wei gasPrice, final long gasLimit, final Optional<Address> to, final Wei value, final Bytes payload, final List<AccessListEntry> accessList, final Optional<BigInteger> chainId) { final Bytes encode = RLP.encode( rlpOutput -> { rlpOutput.startList(); 
TransactionEncoder.encodeAccessListInner( chainId, nonce, gasPrice, gasLimit, to, value, payload, accessList, rlpOutput); rlpOutput.endList(); }); return Bytes.concatenate(Bytes.of(TransactionType.ACCESS_LIST.getSerializedType()), encode); } @Override public boolean equals(final Object other) { if (!(other instanceof Transaction)) { return false; } final Transaction that = (Transaction) other; return Objects.equals(this.chainId, that.chainId) && this.gasLimit == that.gasLimit && Objects.equals(this.gasPrice, that.gasPrice) && Objects.equals(this.maxPriorityFeePerGas, that.maxPriorityFeePerGas) && Objects.equals(this.maxFeePerGas, that.maxFeePerGas) && this.nonce == that.nonce && Objects.equals(this.payload, that.payload) && Objects.equals(this.signature, that.signature) && Objects.equals(this.to, that.to) && Objects.equals(this.value, that.value) && Objects.equals(this.getV(), that.getV()); } @Override public int hashCode() { return Objects.hash( nonce, gasPrice, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, payload, signature, chainId, v); } @Override public String toString() { final StringBuilder sb = new StringBuilder(); sb.append(isContractCreation() ? 
"ContractCreation" : "MessageCall").append("{"); sb.append("type=").append(getType()).append(", "); sb.append("nonce=").append(getNonce()).append(", "); getGasPrice() .ifPresent( gasPrice -> sb.append("gasPrice=").append(gasPrice.toShortHexString()).append(", ")); if (getMaxPriorityFeePerGas().isPresent() && getMaxFeePerGas().isPresent()) { sb.append("maxPriorityFeePerGas=") .append(getMaxPriorityFeePerGas().map(Wei::toShortHexString).get()) .append(", "); sb.append("maxFeePerGas=") .append(getMaxFeePerGas().map(Wei::toShortHexString).get()) .append(", "); } sb.append("gasLimit=").append(getGasLimit()).append(", "); if (getTo().isPresent()) sb.append("to=").append(getTo().get()).append(", "); sb.append("value=").append(getValue()).append(", "); sb.append("sig=").append(getSignature()).append(", "); if (chainId.isPresent()) sb.append("chainId=").append(getChainId().get()).append(", "); if (v.isPresent()) sb.append("v=").append(v.get()).append(", "); sb.append("payload=").append(getPayload()); if (transactionType.equals(TransactionType.ACCESS_LIST)) { sb.append(", ").append("accessList=").append(maybeAccessList); } return sb.append("}").toString(); } public Optional<Address> contractAddress() { if (isContractCreation()) { return Optional.of(Address.contractAddress(getSender(), getNonce())); } return Optional.empty(); } public static class Builder { protected TransactionType transactionType; protected long nonce = -1L; protected Wei gasPrice; protected Wei maxPriorityFeePerGas; protected Wei maxFeePerGas; protected long gasLimit = -1L; protected Optional<Address> to = Optional.empty(); protected Wei value; protected SECPSignature signature; protected Bytes payload; protected Optional<List<AccessListEntry>> accessList = Optional.empty(); protected Address sender; protected Optional<BigInteger> chainId = Optional.empty(); protected Optional<BigInteger> v = Optional.empty(); public Builder type(final TransactionType transactionType) { this.transactionType = 
transactionType; return this; } public Builder chainId(final BigInteger chainId) { this.chainId = Optional.of(chainId); return this; } public Builder v(final BigInteger v) { this.v = Optional.of(v); return this; } public Builder gasPrice(final Wei gasPrice) { this.gasPrice = gasPrice; return this; } public Builder maxPriorityFeePerGas(final Wei maxPriorityFeePerGas) { this.maxPriorityFeePerGas = maxPriorityFeePerGas; return this; } public Builder maxFeePerGas(final Wei maxFeePerGas) { this.maxFeePerGas = maxFeePerGas; return this; } public Builder gasLimit(final long gasLimit) { this.gasLimit = gasLimit; return this; } public Builder nonce(final long nonce) { this.nonce = nonce; return this; } public Builder value(final Wei value) { this.value = value; return this; } public Builder to(final Address to) { this.to = Optional.ofNullable(to); return this; } public Builder payload(final Bytes payload) { this.payload = payload; return this; } public Builder accessList(final List<AccessListEntry> accessList) { this.accessList = Optional.ofNullable(accessList); return this; } public Builder sender(final Address sender) { this.sender = sender; return this; } public Builder signature(final SECPSignature signature) { this.signature = signature; return this; } public Builder guessType() { if (maxPriorityFeePerGas != null || maxFeePerGas != null) { transactionType = TransactionType.EIP1559; } else if (accessList.isPresent()) { transactionType = TransactionType.ACCESS_LIST; } else { transactionType = TransactionType.FRONTIER; } return this; } public TransactionType getTransactionType() { return transactionType; } public Transaction build() { if (transactionType == null) guessType(); return new Transaction( transactionType, nonce, Optional.ofNullable(gasPrice), Optional.ofNullable(maxPriorityFeePerGas), Optional.ofNullable(maxFeePerGas), gasLimit, to, value, signature, payload, accessList, sender, chainId, v); } public Transaction signAndBuild(final KeyPair keys) { checkState( 
signature == null, "The transaction signature has already been provided to this builder"); signature(computeSignature(keys)); sender(Address.extract(Hash.hash(keys.getPublicKey().getEncodedBytes()))); return build(); } SECPSignature computeSignature(final KeyPair keys) { return SignatureAlgorithmFactory.getInstance() .sign( computeSenderRecoveryHash( transactionType, nonce, gasPrice, maxPriorityFeePerGas, maxFeePerGas, gasLimit, to, value, payload, accessList, chainId), keys); } } /** * Calculates the effectiveGasPrice of a transaction on the basis of an {@code Optional<Long>} * baseFee and handles unwrapping Optional fee parameters. If baseFee is present, effective gas is * calculated as: * * <p>min((baseFeePerGas + maxPriorityFeePerGas), maxFeePerGas) * * <p>Otherwise, return gasPrice for legacy transactions. * * @param baseFeePerGas optional baseFee from the block header, if we are post-london * @return the effective gas price. */ public final Wei getEffectiveGasPrice(final Optional<Long> baseFeePerGas) { return Wei.of(getEffectivePriorityFeePerGas(baseFeePerGas) + baseFeePerGas.orElse(0L)); } }
package org.infernus.idea.checkstyle.build; import java.io.File; import java.math.BigDecimal; import java.util.stream.Collectors; import org.gradle.api.Project; import org.gradle.api.artifacts.ConfigurationContainer; import org.gradle.api.file.FileCollection; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; import org.gradle.api.tasks.TaskContainer; import org.gradle.api.tasks.TaskProvider; import org.gradle.language.base.plugins.LifecycleBasePlugin; import org.gradle.testing.jacoco.plugins.JacocoPluginExtension; import org.gradle.testing.jacoco.plugins.JacocoTaskExtension; import org.gradle.testing.jacoco.tasks.JacocoCoverageVerification; import org.gradle.testing.jacoco.tasks.JacocoReport; import org.gradle.testing.jacoco.tasks.JacocoReportBase; import org.gradle.testing.jacoco.tasks.rules.JacocoViolationRule; public class CustomSourceSetCreator { static final String CSACCESS_SOURCESET_NAME = "csaccess"; public static final String CSACCESSTEST_SOURCESET_NAME = "csaccessTest"; private static final double MINIMUM_CSACCESS_COVERAGE = 0.70d; private final Project project; public CustomSourceSetCreator(final Project project) { this.project = project; } private String getJacocoReportTaskName() { final SourceSetContainer sourceSets = (SourceSetContainer) project.getProperties().get("sourceSets"); return sourceSets.getByName(CSACCESS_SOURCESET_NAME).getTaskName("jacoco", "report"); } private String getJacocoVerificationTaskName() { final SourceSetContainer sourceSets = (SourceSetContainer) project.getProperties().get("sourceSets"); return sourceSets.getByName(CSACCESS_SOURCESET_NAME).getTaskName("jacoco", "CoverageVerification"); } public void establishCsAccessSourceSet() { final SourceSetContainer sourceSets = (SourceSetContainer) project.getProperties().get("sourceSets"); final SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME); // Create the 'csaccess' source 
set final SourceSet csaccessSourceSet = sourceSets.create(CSACCESS_SOURCESET_NAME); csaccessSourceSet.setCompileClasspath(csaccessSourceSet.getCompileClasspath().plus(mainSourceSet.getOutput())); csaccessSourceSet.setRuntimeClasspath(csaccessSourceSet.getRuntimeClasspath().plus(mainSourceSet.getOutput())); // Derive all its configurations from 'main', so 'csaccess' code can see 'main' code final ConfigurationContainer configurations = project.getConfigurations(); configurations.getByName(csaccessSourceSet.getImplementationConfigurationName()) .extendsFrom(configurations.getByName(mainSourceSet.getImplementationConfigurationName())); configurations.getByName(csaccessSourceSet.getCompileOnlyConfigurationName()) .extendsFrom(configurations.getByName(mainSourceSet.getCompileOnlyConfigurationName())); configurations.getByName(csaccessSourceSet.getCompileClasspathConfigurationName()) .extendsFrom(configurations.getByName(mainSourceSet.getCompileClasspathConfigurationName())); configurations.getByName(csaccessSourceSet.getRuntimeOnlyConfigurationName()) .extendsFrom(configurations.getByName(mainSourceSet.getRuntimeOnlyConfigurationName())); // Wire task dependencies to match the classpath dependencies (arrow means "depends on"): // - compileTestJava -> compileCsaccessJava // - testClasses -> csaccessClasses // - jar -> csaccessClasses final TaskContainer tasks = project.getTasks(); tasks.getByName(JavaPlugin.COMPILE_TEST_JAVA_TASK_NAME) .dependsOn(tasks.getByName(csaccessSourceSet.getCompileJavaTaskName())); tasks.getByName(JavaPlugin.TEST_CLASSES_TASK_NAME) .dependsOn(tasks.getByName(csaccessSourceSet.getClassesTaskName())); tasks.getByName(JavaPlugin.JAR_TASK_NAME) .dependsOn(tasks.getByName(csaccessSourceSet.getClassesTaskName())); } public void establishCsAccessTestSourceSet() { final SourceSetContainer sourceSets = (SourceSetContainer) project.getProperties().get("sourceSets"); final SourceSet mainSourceSet = sourceSets.getByName(SourceSet.MAIN_SOURCE_SET_NAME); 
final SourceSet csaccessSourceSet = sourceSets.getByName(CSACCESS_SOURCESET_NAME); final SourceSet testSourceSet = sourceSets.getByName(SourceSet.TEST_SOURCE_SET_NAME); // Create the 'csaccessTest' source set final SourceSet csaccessTestSourceSet = sourceSets.create(CSACCESSTEST_SOURCESET_NAME); csaccessTestSourceSet.setCompileClasspath(csaccessTestSourceSet.getCompileClasspath(). plus(mainSourceSet.getOutput()).plus(csaccessSourceSet.getOutput())); csaccessTestSourceSet.setRuntimeClasspath(csaccessTestSourceSet.getRuntimeClasspath(). plus(mainSourceSet.getOutput()).plus(csaccessSourceSet.getOutput())); // Derive all its configurations from 'test' and 'csaccess' final ConfigurationContainer configurations = project.getConfigurations(); configurations.getByName(csaccessTestSourceSet.getImplementationConfigurationName()).extendsFrom( configurations.getByName(csaccessSourceSet.getImplementationConfigurationName()), configurations.getByName(testSourceSet.getImplementationConfigurationName())); configurations.getByName(csaccessTestSourceSet.getCompileOnlyConfigurationName()).extendsFrom( configurations.getByName(csaccessSourceSet.getCompileOnlyConfigurationName()), configurations.getByName(testSourceSet.getCompileOnlyConfigurationName())); configurations.getByName(csaccessTestSourceSet.getCompileClasspathConfigurationName()).extendsFrom( configurations.getByName(csaccessSourceSet.getCompileClasspathConfigurationName()), configurations.getByName(testSourceSet.getCompileClasspathConfigurationName())); configurations.getByName(csaccessTestSourceSet.getRuntimeOnlyConfigurationName()).extendsFrom( configurations.getByName(csaccessSourceSet.getRuntimeOnlyConfigurationName()), configurations.getByName(testSourceSet.getRuntimeOnlyConfigurationName())); // Wire task dependencies to match the classpath dependencies (arrow means "depends on"): // - compileCsaccessTestJava -> compileCsaccessJava // - csaccessTestClasses -> csaccessClasses final TaskContainer tasks = 
project.getTasks(); tasks.getByName(csaccessTestSourceSet.getCompileJavaTaskName()) .dependsOn(tasks.getByName(csaccessSourceSet.getCompileJavaTaskName())); tasks.getByName(csaccessTestSourceSet.getClassesTaskName()) .dependsOn(tasks.getByName(csaccessSourceSet.getClassesTaskName())); } public void setupCoverageVerification() { final TaskContainer tasks = project.getTasks(); // Disable JaCoCo for 'test' source set final JacocoTaskExtension jacocoTestTaskExtension = (JacocoTaskExtension) tasks.getByName( JavaPlugin.TEST_TASK_NAME).getExtensions().getByName(JacocoPluginExtension.TASK_EXTENSION_NAME); jacocoTestTaskExtension.setEnabled(false); tasks.getByName("jacocoTestReport").setEnabled(false); tasks.getByName("jacocoTestCoverageVerification").setEnabled(false); // Enable JaCoCo reporting for 'csaccess' source set final String reportTaskName = getJacocoReportTaskName(); TaskProvider<JacocoReport> provider = tasks.register(reportTaskName, JacocoReport.class); provider.configure((JacocoReport jacocoReportTask) -> { jacocoReportTask.dependsOn(tasks.getByName(CsaccessTestTask.NAME), tasks.getByName(CsaccessTestTask.XTEST_TASK_NAME)); jacocoReportTask.setDescription("Generate exclusive JaCoCo test report on the '" + CSACCESS_SOURCESET_NAME + "' classes"); configureJacocoTask(jacocoReportTask); jacocoReportTask.getReports().getXml().getRequired().set(true); jacocoReportTask.getReports().getCsv().getRequired().set(false); jacocoReportTask.getReports().getHtml().getRequired().set(true); }); // Verify minimum line coverage for 'csaccess' source set final String verificationTaskName = getJacocoVerificationTaskName(); TaskProvider<JacocoCoverageVerification> provider2 = tasks.register(verificationTaskName, JacocoCoverageVerification.class); provider2.configure((JacocoCoverageVerification jacocoVerificationTask) -> { jacocoVerificationTask.dependsOn(reportTaskName); jacocoVerificationTask.setDescription("Ensure that '" + CSACCESS_SOURCESET_NAME + "' test coverage does not drop 
below a certain level"); configureJacocoTask(jacocoVerificationTask); jacocoVerificationTask.getViolationRules().rule((final JacocoViolationRule rule) -> rule.limit(jacocoLimit -> jacocoLimit.setMinimum(BigDecimal.valueOf(MINIMUM_CSACCESS_COVERAGE)))); }); // Wire 'build' task so that it ensures coverage tasks.getByName(LifecycleBasePlugin.BUILD_TASK_NAME).dependsOn(verificationTaskName); } private void configureJacocoTask(final JacocoReportBase jacocoTask) { jacocoTask.setGroup(LifecycleBasePlugin.VERIFICATION_GROUP); final SourceSetContainer sourceSets = (SourceSetContainer) project.getProperties().get("sourceSets"); final SourceSet csaccessSourceSet = sourceSets.getByName(CSACCESS_SOURCESET_NAME); jacocoTask.getClassDirectories().from(csaccessSourceSet.getOutput().getClassesDirs()); jacocoTask.getSourceDirectories().from(csaccessSourceSet.getJava().getSourceDirectories()); final FileCollection execFiles = project.files(project.getTasks().withType(CsaccessTestTask.class).stream() .map((final CsaccessTestTask task) -> new File(project.getBuildDir() + "/jacoco", task.getName() + ".exec")) .collect(Collectors.toList())); jacocoTask.getExecutionData().from(execFiles); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.gui.util; import java.awt.Color; public class JMeterColor { public static final Color DARK_GREEN = new Color(0F, .5F, 0F); public static final Color LAVENDER = new Color(206F / 255F, 207F / 255F, 1F); public static final Color PURPLE = new Color(150 / 255F, 0, 150 / 255F); }
/* * Copyright (C) 2013 Square, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package online.sniper.net.okhttp3; import java.nio.charset.Charset; import java.util.Locale; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * An <a href="http://tools.ietf.org/html/rfc2045">RFC 2045</a> Media Type, appropriate to describe * the content type of an HTTP request or response body. */ public final class MediaType { private static final String TOKEN = "([a-zA-Z0-9-!#$%&'*+.^_`{|}~]+)"; private static final String QUOTED = "\"([^\"]*)\""; private static final Pattern TYPE_SUBTYPE = Pattern.compile(TOKEN + "/" + TOKEN); private static final Pattern PARAMETER = Pattern.compile( ";\\s*(?:" + TOKEN + "=(?:" + TOKEN + "|" + QUOTED + "))?"); private final String mediaType; private final String type; private final String subtype; private final String charset; private MediaType(String mediaType, String type, String subtype, String charset) { this.mediaType = mediaType; this.type = type; this.subtype = subtype; this.charset = charset; } /** * Returns a media type for {@code string}, or null if {@code string} is not a well-formed media * type. 
*/ public static MediaType parse(String string) { Matcher typeSubtype = TYPE_SUBTYPE.matcher(string); if (!typeSubtype.lookingAt()) return null; String type = typeSubtype.group(1).toLowerCase(Locale.US); String subtype = typeSubtype.group(2).toLowerCase(Locale.US); String charset = null; Matcher parameter = PARAMETER.matcher(string); for (int s = typeSubtype.end(); s < string.length(); s = parameter.end()) { parameter.region(s, string.length()); if (!parameter.lookingAt()) return null; // This is not a well-formed media type. String name = parameter.group(1); if (name == null || !name.equalsIgnoreCase("charset")) continue; String charsetParameter; String token = parameter.group(2); if (token != null) { // If the token is 'single-quoted' it's invalid! But we're lenient and strip the quotes. charsetParameter = (token.startsWith("'") && token.endsWith("'") && token.length() > 2) ? token.substring(1, token.length() - 1) : token; } else { // Value is "double-quoted". That's valid and our regex group already strips the quotes. charsetParameter = parameter.group(3); } if (charset != null && !charsetParameter.equalsIgnoreCase(charset)) { throw new IllegalArgumentException("Multiple different charsets: " + string); } charset = charsetParameter; } return new MediaType(string, type, subtype, charset); } /** * Returns the high-level media type, such as "text", "image", "audio", "video", or * "application". */ public String type() { return type; } /** * Returns a specific media subtype, such as "plain" or "png", "mpeg", "mp4" or "xml". */ public String subtype() { return subtype; } /** * Returns the charset of this media type, or null if this media type doesn't specify a charset. */ public Charset charset() { return charset != null ? Charset.forName(charset) : null; } /** * Returns the charset of this media type, or {@code defaultValue} if this media type doesn't * specify a charset. */ public Charset charset(Charset defaultValue) { return charset != null ? 
Charset.forName(charset) : defaultValue; } /** * Returns the encoded media type, like "text/plain; charset=utf-8", appropriate for use in a * Content-Type header. */ @Override public String toString() { return mediaType; } @Override public boolean equals(Object o) { return o instanceof MediaType && ((MediaType) o).mediaType.equals(mediaType); } @Override public int hashCode() { return mediaType.hashCode(); } }
package duelistmod.cards.pools.aqua;

import com.megacrit.cardcrawl.cards.AbstractCard;
import com.megacrit.cardcrawl.characters.AbstractPlayer;
import com.megacrit.cardcrawl.core.CardCrawlGame;
import com.megacrit.cardcrawl.localization.CardStrings;
import com.megacrit.cardcrawl.monsters.AbstractMonster;

import duelistmod.DuelistMod;
import duelistmod.abstracts.DuelistCard;
import duelistmod.helpers.Util;
import duelistmod.patches.AbstractCardEnum;
import duelistmod.powers.*;
import duelistmod.variables.Tags;

/**
 * Fishborg Planter - an Aqua/Machine/Fluvial monster card for the Duelist mod.
 *
 * <p>Cost 1, tributes 1. On use it tributes, raises max summons by {@code magicNumber} (2)
 * and triggers a fish effect with {@code secondMagic} (4) - the helper methods
 * {@code tribute()}, {@code incMaxSummons()} and {@code fish()} are defined on
 * {@link DuelistCard} (not visible in this file).</p>
 */
public class FishborgPlanter extends DuelistCard {
    // TEXT DECLARATION
    private static final CardStrings cardStrings = getCardStrings();
    public static final String NAME = cardStrings.NAME;
    public static final String DESCRIPTION = cardStrings.DESCRIPTION;
    public static final String UPGRADE_DESCRIPTION = cardStrings.UPGRADE_DESCRIPTION;
    // /TEXT DECLARATION/

    // STAT DECLARATION
    private static final CardRarity RARITY = CardRarity.UNCOMMON;
    private static final CardTarget TARGET = CardTarget.SELF;
    private static final CardType TYPE = CardType.SKILL;
    public static final CardColor COLOR = AbstractCardEnum.DUELIST_MONSTERS;
    private static final int COST = 1;
    // /STAT DECLARATION/

    public FishborgPlanter() {
        super(getCARDID(), NAME, getIMG(), COST, DESCRIPTION, TYPE, COLOR, RARITY, TARGET);
        this.tags.add(Tags.MONSTER);
        this.tags.add(Tags.AQUA);
        this.tags.add(Tags.MACHINE);
        this.tags.add(Tags.FLUVIAL);
        this.misc = 0;
        // Enables the custom canUse() logic below; useTributeCanUse selects the
        // "tribute availability" branch of that logic.
        this.specialCanUseLogic = true;
        this.useTributeCanUse = true;
        this.originalName = this.name;
        this.baseTributes = this.tributes = 1;
        this.magicNumber = this.baseMagicNumber = 2;
        this.secondMagic = this.baseSecondMagic = 4;
    }

    // Actions the card should do.
    @Override
    public void use(AbstractPlayer p, AbstractMonster m) {
        tribute();
        incMaxSummons(this.magicNumber);
        fish(this.secondMagic);
    }

    // Which card to return when making a copy of this card.
    @Override
    public AbstractCard makeCopy() {
        return new FishborgPlanter();
    }

    // Upgraded stats: upgrade keeps cost at 0 delta and swaps in the upgrade description.
    @Override
    public void upgrade() {
        if (!this.upgraded) {
            // Append "+N" for repeat upgrades (e.g. via upgrade-duplicating effects), "+" otherwise.
            if (this.timesUpgraded > 0) {
                this.upgradeName(NAME + "+" + this.timesUpgraded);
            } else {
                this.upgradeName(NAME + "+");
            }
            this.upgradeBaseCost(0);
            this.rawDescription = UPGRADE_DESCRIPTION;
            this.initializeDescription();
        }
    }

    // Tribute canUse()
    //
    // Branch structure (all paths set this.cantUseMessage before returning false):
    //  - specialCanUseLogic off  -> plain super.canUse().
    //  - useTributeCanUse        -> super.canUse(), then misc==52 override ("Pumpking & Princess"),
    //                               then EmperorPower (mausoleum) check, else summons >= tributes.
    //  - useBothCanUse           -> same checks PLUS monster-zone capacity when the
    //                               "Summoners Challenge" custom mod or challenge level 20 is active.
    //  - neither flag            -> super.canUse() plus zone capacity under the challenge only.
    @Override
    public boolean canUse(AbstractPlayer p, AbstractMonster m) {
        if (this.specialCanUseLogic) {
            if (this.useTributeCanUse) {
                // Check super canUse()
                boolean canUse = super.canUse(p, m);
                if (!canUse) {
                    return false;
                }
                // Pumpking & Princess
                else if (this.misc == 52) {
                    return true;
                }
                // Mausoleum check
                else if (p.hasPower(EmperorPower.POWER_ID)) {
                    EmperorPower empInstance = (EmperorPower) p.getPower(EmperorPower.POWER_ID);
                    // With the Emperor flag unset, tributes are free - always usable.
                    if (!empInstance.flag) {
                        return true;
                    } else {
                        if (p.hasPower(SummonPower.POWER_ID)) {
                            int temp = (p.getPower(SummonPower.POWER_ID).amount);
                            if (temp >= this.tributes) {
                                return true;
                            }
                        }
                    }
                }
                // Check for # of summons >= tributes
                else {
                    if (p.hasPower(SummonPower.POWER_ID)) {
                        int temp = (p.getPower(SummonPower.POWER_ID).amount);
                        if (temp >= this.tributes) {
                            return true;
                        }
                    }
                }
                // Player doesn't have something required at this point
                this.cantUseMessage = this.tribString;
                return false;
            } else if (this.useBothCanUse) {
                // Check for monster zones challenge
                if (Util.isCustomModActive("theDuelist:SummonersChallenge") || DuelistMod.challengeLevel20) {
                    if ((DuelistMod.getChallengeDiffIndex() < 3) && this.misc == 52) {
                        return true;
                    }
                    // Check for energy and other normal game checks
                    boolean canUse = super.canUse(p, m);
                    if (!canUse) {
                        return false;
                    }
                    // Mausoleum check
                    else if (p.hasPower(EmperorPower.POWER_ID)) {
                        // If mausoleum is active skip tribute check and just check monster zones for space
                        EmperorPower empInstance = (EmperorPower) p.getPower(EmperorPower.POWER_ID);
                        if (!empInstance.flag) {
                            if (p.hasPower(SummonPower.POWER_ID)) {
                                int sums = DuelistCard.getSummons(p);
                                int max = DuelistCard.getMaxSummons(p);
                                if (sums + this.summons <= max) {
                                    return true;
                                } else {
                                    // Not enough zones: report how many are left (singular/plural message).
                                    if (sums < max) {
                                        if (max - sums > 1) {
                                            this.cantUseMessage = "You only have " + (max - sums) + " monster zones";
                                        } else {
                                            this.cantUseMessage = "You only have " + (max - sums) + " monster zone";
                                        }
                                    } else {
                                        this.cantUseMessage = "No monster zones remaining";
                                    }
                                    return false;
                                }
                            } else {
                                return true;
                            }
                        }
                        // If no mausoleum active, check tributes and then check summons
                        else {
                            if (p.hasPower(SummonPower.POWER_ID)) {
                                int sums = DuelistCard.getSummons(p);
                                if (sums >= this.tributes) {
                                    int max = DuelistCard.getMaxSummons(p);
                                    // NOTE(review): with sums >= tributes this condition can never hold,
                                    // so this early return looks unreachable - presumably defensive; confirm.
                                    if (sums - tributes < 0) {
                                        return true;
                                    } else {
                                        sums -= this.tributes;
                                        if (sums + this.summons <= max) {
                                            return true;
                                        } else {
                                            if (sums < max) {
                                                if (max - sums > 1) {
                                                    this.cantUseMessage = "You only have " + (max - sums) + " monster zones";
                                                } else {
                                                    this.cantUseMessage = "You only have " + (max - sums) + " monster zone";
                                                }
                                            } else {
                                                this.cantUseMessage = "No monster zones remaining";
                                            }
                                            return false;
                                        }
                                    }
                                }
                            }
                        }
                    }
                    // No mausoleum power - so just check for number of tributes and summon slots
                    else {
                        if (p.hasPower(SummonPower.POWER_ID)) {
                            int sums = DuelistCard.getSummons(p);
                            if (sums >= this.tributes) {
                                int max = DuelistCard.getMaxSummons(p);
                                // NOTE(review): same apparently-unreachable early return as above.
                                if (sums - tributes < 0) {
                                    return true;
                                } else {
                                    sums -= this.tributes;
                                    if (sums + this.summons <= max) {
                                        return true;
                                    } else {
                                        if (sums < max) {
                                            if (max - sums > 1) {
                                                this.cantUseMessage = "You only have " + (max - sums) + " monster zones";
                                            } else {
                                                this.cantUseMessage = "You only have " + (max - sums) + " monster zone";
                                            }
                                        } else {
                                            this.cantUseMessage = "No monster zones remaining";
                                        }
                                        return false;
                                    }
                                }
                            }
                        }
                    }
                    // Player doesn't have something required at this point
                    this.cantUseMessage = this.tribString;
                    return false;
                }
                // Default behavior - no monster zone challenge
                else {
                    boolean canUse = super.canUse(p, m);
                    if (!canUse) {
                        return false;
                    }
                    // Pumpking & Princess
                    else if (this.misc == 52) {
                        return true;
                    }
                    // Mausoleum check
                    else if (p.hasPower(EmperorPower.POWER_ID)) {
                        EmperorPower empInstance = (EmperorPower) p.getPower(EmperorPower.POWER_ID);
                        if (!empInstance.flag) {
                            return true;
                        } else {
                            if (p.hasPower(SummonPower.POWER_ID)) {
                                int temp = (p.getPower(SummonPower.POWER_ID).amount);
                                if (temp >= this.tributes) {
                                    return true;
                                }
                            }
                        }
                    }
                    // Check for # of summons >= tributes
                    else {
                        if (p.hasPower(SummonPower.POWER_ID)) {
                            int temp = (p.getPower(SummonPower.POWER_ID).amount);
                            if (temp >= this.tributes) {
                                return true;
                            }
                        }
                    }
                    // Player doesn't have something required at this point
                    this.cantUseMessage = this.tribString;
                    return false;
                }
            } else {
                // Check super canUse()
                boolean canUse = super.canUse(p, m);
                if (!canUse) {
                    return false;
                }
                // Only the monster-zone capacity check applies on this path, and only under the challenge.
                if (Util.isCustomModActive("theDuelist:SummonersChallenge") || DuelistMod.challengeLevel20) {
                    if ((DuelistMod.getChallengeDiffIndex() < 3) && this.misc == 52) {
                        return true;
                    }
                    if (p.hasPower(SummonPower.POWER_ID)) {
                        int sums = DuelistCard.getSummons(p);
                        int max = DuelistCard.getMaxSummons(p);
                        if (sums + this.summons <= max) {
                            return true;
                        } else {
                            if (sums < max) {
                                if (max - sums > 1) {
                                    this.cantUseMessage = "You only have " + (max - sums) + " monster zones";
                                } else {
                                    this.cantUseMessage = "You only have " + (max - sums) + " monster zone";
                                }
                            } else {
                                this.cantUseMessage = "No monster zones remaining";
                            }
                            return false;
                        }
                    } else {
                        return true;
                    }
                } else {
                    return true;
                }
            }
        } else {
            return super.canUse(p, m);
        }
    }

    // Intentionally empty: this card has no on-tribute behavior.
    @Override
    public void onTribute(DuelistCard tributingCard) {
    }

    @Override
    public void onResummon(int summons) {
        // TODO Auto-generated method stub
    }

    @Override
    public void summonThis(int summons, DuelistCard c, int var) {
        // TODO Auto-generated method stub
    }

    @Override
    public void summonThis(int summons, DuelistCard c, int var, AbstractMonster m) {
        // TODO Auto-generated method stub
    }

    @Override
    public String getID() {
        return getCARDID();
    }

    @Override
    public void optionSelected(AbstractPlayer arg0, AbstractMonster arg1, int arg2) {
        // TODO Auto-generated method stub
    }

    // AUTOSETUP - ID/IMG - Id, Img name, and class name all must match to use this
    public static String getCARDID() {
        return DuelistMod.makeID(getCurClassName());
    }

    public static CardStrings getCardStrings() {
        return CardCrawlGame.languagePack.getCardStrings(getCARDID());
    }

    public static String getIMG() {
        return DuelistMod.makeCardPath(getCurClassName() + ".png");
    }

    public static String getCurClassName() {
        return (new CurClassNameGetter()).getClassName();
    }

    // Resolves the enclosing class's simple name via the SecurityManager class-context
    // walk, so ID/image path stay in sync with the class name automatically.
    public static class CurClassNameGetter extends SecurityManager {
        public String getClassName() {
            return getClassContext()[1].getSimpleName();
        }
    }
    // END AUTOSETUP
}
package com.medical.common.beanvalidator;

/**
 * Default Bean Validation group.
 *
 * <p>Marker interface used as the default {@code groups} value on JSR-303
 * constraint annotations so that validation can be triggered selectively.</p>
 *
 * @author ThinkGem
 */
public interface DefaultGroup {

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.asyncsearch;

import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.test.ESTestCase;

import java.util.Optional;

/**
 * Unit tests for the request-level validation of {@link SubmitAsyncSearchRequest}.
 */
public class SubmitAsyncSearchRequestTests extends ESTestCase {

    public void testValidation() {
        {
            // a plain search source against an index is valid
            SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(new SearchSourceBuilder(), "test");
            Optional<ValidationException> validation = request.validate();
            assertFalse(validation.isPresent());
        }
        {
            // a source containing only a suggest section must be rejected
            SearchSourceBuilder suggestOnly = new SearchSourceBuilder().suggest(new SuggestBuilder());
            SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(suggestOnly, "test");
            Optional<ValidationException> validation = request.validate();
            assertTrue(validation.isPresent());
            assertEquals(1, validation.get().validationErrors().size());
            assertEquals("suggest-only queries are not supported", validation.get().validationErrors().get(0));
        }
        {
            // keep_alive values below one minute must be rejected
            SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(new SearchSourceBuilder(), "test");
            request.setKeepAlive(new TimeValue(1));
            Optional<ValidationException> validation = request.validate();
            assertTrue(validation.isPresent());
            assertEquals(1, validation.get().validationErrors().size());
            assertEquals("[keep_alive] must be greater than 1 minute, got: 1ms", validation.get().validationErrors().get(0));
        }
    }
}
/*
 * Copyright 2019-present HiveMQ GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hivemq.bootstrap.netty.initializer;

import com.hivemq.bootstrap.netty.ChannelDependencies;
import com.hivemq.bootstrap.netty.FakeChannelPipeline;
import com.hivemq.configuration.HivemqId;
import com.hivemq.configuration.service.FullConfigurationService;
import com.hivemq.configuration.service.RestrictionsConfigurationService;
import com.hivemq.configuration.service.entity.Listener;
import com.hivemq.configuration.service.entity.Tls;
import com.hivemq.configuration.service.entity.TlsTcpListener;
import com.hivemq.logging.EventLog;
import com.hivemq.mqtt.handler.disconnect.MqttServerDisconnector;
import com.hivemq.mqtt.handler.disconnect.MqttServerDisconnectorImpl;
import com.hivemq.security.ssl.SslFactory;
import io.netty.channel.Channel;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslHandler;
import io.netty.util.Attribute;
import io.netty.util.AttributeKey;
import io.netty.util.concurrent.Future;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;

import static com.hivemq.bootstrap.netty.ChannelHandlerNames.*;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.when;

/**
 * Verifies that {@link TlsTcpChannelInitializer#addSpecialHandlers} installs the
 * expected TLS-related handlers, in the expected pipeline order, depending on the
 * configured client-auth mode and handshake timeout.
 *
 * <p>Uses a {@link FakeChannelPipeline} instead of a real Netty pipeline so that
 * the handler names added by the initializer can be inspected directly.</p>
 */
public class TlsTcpChannelInitializerTest {

    // Mocks for the netty channel and its attributes
    @Mock
    private SocketChannel socketChannel;

    @Mock
    private Attribute<Listener> attribute;

    @Mock
    private ChannelDependencies channelDependencies;

    @Mock
    private SslHandler sslHandler;

    @Mock
    private TlsTcpListener tlsTcpListener;

    @Mock
    private Tls tls;

    @Mock
    private SslFactory sslFactory;

    @Mock
    private SslContext sslContext;

    @Mock
    private Future<Channel> future;

    @Mock
    private EventLog eventLog;

    @Mock
    private FullConfigurationService fullConfigurationService;

    @Mock
    private RestrictionsConfigurationService restrictionsConfigurationService;

    // Real (fake) pipeline so handler names/ordering can be asserted
    private ChannelPipeline pipeline;

    // Class under test
    private TlsTcpChannelInitializer tlstcpChannelInitializer;

    /**
     * Wires the mock graph the initializer traverses: listener -> TLS config,
     * SSL factory -> context/handler, channel -> pipeline/attributes, and
     * dependency lookups on {@link ChannelDependencies}.
     */
    @Before
    public void before() throws Exception {
        MockitoAnnotations.initMocks(this);
        pipeline = new FakeChannelPipeline();

        when(tlsTcpListener.getTls()).thenReturn(tls);
        when(sslFactory.getSslContext(any(Tls.class))).thenReturn(sslContext);
        when(sslFactory.getSslHandler(any(SocketChannel.class), any(Tls.class), any(SslContext.class))).thenReturn(sslHandler);
        when(sslHandler.handshakeFuture()).thenReturn(future);
        when(socketChannel.pipeline()).thenReturn(pipeline);
        when(socketChannel.attr(any(AttributeKey.class))).thenReturn(attribute);
        when(socketChannel.isActive()).thenReturn(true);
        when(channelDependencies.getConfigurationService()).thenReturn(fullConfigurationService);
        when(channelDependencies.getRestrictionsConfigurationService()).thenReturn(restrictionsConfigurationService);
        // no incoming bandwidth limit for these tests
        when(restrictionsConfigurationService.incomingLimit()).thenReturn(0L);

        final MqttServerDisconnector mqttServerDisconnector = new MqttServerDisconnectorImpl(eventLog, new HivemqId());
        when(channelDependencies.getMqttServerDisconnector()).thenReturn(mqttServerDisconnector);

        tlstcpChannelInitializer = new TlsTcpChannelInitializer(channelDependencies, tlsTcpListener, sslFactory);
    }

    /**
     * With client certificate authentication REQUIRED and no handshake timeout,
     * exactly the four SSL handlers are added, in order.
     */
    @Test
    public void test_add_special_handlers() throws Exception {
        when(tls.getClientAuthMode()).thenReturn(Tls.ClientAuthMode.REQUIRED);

        tlstcpChannelInitializer.addSpecialHandlers(socketChannel);

        assertEquals(4, pipeline.names().size());
        assertEquals(SSL_HANDLER, pipeline.names().get(0));
        assertEquals(SSL_EXCEPTION_HANDLER, pipeline.names().get(1));
        assertEquals(SSL_PARAMETER_HANDLER, pipeline.names().get(2));
        assertEquals(SSL_CLIENT_CERTIFICATE_HANDLER, pipeline.names().get(3));
    }

    /**
     * A non-zero handshake timeout additionally appends the idle handler and the
     * no-TLS-handshake idle event handler at the end of the pipeline.
     */
    @Test
    public void test_add_special_handlers_with_timeout() throws Exception {
        when(tls.getClientAuthMode()).thenReturn(Tls.ClientAuthMode.REQUIRED);
        when(tls.getHandshakeTimeout()).thenReturn(30);

        tlstcpChannelInitializer.addSpecialHandlers(socketChannel);

        assertEquals(6, pipeline.names().size());
        assertEquals(SSL_HANDLER, pipeline.names().get(0));
        assertEquals(SSL_EXCEPTION_HANDLER, pipeline.names().get(1));
        assertEquals(SSL_PARAMETER_HANDLER, pipeline.names().get(2));
        assertEquals(SSL_CLIENT_CERTIFICATE_HANDLER, pipeline.names().get(3));
        assertEquals(NEW_CONNECTION_IDLE_HANDLER, pipeline.names().get(pipeline.names().size() - 2));
        assertEquals(NO_TLS_HANDSHAKE_IDLE_EVENT_HANDLER, pipeline.names().get(pipeline.names().size() - 1));
    }

    /**
     * With client-auth mode NONE, the client certificate handler is omitted and
     * only three SSL handlers remain.
     */
    @Test
    public void test_add_special_handlers_no_cert() throws Exception {
        when(tls.getClientAuthMode()).thenReturn(Tls.ClientAuthMode.NONE);

        tlstcpChannelInitializer.addSpecialHandlers(socketChannel);

        assertEquals(3, pipeline.names().size());
        assertEquals(SSL_HANDLER, pipeline.names().get(0));
        assertEquals(SSL_EXCEPTION_HANDLER, pipeline.names().get(1));
        assertEquals(SSL_PARAMETER_HANDLER, pipeline.names().get(2));
    }
}
package BQN.types.callable.builtins.md1;

import BQN.Main;
import BQN.errors.*;
import BQN.tools.*;
import BQN.types.*;
import BQN.types.arrs.*;
import BQN.types.callable.Md1Derv;
import BQN.types.callable.builtins.Md1Builtin;

import java.util.Arrays;

/**
 * The BQN "each" 1-modifier {@code ¨}: applies its operand to every element of
 * its argument(s), with leading-axis agreement and scalar extension for the
 * dyadic case, plus structural inverse ({@code ⁼}) and under ({@code ⌾}) support.
 *
 * <p>Bug fix: the static {@code underW} promoted a rank-0 non-primitive {@code x}
 * using {@code w.first()} instead of {@code x.first()} (copy-paste from the
 * {@code w} promotion on the previous line), substituting the wrong value
 * during scalar extension.</p>
 */
public class EachBuiltin extends Md1Builtin {
  public String ln(FmtInfo f) { return "¨"; }

  /** Monadic entry point: delegates to {@link #on(Value, Value)}. */
  public Value call(Value f, Value x, Md1Derv derv) { return on(f, x); }

  /**
   * Monadic each: {@code f¨ x}.
   *
   * <p>Fast paths: a scalar argument short-circuits to a single call; a
   * non-callable boolean operand produces a constant bit array directly; any
   * other non-callable operand produces a constant array.</p>
   */
  public static Value on(Value f, Value x) {
    if (x.scalar()) return SingleItemArr.r0(f.call(x.first()));
    if (f instanceof Callable) {
      MutVal res = new MutVal(x.shape);
      for (int i = 0; i < x.ia; i++) {
        res.set(i, f.call(x.get(i)));
      }
      return res.get();
    } else {
      if (f instanceof Num && Num.isBool(((Num) f).num)) { // bitarr code is very bad at respecting SingleItemArrs
        long[] res = new long[BitArr.sizeof(x.ia)];
        if (((Num) f).num==1) Arrays.fill(res, ~0L); // all-ones words represent a constant true array
        return new BitArr(res, x.shape);
      }
      return new SingleItemArr(f, x.shape);
    }
  }

  /**
   * Dyadic each: {@code w f¨ x}.
   *
   * <p>Handles scalar extension on either side, equal-rank zip, and
   * leading-axis agreement (the lower-rank argument's shape must be a prefix
   * of the other's; each of its cells is reused {@code ext} times).</p>
   *
   * @throws LengthError if the shape prefixes disagree
   */
  public Value call(Value f, Value w, Value x, Md1Derv derv) {
    if (x.scalar()) {
      Value x0 = x.first();
      if (w.scalar()) return SingleItemArr.r0(f.call(w.first(), x0));
      Value[] n = new Value[w.ia];
      for (int i = 0; i < n.length; i++) n[i] = f.call(w.get(i), x0);
      return Arr.create(n, w.shape);
    }
    if (w.scalar()) {
      Value[] n = new Value[x.ia];
      Value w0 = w.first();
      for (int i = 0; i < n.length; i++) n[i] = f.call(w0, x.get(i));
      return Arr.create(n, x.shape);
    }
    int mr = Math.min(w.r(), x.r());
    if (!Arr.eqPrefix(w.shape, x.shape, mr)) throw new LengthError("shape prefixes not equal ("+Main.fArr(w.shape)+" vs "+Main.fArr(x.shape)+")", derv);
    if (w.r() == x.r()) {
      // equal ranks: element-wise zip
      MutVal res = new MutVal(x.shape);
      for (int i = 0; i < x.ia; i++) res.set(i, f.call(w.get(i), x.get(i)));
      return res.get();
    }
    boolean we = w.r() < x.r(); // w is expanded (its cells are repeated to match x)
    int max = Math.max(w.ia, x.ia);
    int min = Math.min(w.ia, x.ia);
    if (min==0) {
      // empty lower-rank argument: result is the higher-rank argument unchanged
      return we? x : w;
    } else {
      int ext = max/min; // how many result items each lower-rank cell covers
      Value[] n = new Value[max];
      int r = 0;
      if (we) for (int i = 0; i < min; i++) {
        Value c = w.get(i);
        for (int j = 0; j < ext; j++) { n[r] = f.call(c, x.get(r)); r++; }
      }
      else for (int i = 0; i < min; i++) {
        Value c = x.get(i);
        for (int j = 0; j < ext; j++) { n[r] = f.call(w.get(r), c); r++; }
      }
      return Arr.create(n, we? x.shape : w.shape);
    }
  }

  /** Structural inverse entry point: delegates to {@link #onInv}. */
  public Value callInv(Value f, Value x) { return onInv(f, x, this); }

  /**
   * Inverse of monadic each: inverts {@code f} on every element.
   *
   * @throws DomainError if {@code x} is an atom or {@code f} is not a function
   */
  public static Value onInv(Value f, Value x, Callable blame) {
    if (x instanceof Primitive) throw new DomainError("F"+blame+"⁼: argument cannot be an atom", blame);
    if (!(f instanceof Fun)) throw new DomainError("can't invert A"+blame, blame);
    Value[] n = new Value[x.ia];
    for (int i = 0; i < n.length; i++) { n[i] = f.callInv(x.get(i)); }
    // rank-0 results unwrap back to the atom they enclose
    if (x.r() == 0 && n[0] instanceof Primitive) return n[0];
    return Arr.create(n, x.shape);
  }

  /** Monadic under ({@code ⌾}): applies {@code o} under {@code f¨}. */
  public Value under(Value f, Value o, Value x, Md1Derv derv) {
    Value[] res2 = new Value[x.ia];
    rec(f, o, x, 0, new Value[x.ia], new Value[1], res2);
    return Arr.create(res2, x.shape);
  }

  /**
   * Recursive helper for monadic under: element {@code i} is computed by
   * applying under through a synthetic function whose forward pass records the
   * transformed element, recurses to fill the rest, applies {@code o} to the
   * fully transformed array (once, at the deepest level), and hands back the
   * i-th item of that result for the backward pass.
   */
  private static void rec(Value f, Value o, Value x, int i, Value[] args, Value[] resPre, Value[] res) {
    if (i == args.length) {
      Value v = o instanceof Fun? o.call(Arr.create(args, x.shape)) : o;
      resPre[0] = v;
    } else {
      res[i] = f.under(new Fun() {
        public String ln(FmtInfo fi) { return f.ln(fi)+"¨"; }
        public Value call(Value x1) {
          args[i] = x1;
          rec(f, o, x, i+1, args, resPre, res);
          return resPre[0].get(i);
        }
      }, x.get(i));
    }
  }

  /** Dyadic under entry point: delegates to the static overload. */
  public Value underW(Value f, Value o, Value w, Value x, Md1Derv derv) { return underW(f, o, w, x, this); }

  /**
   * Dyadic under: applies {@code o} under {@code w f¨ x}.
   *
   * <p>Rank-0 non-primitive arguments are promoted to single-item arrays so
   * the element loop can index them uniformly.</p>
   *
   * @throws LengthError if both arguments are arrays and their shapes differ
   */
  public static Value underW(Value f, Value o, Value w, Value x, Callable blame) {
    if (w.r()!=0 && x.r()!=0 && !Arrays.equals(w.shape, x.shape)) throw new LengthError("shapes not equal ("+Main.fArr(w.shape)+" vs "+Main.fArr(x.shape)+")", blame);
    int ia = Math.max(w.ia, x.ia);
    Value[] res2 = new Value[ia];
    if (w.r()==0 && !(w instanceof Primitive)) w = SingleItemArr.r0(w.first()); // abuse that get doesn't check indexes for simple scalar extension
    if (x.r()==0 && !(x instanceof Primitive)) x = SingleItemArr.r0(x.first()); // fixed: previously promoted with w.first()
    rec(f, o, w, x, 0, new Value[ia], new Value[1], res2);
    return Arr.create(res2, x.shape);
  }

  /**
   * Recursive helper for dyadic under; same strategy as the monadic variant,
   * but threads the corresponding {@code w} element into each underW call.
   */
  private static void rec(Value f, Value o, Value w, Value x, int i, Value[] args, Value[] resPre, Value[] res) {
    if (i == args.length) {
      Value v = o instanceof Fun? o.call(Arr.create(args, x.shape)) : o;
      resPre[0] = v;
    } else {
      res[i] = f.underW(new Fun() {
        public String ln(FmtInfo fi) { return f.ln(fi)+"¨"; }
        public Value call(Value x1) {
          args[i] = x1;
          rec(f, o, w, x, i+1, args, resPre, res);
          return resPre[0].get(i);
        }
      }, w.get(i), x.get(i));
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.dtstack.chunjun.connector.doris.options;

import java.util.List;
import java.util.Properties;
import java.util.StringJoiner;

/**
 * Fluent builder for {@link DorisConf}. All setters return {@code this} so
 * calls can be chained; {@link #build()} validates the mandatory fields
 * (FE nodes and username) before handing out the configuration.
 *
 * @author tiezhu@dtstack
 * @date 2021/9/17 星期五
 */
public class DorisConfBuilder {

    /** The configuration instance being populated. */
    private final DorisConf dorisConf;

    public DorisConfBuilder() {
        this.dorisConf = new DorisConf();
    }

    /** Target database name. */
    public DorisConfBuilder setDatabase(String database) {
        this.dorisConf.setDatabase(database);
        return this;
    }

    /** Target table name. */
    public DorisConfBuilder setTable(String table) {
        this.dorisConf.setTable(table);
        return this;
    }

    /** Doris frontend node addresses; required, validated in {@link #build()}. */
    public DorisConfBuilder setFeNodes(List<String> feNodes) {
        this.dorisConf.setFeNodes(feNodes);
        return this;
    }

    /** Login username; required, validated in {@link #build()}. */
    public DorisConfBuilder setUsername(String username) {
        this.dorisConf.setUsername(username);
        return this;
    }

    /** Login password. */
    public DorisConfBuilder setPassword(String password) {
        this.dorisConf.setPassword(password);
        return this;
    }

    /** Write mode used when loading data. */
    public DorisConfBuilder setWriteMode(String writeMode) {
        this.dorisConf.setWriteMode(writeMode);
        return this;
    }

    /** Whether source names should be mapped to target names. */
    public DorisConfBuilder setNameMapped(boolean needNameMapping) {
        this.dorisConf.setNameMapped(needNameMapping);
        return this;
    }

    /** Stream-load tuning options. */
    public DorisConfBuilder setLoadOptions(LoadConf loadConf) {
        this.dorisConf.setLoadConf(loadConf);
        return this;
    }

    /** Extra properties forwarded to the load request. */
    public DorisConfBuilder setLoadProperties(Properties loadProperties) {
        this.dorisConf.setLoadProperties(loadProperties);
        return this;
    }

    /** Number of rows per flush batch. */
    public DorisConfBuilder setBatchSize(int batchSize) {
        this.dorisConf.setBatchSize(batchSize);
        return this;
    }

    /** Interval in milliseconds between automatic flushes. */
    public DorisConfBuilder setFlushIntervalMills(long flushIntervalMills) {
        this.dorisConf.setFlushIntervalMills(flushIntervalMills);
        return this;
    }

    /** Maximum number of retries for a failed load. */
    public DorisConfBuilder setMaxRetries(int maxRetries) {
        this.dorisConf.setMaxRetries(maxRetries);
        return this;
    }

    /** Wait time in milliseconds between retries. */
    public DorisConfBuilder setWaitRetryMills(long waitRetryMills) {
        this.dorisConf.setWaitRetryMills(waitRetryMills);
        return this;
    }

    /**
     * Validates mandatory options and returns the configuration.
     *
     * @return the populated {@link DorisConf}
     * @throws IllegalArgumentException if FE nodes or username are missing
     */
    public DorisConf build() {
        StringJoiner problems = new StringJoiner("\n");
        List<String> feNodes = dorisConf.getFeNodes();
        if (feNodes == null || feNodes.isEmpty()) {
            problems.add("Doris FeNodes can not be empty!");
        }
        String username = dorisConf.getUsername();
        if (username == null || username.isEmpty()) {
            problems.add("Doris Username can not be empty!");
        }
        if (problems.length() > 0) {
            throw new IllegalArgumentException("Doris Options error:\n" + problems);
        }
        return dorisConf;
    }
}
package com.bham.bd.entity.ai.behavior; import com.bham.bd.utils.GeometryEnhanced; import javafx.geometry.Point2D; import javafx.scene.canvas.GraphicsContext; import javafx.scene.paint.Color; import javafx.scene.shape.Rectangle; import javafx.scene.transform.Rotate; import static com.bham.bd.components.Controller.services; /** * <p>Condition for checking whether a straight path (represented as a rectangle) between 2 points * is free. It tests whether the path intersects any obstacles in the game map but it doesn't * check for any character or bullet collision.</p> */ public class FreePathCondition implements Condition { /** * source point a path starts from */ private Point2D start; /** * destination point a path ends at */ private Point2D end; /** * radius of an entity that has to fit through the path (i.e., half of the width of the path) */ private double radius; /** * Constructs a condition with placeholder points and a radius of 1 representing the path as a line */ public FreePathCondition() { start = new Point2D(0, 0); end = new Point2D(0, 0); radius = 1; } /** * Constructs a condition with placeholder points and a given radius */ public FreePathCondition(double radius) { start = new Point2D(0, 0); end = new Point2D(0, 0); this.radius = radius; } /** * Constructs a condition with 2 provided points and setting radius to 1 (as if it was a line) * @param start source point a path starts from * @param end destination point a path ends at */ public FreePathCondition(Point2D start, Point2D end) { this.start = start; this.end = end; radius = 1; } /** * Constructs a condition with 2 provided points and a given radius * @param start source point a path starts from * @param end destination point a path ends at * @param radius radius of an entity that has to fit through the path (i.e., half of the width of the path) */ public FreePathCondition(Point2D start, Point2D end, double radius) { this.start = start; this.end = end; this.radius = radius; } /** * Sets 2 points as 
testing values for the <i>getPath()</i> method. The path will be checked between them. * * @param start source point a path starts from * @param end destination point a path ends at */ public void setTestValues(Point2D start, Point2D end) { this.start = start; this.end = end; } /** * Sets 2 points as testing values for the <i>getPath()</i> method. The path will be checked between them. * * @param start source point a path starts from * @param end destination point a path ends at * @param radius radius of an entity that has to fit through the path (i.e., half of the width of the path) */ public void setTestValues(Point2D start, Point2D end, double radius) { this.start = start; this.end = end; this.radius = radius; } /** * Returns the path from start to end point * * <p>It calculates the angle and the distance between them, creates a rectangle of the same length as the * distance and rotates it appropriately to connect the 2 points. The height of the rectangle is defined by * the radius property, i.e., the height is twice the radius size.</p> * * @return a properly rotated Rectangle object connecting start and end points */ public Rectangle getPath() { // double angle = Math.atan2(end.getY() - start.getY(), end.getX() - start.getX()); // Rectangle path = new Rectangle(start.getX(), start.getY(), start.distance(end), radius * 2); // path.getTransforms().add(new Rotate(Math.toDegrees(angle), start.getX(), start.getY())); double rayCastWidth = radius*2; double angle = GeometryEnhanced.clockWiseAngle(end.subtract(start),new Point2D(0,1)); double dis = start.distance(end); Point2D center = start.midpoint(end); Point2D topLeft = center.subtract(rayCastWidth*0.5,dis/2); Rectangle hitBox = new Rectangle(topLeft.getX(), topLeft.getY(), rayCastWidth, dis); hitBox.getTransforms().add(new Rotate(angle, center.getX(),center.getY())); return hitBox; } /** * Renders the calculated path red if there are obstacles in it and green otherwise * @param gc graphics context on which the 
path will be drawn */ public void render(GraphicsContext gc) { gc.setStroke(test() ? Color.GREEN : Color.RED); gc.setLineWidth(1); gc.strokeRect(getPath().getX(), getPath().getY(), getPath().getWidth(), getPath().getHeight()); } /** * Tests for the path to not contain any obstacles in its area * @return true if there are no obstacles in a way and false otherwise */ @Override public boolean test() { return !services.intersectsObstacles(getPath()); } }
/*
 * Copyright (C) 2020 The Baremaps Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.baremaps.collection;

import com.baremaps.collection.memory.Memory;
import com.baremaps.collection.type.DataType;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

/**
 * A data store backed by a {@link DataType} and a {@link Memory}. Data is appended to the store and
 * can be accessed by its position in the {@link Memory}.
 *
 * <p>Appends are serialized with a lock so that concurrent writers reserve disjoint ranges; reads
 * are lock-free.</p>
 */
public class DataStore<T> implements Closeable, Cleanable {

  private final DataType<T> dataType;

  private final Memory memory;

  private final long segmentSize;

  // Next free byte position; guarded by lock.
  private long offset;

  // Number of values appended so far; guarded by lock.
  private long size;

  private final Lock lock = new ReentrantLock();

  /**
   * Constructs a data store.
   *
   * @param dataType the data type
   * @param memory the memory
   */
  public DataStore(DataType<T> dataType, Memory memory) {
    this.dataType = dataType;
    this.memory = memory;
    this.segmentSize = memory.segmentSize();
    this.offset = 0;
    this.size = 0;
  }

  /**
   * Appends a value to the data store and returns its position in the memory.
   *
   * <p>A value never straddles two segments: if it does not fit in the remainder of the current
   * segment, the write is aligned to the start of the next one. Only the position reservation is
   * done under the lock; the actual write happens outside it, on the reserved range.</p>
   *
   * @param value the value
   * @return the position of the value in the memory.
   * @throws StoreException if the encoded value is larger than a segment
   */
  public long add(T value) {
    int valueSize = dataType.size(value);
    if (valueSize > segmentSize) {
      throw new StoreException("The value is too big to fit in a segment");
    }

    long position;
    long segmentIndex;
    long segmentOffset;
    lock.lock();
    try {
      position = offset;
      segmentIndex = position / segmentSize;
      segmentOffset = position % segmentSize;
      if (segmentOffset + valueSize > segmentSize) {
        // Not enough room left in this segment: start at the next segment boundary.
        segmentOffset = 0;
        segmentIndex = segmentIndex + 1;
        position = segmentIndex * segmentSize;
      }
      offset = position + valueSize;
      size++;
    } finally {
      // Always release, even if the reservation arithmetic throws.
      lock.unlock();
    }

    ByteBuffer segment = memory.segment((int) segmentIndex);
    dataType.write(segment, (int) segmentOffset, value);
    return position;
  }

  /**
   * Returns a values by its position in memory.
   *
   * @param position the position of the value
   * @return the value
   */
  public T get(long position) {
    long segmentIndex = position / segmentSize;
    long segmentOffset = position % segmentSize;
    ByteBuffer buffer = memory.segment((int) segmentIndex);
    return dataType.read(buffer, (int) segmentOffset);
  }

  /**
   * Returns the number of values stored in the data store.
   *
   * @return the number of values
   */
  public long size() {
    return size;
  }

  /** {@inheritDoc} */
  @Override
  public void clean() throws IOException {
    memory.clean();
  }

  /** {@inheritDoc} */
  @Override
  public void close() throws IOException {
    memory.close();
  }
}
package drive; import adapter.BuildAuto; /** * Drive class is used to test whether the operation on database is right, like create, insert, update and delete. * @author willQian */ public class Drive { public static void main(String [] args) { BuildAuto ca = new BuildAuto(); ca.BuildAuto("FordZTW"); ca.BuildAuto("NissanAltima"); ca.BuildAuto("HondaAccord"); //update option set ca.updateOptionSet("FordZTW", "Color", "New Color"); ca.updateOptionSet("HondaAccord", "Brakes", "New Brakes"); //update option ca.updateOptionNameAndPrice("FordZTW", "New Color", "Liquid Grey Clearcoat Metallic", "Liquid Grey", 10); ca.updateOptionNameAndPrice("HondaAccord", "Transmission", "Automatic", "Automatic and Manual", 219); //delete option set ca.deleteOptionSet("FordZTW", "Transmission"); ca.deleteOptionSet("HondaAccord", "Power Moonroof"); //delete option ca.deleteOption("FordZTW", "New Color", "Fort Knox Gold Clearcoat Metallic"); ca.deleteOption("HondaAccord", "Side Impact Air Bags", "not present"); //delete car ca.deleteCar("NissanAltima"); //print out the result ca.printAuto("FordZTW"); System.out.println(); ca.printAuto("NissanAltima"); System.out.println(); ca.printAuto("HondaAccord"); } }
/*
 * MIT License
 *
 * Copyright (c) 2021 MASES s.r.l.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

/**************************************************************************************
 * <auto-generated>
 *      This code was generated from a template using JCOReflector
 *
 *      Manual changes to this file may cause unexpected behavior in your application.
 *      Manual changes to this file will be overwritten if the code is regenerated.
 * </auto-generated>
 *************************************************************************************/

package system.drawing;

import org.mases.jcobridge.*;
import org.mases.jcobridge.netreflection.*;

// Import section
import system.IDisposable;
import system.IDisposableImplementation;

/**
 * The base .NET class managing System.Drawing.IDeviceContext, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a.
 * <p>
 *
 * See: <a href="https://docs.microsoft.com/en-us/dotnet/api/System.Drawing.IDeviceContext" target="_top">https://docs.microsoft.com/en-us/dotnet/api/System.Drawing.IDeviceContext</a>
 */
public interface IDeviceContext extends IJCOBridgeReflected, IDisposable {
    /**
     * Fully assembly qualified name: System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a
     */
    public static final String assemblyFullName = "System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a";
    /**
     * Assembly name: System.Drawing
     */
    public static final String assemblyShortName = "System.Drawing";
    /**
     * Qualified class name: System.Drawing.IDeviceContext
     */
    public static final String className = "System.Drawing.IDeviceContext";

    /**
     * Try to cast the {@link IJCOBridgeReflected} instance into {@link IDeviceContext}, a cast assert is made to check if types are compatible.
     * @param from {@link IJCOBridgeReflected} instance to be casted
     * @return {@link IDeviceContext} instance
     * @throws java.lang.Throwable in case of error during cast operation
     */
    public static IDeviceContext ToIDeviceContext(IJCOBridgeReflected from) throws Throwable {
        JCOBridge bridge = JCOBridgeInstance.getInstance("System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a");
        // Resolve the CLR type by its short or full assembly-qualified name, depending on the global reflector setting
        JCType classType = bridge.GetType(className + ", " + (JCOReflector.getUseFullAssemblyName() ? assemblyFullName : assemblyShortName));
        NetType.AssertCast(classType, from);
        return new IDeviceContextImplementation(from.getJCOInstance());
    }

    /**
     * Returns the reflected Assembly name
     *
     * @return A {@link String} representing the Fullname of reflected Assembly
     */
    public String getJCOAssemblyName();

    /**
     * Returns the reflected Class name
     *
     * @return A {@link String} representing the Fullname of reflected Class
     */
    public String getJCOClassName();

    /**
     * Returns the reflected Class name used to build the object
     *
     * @return A {@link String} representing the name used to allocated the object
     *         in CLR context
     */
    public String getJCOObjectName();

    /**
     * Returns the instantiated class
     *
     * @return An {@link java.lang.Object} representing the instance of the instantiated Class
     */
    public java.lang.Object getJCOInstance();

    /**
     * Returns the instantiated class Type
     *
     * @return A {@link JCType} representing the Type of the instantiated Class
     */
    public JCType getJCOType();

    // Methods section

    public void ReleaseHdc() throws Throwable;

    // Properties section

    // Instance Events section
}
/*
 * Copyright 2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.openehealth.ipf.commons.ihe.hl7v2.audit.iti64;

import org.junit.Test;
import org.openehealth.ipf.commons.audit.codes.*;
import org.openehealth.ipf.commons.ihe.hl7v2.audit.Hl7v2AuditorTestBase;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;

/**
 * Tests that {@link Iti64AuditStrategy} (ITI-64, XAD-PID Change Management)
 * produces a valid audit message with the expected participant objects on both
 * client and server side.
 *
 * @author Christian Ohr
 */
public class Iti64AuditStrategyTest extends Hl7v2AuditorTestBase {

    @Test
    public void testServerSide() {
        testRequest(true);
    }

    @Test
    public void testClientSide() {
        testRequest(false);
    }

    /**
     * Builds an audit dataset, renders the audit message for the given side,
     * validates it, and asserts the common attributes plus the expected
     * participant object counts: 2 originations (new/local patient ids),
     * 2 logical deletions (previous/subsumed ids), and 1 system participant.
     */
    private void testRequest(boolean serverSide) {
        var strategy = new Iti64AuditStrategy(serverSide);
        var auditDataset = getHl7v2AuditDataset(strategy);
        var auditMessage = makeAuditMessage(strategy, auditContext, auditDataset);

        assertNotNull(auditMessage);
        auditMessage.validate();

        // ITI-64 is a PatientRecord Update event
        assertCommonV2AuditAttributes(auditMessage,
                EventOutcomeIndicator.Success,
                EventIdCode.PatientRecord,
                EventActionCode.Update,
                serverSide,
                true);

        assertEquals(2, auditMessage.findParticipantObjectIdentifications(
                poit -> poit.getParticipantObjectDataLifeCycle() == ParticipantObjectDataLifeCycle.Origination).size());
        assertEquals(2, auditMessage.findParticipantObjectIdentifications(
                poit -> poit.getParticipantObjectDataLifeCycle() == ParticipantObjectDataLifeCycle.LogicalDeletion).size());
        assertEquals(1, auditMessage.findParticipantObjectIdentifications(
                poit -> poit.getParticipantObjectTypeCode() == ParticipantObjectTypeCode.System).size());

        // System.out.println(printAuditMessage(auditMessage));
    }

    /**
     * Populates a fresh {@link Iti64AuditDataset} with the shared test fixture
     * constants from {@link Hl7v2AuditorTestBase} (patient ids, facilities,
     * applications, submission set UUID).
     */
    private Iti64AuditDataset getHl7v2AuditDataset(Iti64AuditStrategy strategy) {
        var auditDataset = strategy.createAuditDataset();
        auditDataset.setEventOutcomeIndicator(EventOutcomeIndicator.Success);
        // auditDataset.setLocalAddress(SERVER_URI);
        auditDataset.setRemoteAddress(CLIENT_IP_ADDRESS);
        auditDataset.setMessageControlId(MESSAGE_ID);
        auditDataset.setNewPatientId(PATIENT_IDS[0]);
        auditDataset.setPreviousPatientId(PATIENT_IDS[1]);
        auditDataset.setLocalPatientId(PATIENT_IDS[0]);
        auditDataset.setSubsumedLocalPatientId(PATIENT_IDS[1]);
        auditDataset.setSendingFacility(SENDING_FACILITY);
        auditDataset.setSendingApplication(SENDING_APPLICATION);
        auditDataset.setReceivingFacility(RECEIVING_FACILITY);
        auditDataset.setReceivingApplication(RECEIVING_APPLICATION);
        auditDataset.setSubmissionSetUuid(SUBMISSION_SET_UUID);
        return auditDataset;
    }
}
/*******************************************************************************
 * Copyright (c) Nov 10, 2015 @author <a href="mailto:iffiff1@gmail.com">Tyler Chen</a>.
 * All rights reserved.
 *
 * Contributors:
 *     <a href="mailto:iffiff1@gmail.com">Tyler Chen</a> - initial API and implementation
 ******************************************************************************/
package org.iff.infra.util.freemarker.model;

import java.util.List;
import java.util.Locale;

import org.apache.commons.lang3.StringUtils;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.iff.infra.util.I18nHelper;
import org.iff.infra.util.ThreadLocalHelper;
import org.iff.infra.util.freemarker.FreeMarkerTemplateModel;

import freemarker.template.TemplateMethodModelEx;
import freemarker.template.TemplateModelException;

/**
 * FreeMarker template method {@code i18n(key[, alias[, namespace]])}: resolves
 * a localized message for {@code key}, falling back to {@code alias} (default
 * empty string) when the key is blank or no message is found.
 *
 * <p>The locale is taken from the "locale" thread-local if present, otherwise
 * from the authenticated Shiro session; the helper instance is taken from the
 * "I18N" thread-local when available.</p>
 *
 * @author <a href="mailto:iffiff1@gmail.com">Tyler Chen</a>
 * @since Nov 10, 2015
 */
@FreeMarkerTemplateModel("i18n")
@SuppressWarnings("rawtypes")
public class I18nMethod implements TemplateMethodModelEx {

	/**
	 * Resolves the message for the given template arguments.
	 *
	 * @param arguments position 0: message key (required); position 1: fallback
	 *                  alias (optional, defaults to ""); position 2: helper
	 *                  namespace (optional)
	 * @return the resolved message, the alias for a blank key, or "" when no
	 *         arguments are supplied
	 * @throws TemplateModelException per the {@link TemplateMethodModelEx} contract
	 */
	public Object exec(List arguments) throws TemplateModelException {
		// No arguments at all: nothing to resolve.
		if (arguments == null || arguments.isEmpty()) {
			return "";
		}
		String key = arguments.get(0).toString();
		String alias = arguments.size() > 1 ? arguments.get(1).toString() : "";
		String namespace = arguments.size() > 2 ? arguments.get(2).toString() : null;

		// Locale: thread-local first, then the authenticated Shiro session.
		Locale locale = null;
		{
			Object o = ThreadLocalHelper.get("locale");
			if (o instanceof Locale) {
				locale = (Locale) o;
			} else {
				Subject subject = SecurityUtils.getSubject();
				if (subject.getPrincipal() != null && subject.isAuthenticated()) {
					locale = (Locale) subject.getSession().getAttribute("locale");
				}
			}
		}
		// Helper instance: thread-local only.
		I18nHelper i18n = null;
		{
			Object o = ThreadLocalHelper.get("I18N");
			if (o instanceof I18nHelper) {
				i18n = (I18nHelper) o;
			}
		}

		// Blank key: fall back to the alias directly.
		if (!StringUtils.isNotBlank(key)) {
			return alias;
		}
		// Resolution order: explicit helper > namespace (+locale) > locale > default helper.
		if (i18n != null) {
			return i18n.getMessage(key, alias);
		} else if (StringUtils.isNotBlank(namespace)) {
			if (locale != null) {
				return I18nHelper.get(namespace, locale).getMessage(key, alias);
			} else {
				return I18nHelper.get(namespace).getMessage(key, alias);
			}
		} else if (locale != null) {
			return I18nHelper.get(null, locale).getMessage(key, alias);
		} else {
			return I18nHelper.me().getMessage(key, alias);
		}
	}
}
package io.nutz.nutzsite.module.test.services;

import io.nutz.nutzsite.common.service.BaseService;
import io.nutz.nutzsite.module.test.models.Checkpoint;

/**
 * Checkpoint (monitoring-point location) service layer.
 *
 * @author haiming
 * @date 2021-03-27
 */
public interface CheckpointService extends BaseService<Checkpoint> {

    // NOTE(review): return type is Object and the payload shape is not visible
    // here — presumably a view-model for a checkpoint "home" page; confirm
    // against the implementing class.
    Object home(String id);
}
package sMath.function;

import java.util.Arrays;
import java.util.Collection;

import gnu.trove.set.hash.THashSet;

import sMath.Expression;
import sMath.VariableSymbol;
import sMath.function.interfaces.IMultivariate;

/**
 * A power expression, {@code base ^ exponent}, over two sub-expressions.
 */
public class Power implements IMultivariate {

    protected Expression base, exponent;

    protected Power(Expression base, Expression exponent) {
        this.base = base;
        this.exponent = exponent;
    }

    /** The two operands, in (base, exponent) order. */
    @Override
    public Collection<Expression> arguments() {
        return Arrays.asList(base, exponent);
    }

    /**
     * Union of the variables that base and exponent depend on.
     * Folds the smaller set into the larger one so the number of
     * insertions is minimized; the larger set is returned.
     */
    @Override
    public THashSet<VariableSymbol> getDependantVariables() {
        THashSet<VariableSymbol> ofBase = base.getDependantVariables();
        THashSet<VariableSymbol> ofExponent = exponent.getDependantVariables();
        if (ofBase.size() < ofExponent.size()) {
            ofExponent.addAll(ofBase);
            return ofExponent;
        }
        ofBase.addAll(ofExponent);
        return ofBase;
    }
}
/*
 * DBeaver - Universal Database Manager
 * Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jkiss.dbeaver.registry;

import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IConfigurationElement;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.model.connection.DBPNativeClientLocation;
import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor;
import org.jkiss.dbeaver.model.runtime.OSDescriptor;
import org.jkiss.dbeaver.registry.driver.DriverDescriptor;
import org.jkiss.dbeaver.registry.driver.DriverLibraryRepository;
import org.jkiss.dbeaver.registry.driver.DriverUtils;
import org.jkiss.dbeaver.runtime.WebUtils;
import org.jkiss.dbeaver.utils.ContentUtils;

import java.io.*;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

/**
 * Descriptor of one OS-specific native client distribution declared in the
 * plugin registry: where its files live (bundled resource path and/or remote
 * repository path) and where they are installed locally.
 */
public class NativeClientDistributionDescriptor {
    private static final Log log = Log.getLog(NativeClientDistributionDescriptor.class);

    private final List<NativeClientFileDescriptor> files = new ArrayList<>();
    private OSDescriptor os;
    private String targetPath;
    private String remotePath;
    private String resourcePath;

    public NativeClientDistributionDescriptor(IConfigurationElement config) {
        String osName = config.getAttribute(RegistryConstants.ATTR_OS);
        // No "os" attribute means the distribution is OS-independent.
        this.os = osName == null ? null : new OSDescriptor(osName, config.getAttribute(RegistryConstants.ATTR_ARCH));
        this.targetPath = config.getAttribute("targetPath");
        this.remotePath = config.getAttribute("remotePath");
        this.resourcePath = config.getAttribute("resourcePath");
        for (IConfigurationElement fileElement : config.getChildren("file")) {
            // Only pick up files contributed by bundles that match the running platform.
            if (DriverUtils.matchesBundle(fileElement)) {
                this.files.add(new NativeClientFileDescriptor(fileElement));
            }
        }
    }

    public OSDescriptor getOs() {
        return os;
    }

    public String getTargetPath() {
        return targetPath;
    }

    public String getRemotePath() {
        return remotePath;
    }

    public String getResourcePath() {
        return resourcePath;
    }

    /**
     * Ensures all client files exist under {@code location}: files already
     * present are skipped; missing ones are first extracted from the bundled
     * resource (if available) and otherwise downloaded from the driver
     * repository.
     *
     * @return true (errors are reported via exceptions)
     * @throws DBException          on folder creation or download failure
     * @throws InterruptedException when the monitor is cancelled
     */
    public boolean downloadFiles(DBRProgressMonitor monitor, DBPNativeClientLocation location)
        throws DBException, InterruptedException {
        File targetPath = location.getPath();

        List<NativeClientFileDescriptor> filesToDownload = new ArrayList<>();
        for (NativeClientFileDescriptor file : files) {
            String fileName = file.getName();
            File targetFile = new File(targetPath, fileName);
            if (!targetFile.exists()) {
                filesToDownload.add(file);
            }
        }
        if (filesToDownload.isEmpty()) {
            return true;
        }
        if (!targetPath.exists()) {
            if (!targetPath.mkdirs()) {
                throw new DBException("Can't create target folder '" + targetPath.getAbsolutePath() + "'");
            }
        }
        for (int i = 0; i < filesToDownload.size(); i++) {
            if (monitor.isCanceled()) {
                throw new InterruptedException();
            }
            NativeClientFileDescriptor file = filesToDownload.get(i);
            String fileName = file.getName();
            File targetFile = new File(targetPath, fileName);
            String fileRemotePath = remotePath + "/" + file.getName();
            String localResourcePath = resourcePath + "/" + file.getName();

            {
                // Try to extract local resource file
                URL url = DataSourceProviderRegistry.getInstance().findResourceURL(localResourcePath);
                if (url != null) {
                    try {
                        url = FileLocator.toFileURL(url);
                        File localFile = new File(url.getFile());
                        if (localFile.exists()) {
                            try (InputStream is = new FileInputStream(localFile)) {
                                try (OutputStream os = new FileOutputStream(targetFile)) {
                                    ContentUtils.copyStreams(is, localFile.length(), os, monitor);
                                }
                                // FIX: was "return true;", which aborted the loop after the
                                // first successful copy and left the remaining files missing.
                                continue;
                            } catch (IOException e) {
                                // Remove a partially written target so the next run retries it.
                                if (targetFile.exists()) {
                                    if (!targetFile.delete()) {
                                        log.debug("Error deleting client file '" + targetFile.getAbsolutePath() + "'");
                                    }
                                }
                                log.debug("IO error copying resource file '" + localResourcePath + "'", e);
                            }
                        }
                    } catch (IOException ex) {
                        log.debug("Error locating resource file '" + localResourcePath + "'", ex);
                    }
                }
            }

            // Try to download remote file
            if (fileRemotePath.startsWith(DriverLibraryRepository.PATH_PREFIX)) {
                // Repository file
                fileRemotePath = fileRemotePath.substring(DriverLibraryRepository.PATH_PREFIX.length());
                String primarySource = DriverDescriptor.getDriversPrimarySource();
                if (!primarySource.endsWith("/") && !fileRemotePath.startsWith("/")) {
                    primarySource += '/';
                }
                String externalURL = primarySource + fileRemotePath;
                String taskName = "Download native client file '" + fileName + "'"
                    + " (" + (i + 1) + "/" + filesToDownload.size() + ")";
                monitor.beginTask(taskName, 1);
                try {
                    WebUtils.downloadRemoteFile(monitor, taskName, externalURL, targetFile, null);
                } catch (IOException e) {
                    log.debug("Error downloading file '" + fileName + "'", e);
                    throw new DBException("Error downloading file '" + fileName + "': " + e.getMessage());
                }
            }
        }
        return true;
    }

    @Override
    public String toString() {
        return os.toString();
    }
}
package fr.javatronic.blog.massive.annotation1.sub1;

import fr.javatronic.blog.processor.Annotation_001;

// Empty marker class carrying Annotation_001 — presumably one of many
// generated classes used to stress-test an annotation processor at scale
// (TODO confirm: sibling classes follow the same Class_NNNN pattern).
@Annotation_001
public class Class_2524 {
}
package com.codeaholicguy.config4j;

import com.codeaholicguy.config4j.util.KeyUtil;
import org.apache.commons.configuration.CompositeConfiguration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.HierarchicalINIConfiguration;
import org.apache.log4j.Logger;

import java.io.File;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;

/**
 * Per-(folder, filename) singleton wrapper around an INI configuration file.
 * Values are eagerly loaded into {@link #configData} at construction and
 * lazily refreshed on cache misses in {@link #getParameter(String, String)}.
 *
 * NOTE(review): instance creation is not synchronized — concurrent first calls
 * to {@code getInstance} may create duplicate instances; confirm whether the
 * callers are single-threaded.
 *
 * @author hoangnn
 */
public class Config {

    private static final Logger LOGGER = Logger.getLogger(Config.class);

    /** Cache of Config instances keyed by KeyUtil.generateKey(folder, filename). */
    private static Map<String, Config> instances;

    /** Flattened "section.name" -> value cache of the loaded configuration. */
    private final Map<String, String> configData = new HashMap<>();

    private CompositeConfiguration config;
    private String configFolderPath;
    private String configFilename;

    private static final String DEFAULT_CONFIG_FILENAME = "config.ini";
    private static final String EMPTY = "";

    /**
     * Returns the Config for the given folder/filename, creating it on first use.
     */
    public static Config getInstance(String configFolderPath, String configFilename) {
        String key = KeyUtil.generateKey(configFolderPath, configFilename);
        return getOrCreate(key, configFolderPath, configFilename);
    }

    /**
     * Returns the default Config (current directory, "config.ini").
     */
    public static Config getInstance() {
        String key = KeyUtil.generateKey(EMPTY, DEFAULT_CONFIG_FILENAME);
        return getOrCreate(key, null, null);
    }

    /** Shared lookup-or-create logic for both getInstance overloads. */
    private static Config getOrCreate(String key, String configFolderPath, String configFilename) {
        if (Objects.isNull(instances)) {
            instances = new HashMap<>();
        }
        Config instance = instances.get(key);
        if (Objects.isNull(instance)) {
            instance = new Config(configFolderPath, configFilename);
            instances.put(key, instance);
        }
        return instance;
    }

    private Config(String configFolderPath, String configFilename) {
        // Null arguments fall back to the defaults (cwd / config.ini).
        this.configFolderPath = Objects.isNull(configFolderPath) ? EMPTY : configFolderPath;
        this.configFilename = Objects.isNull(configFilename) ? DEFAULT_CONFIG_FILENAME : configFilename;
        initConfigData();
    }

    /** Loads the INI file and snapshots all keys into {@link #configData}. */
    private void initConfigData() {
        config = new CompositeConfiguration();
        File configFile = new File(Paths.get(this.configFolderPath, this.configFilename).toUri());
        try {
            config.addConfiguration(new HierarchicalINIConfiguration(configFile));
            Iterator keys = config.getKeys();
            while (keys.hasNext()) {
                String key = (String) keys.next();
                configData.put(key, config.getString(key));
            }
        } catch (ConfigurationException e) {
            LOGGER.error(e.getMessage());
        }
    }

    /**
     * Returns the value of {@code [section] name}, or null if absent.
     *
     * FIX: the original logic was inverted — a cache miss returned null
     * without consulting the configuration, while a cache hit re-queried the
     * configuration and could overwrite the cache with null. Now a cache hit
     * is returned directly, and a miss falls back to the configuration and is
     * cached only when a value is found.
     */
    public String getParameter(String section, String name) {
        String key = section + "." + name;
        String value = configData.get(key);
        if (!Objects.isNull(value)) {
            return value;
        }
        value = config.getString(key);
        if (!Objects.isNull(value)) {
            configData.put(key, value);
        }
        return value;
    }
}
package org.apache.tomcat.util.descriptor.web;

import java.io.Serializable;

/**
 * <p>Representation of a security role reference for a web application, as
 * represented in a <code>&lt;security-role-ref&gt;</code> element
 * in the deployment descriptor.</p>
 *
 * @since Tomcat 5.5
 */
public class SecurityRoleRef implements Serializable {

    private static final long serialVersionUID = 1L;

    // ------------------------------------------------------------- Properties

    /** The (required) role name. */
    private String name = null;

    /** The optional role link. */
    private String link = null;

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getLink() {
        return this.link;
    }

    public void setLink(String link) {
        this.link = link;
    }

    // --------------------------------------------------------- Public Methods

    /**
     * Return a String representation of this object.
     */
    @Override
    public String toString() {
        String result = "SecurityRoleRef[" + "name=" + name;
        if (link != null) {
            result += ", link=" + link;
        }
        return result + "]";
    }
}
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.vcs.log.graph.impl.print; import com.intellij.vcs.log.graph.GraphColorManager; import com.intellij.vcs.log.graph.api.LinearGraph; import com.intellij.vcs.log.graph.api.elements.GraphEdge; import com.intellij.vcs.log.graph.api.elements.GraphElement; import com.intellij.vcs.log.graph.api.elements.GraphNode; import com.intellij.vcs.log.graph.api.permanent.PermanentGraphInfo; import com.intellij.vcs.log.graph.utils.LinearGraphUtils; import com.intellij.vcs.log.graph.utils.NormalEdge; import org.jetbrains.annotations.NotNull; public class ColorGetterByLayoutIndex<CommitId> { @NotNull private final LinearGraph myLinearGraph; @NotNull private final PermanentGraphInfo<CommitId> myPermanentGraphInfo; @NotNull private final GraphColorManager<CommitId> myColorManager; public ColorGetterByLayoutIndex(@NotNull LinearGraph linearGraph, @NotNull PermanentGraphInfo<CommitId> permanentGraphInfo, @NotNull GraphColorManager<CommitId> colorManager) { myLinearGraph = linearGraph; myPermanentGraphInfo = permanentGraphInfo; myColorManager = colorManager; } public int getColorId(@NotNull GraphElement element) { if (element instanceof GraphNode) { int nodeId = myLinearGraph.getNodeId(((GraphNode)element).getNodeIndex()); return getNodeColor(nodeId, getLayoutIndex(nodeId)); } else { GraphEdge edge = (GraphEdge)element; NormalEdge normalEdge = LinearGraphUtils.asNormalEdge(edge); if (normalEdge 
== null) { int nodeId = myLinearGraph.getNodeId(LinearGraphUtils.getNotNullNodeIndex(edge)); return getNodeColor(nodeId, getLayoutIndex(nodeId)); } int upNodeId = myLinearGraph.getNodeId(normalEdge.up); int downNodeId = myLinearGraph.getNodeId(normalEdge.down); int upLayoutIndex = getLayoutIndex(upNodeId); int downLayoutIndex = getLayoutIndex(downNodeId); if (upLayoutIndex >= downLayoutIndex) { return getNodeColor(upNodeId, upLayoutIndex); } return getNodeColor(downNodeId, downLayoutIndex); } } private int getNodeColor(int nodeId, int layoutIndex) { int headNodeId = getHeadNodeId(nodeId); CommitId headCommitId = myPermanentGraphInfo.getPermanentCommitsInfo().getCommitId(headNodeId); if (layoutIndex == myPermanentGraphInfo.getPermanentGraphLayout().getLayoutIndex(headNodeId)) { return myColorManager.getColorOfBranch(headCommitId); } else { return myColorManager.getColorOfFragment(headCommitId, layoutIndex); } } private int getHeadNodeId(int nodeId) { if (nodeId < 0) return 0; return myPermanentGraphInfo.getPermanentGraphLayout().getOneOfHeadNodeIndex(nodeId); } private int getLayoutIndex(int nodeId) { if (nodeId < 0) return nodeId; return myPermanentGraphInfo.getPermanentGraphLayout().getLayoutIndex(nodeId); } }
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2019 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.openapi.converter.swagger; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.Arrays; import java.util.List; import java.util.function.Function; import org.apache.commons.lang3.tuple.Pair; import org.hamcrest.Matcher; import org.junit.jupiter.api.Test; /** Unit test for {@link UriBuilder}. 
*/ class UriBuilderUnitTest { private static final ParseMethod PARSE = new ParseMethod("parse", UriBuilder::parse); private static final ParseMethod PARSE_LENIENT = new ParseMethod("parseLenient", UriBuilder::parseLenient); private static final List<ParseMethod> PARSE_METHODS = Arrays.asList(PARSE, PARSE_LENIENT); private static final List<Pair<ParseMethod, ParseMethod>> PARSE_METHODS_MERGE = Arrays.asList( Pair.of(PARSE, PARSE), Pair.of(PARSE_LENIENT, PARSE_LENIENT), Pair.of(PARSE, PARSE_LENIENT), Pair.of(PARSE_LENIENT, PARSE)); @Test void shouldParseWithNullValue() { PARSE_METHODS.forEach( method -> { // Given String value = null; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(nullValue()), is(nullValue()), is(nullValue())); }); } private static void assertUriComponents( ParseMethod method, UriBuilder uriBuilder, Matcher<Object> schemeMatcher, Matcher<Object> authorityMatcher, Matcher<Object> pathMatcher) { assertUriComponents(method, null, uriBuilder, schemeMatcher, authorityMatcher, pathMatcher); } private static void assertUriComponents( ParseMethod method, ParseMethod otherMethod, UriBuilder uriBuilder, Matcher<Object> schemeMatcher, Matcher<Object> authorityMatcher, Matcher<Object> pathMatcher) { String reason = "Parsed with: " + method.name; if (otherMethod != null) { reason += " and " + otherMethod.name; } assertThat(reason, uriBuilder.getScheme(), schemeMatcher); assertThat(reason, uriBuilder.getAuthority(), authorityMatcher); assertThat(reason, uriBuilder.getPath(), pathMatcher); } @Test void shouldParseWithEmptyValue() { PARSE_METHODS.forEach( method -> { // Given String value = ""; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(nullValue()), is(nullValue()), is(nullValue())); }); } @Test void shouldParseWithJustRelativePath() { // Given ParseMethod method = PARSE; String value = "relativePath"; // When UriBuilder uriBuilder = 
method.parse(value); // Then assertUriComponents( method, uriBuilder, is(nullValue()), is(nullValue()), is(equalTo(value))); } @Test void shouldParseLenientWithJustAuthority() { // Given ParseMethod method = PARSE_LENIENT; String value = "authority"; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(nullValue()), is(equalTo(value)), is(nullValue())); } @Test void shouldParseWithAbsolutePath() { PARSE_METHODS.forEach( method -> { // Given String value = "/absolutePath"; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(nullValue()), is(nullValue()), is(equalTo(value))); }); } @Test void shouldParseWithAuthorityAndNoScheme() { PARSE_METHODS.forEach( method -> { // Given String value = "//example.com"; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(nullValue()), is(equalTo("example.com")), is(nullValue())); }); } @Test void shouldParseWithEmptyAuthority() { PARSE_METHODS.forEach( method -> { // Given String value = "//"; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(nullValue()), is(nullValue()), is(nullValue())); }); } @Test void shouldParseWithScheme() { PARSE_METHODS.forEach( method -> { // Given String value = "http://"; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(equalTo("http")), is(nullValue()), is(nullValue())); }); } @Test void shouldFailToParseWithEmptyScheme() { PARSE_METHODS.forEach( method -> { // Given String value = "://"; // When / Then IllegalArgumentException e = assertThrows( IllegalArgumentException.class, () -> UriBuilder.parse(value)); assertThat( "Parsed with: " + method, e.getMessage(), containsString("Expected non-empty scheme")); }); } @Test void shouldFailToParseWithMalformedScheme() { PARSE_METHODS.forEach( method -> { // Given String value = "notascheme//"; 
// When / Then IllegalArgumentException e = assertThrows( IllegalArgumentException.class, () -> UriBuilder.parse(value)); assertThat( "Parsed with: " + method, e.getMessage(), containsString("Expected no scheme")); }); } @Test void shouldParseWithSchemeAndAuthority() { PARSE_METHODS.forEach( method -> { // Given String value = "http://example.com"; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(equalTo("http")), is(equalTo("example.com")), is(nullValue())); }); } @Test void shouldParseWithSchemeAuthorityAndEmptyPath() { PARSE_METHODS.forEach( method -> { // Given String value = "http://example.com/"; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(equalTo("http")), is(equalTo("example.com")), is(equalTo("/"))); }); } @Test void shouldParseWithSchemeAuthorityAndNonEmptyPath() { PARSE_METHODS.forEach( method -> { // Given String value = "http://example.com/path"; // When UriBuilder uriBuilder = method.parse(value); // Then assertUriComponents( method, uriBuilder, is(equalTo("http")), is(equalTo("example.com")), is(equalTo("/path"))); }); } @Test void shouldThrowNullPointerIfMergingToNull() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse(""); // When / Then NullPointerException e = assertThrows(NullPointerException.class, () -> uriBuilder.merge(null)); assertThat("Parsed with: " + method, e, is(not(nullValue()))); }); } @Test void shouldMergeSchemeIfNull() { PARSE_METHODS_MERGE.forEach( pair -> { // Given ParseMethod method1 = pair.getLeft(); ParseMethod method2 = pair.getRight(); UriBuilder uriBuilder = method1.parse("//example.com/path/"); UriBuilder otherUrlBuilder = method2.parse("https://other.example.com/otherpath/"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method1, method2, uriBuilder, is(equalTo("https")), is(equalTo("example.com")), is(equalTo("/path/"))); }); } @Test void 
shouldNotMergeSchemeIfNotNull() { PARSE_METHODS_MERGE.forEach( pair -> { // Given ParseMethod method1 = pair.getLeft(); ParseMethod method2 = pair.getRight(); UriBuilder uriBuilder = method1.parse("http://example.com/path/"); UriBuilder otherUrlBuilder = method2.parse("https://other.example.com/otherpath/"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method1, method2, uriBuilder, is(equalTo("http")), is(equalTo("example.com")), is(equalTo("/path/"))); }); } @Test void shouldMergeAuthorityIfNull() { PARSE_METHODS_MERGE.forEach( pair -> { // Given ParseMethod method1 = pair.getLeft(); ParseMethod method2 = pair.getRight(); UriBuilder uriBuilder = method1.parse("http://"); UriBuilder otherUrlBuilder = method2.parse("https://other.example.com/otherpath/"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method1, method2, uriBuilder, is(equalTo("http")), is(equalTo("other.example.com")), is(not(nullValue()))); }); } @Test void shouldNotMergeAuthorityIfNotNull() { PARSE_METHODS_MERGE.forEach( pair -> { // Given ParseMethod method1 = pair.getLeft(); ParseMethod method2 = pair.getRight(); UriBuilder uriBuilder = method1.parse("http://example.com"); UriBuilder otherUrlBuilder = method2.parse("https://other.example.com/otherpath/"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method1, method2, uriBuilder, is(equalTo("http")), is(equalTo("example.com")), is(not(nullValue()))); }); } @Test void shouldMergePathIfNull() { PARSE_METHODS_MERGE.forEach( pair -> { // Given ParseMethod method1 = pair.getLeft(); ParseMethod method2 = pair.getRight(); UriBuilder uriBuilder = method1.parse("http://example.com"); UriBuilder otherUrlBuilder = method2.parse("https://other.example.com/otherpath/"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method1, method2, uriBuilder, is(equalTo("http")), is(equalTo("example.com")), is(equalTo("/otherpath/"))); }); } @Test void 
shouldNotMergePathIfNotNull() { PARSE_METHODS_MERGE.forEach( pair -> { // Given ParseMethod method1 = pair.getLeft(); ParseMethod method2 = pair.getRight(); UriBuilder uriBuilder = method1.parse("http://example.com/"); UriBuilder otherUrlBuilder = method2.parse("https://other.example.com/otherpath/"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method1, method2, uriBuilder, is(equalTo("http")), is(equalTo("example.com")), is(equalTo("/"))); }); } @Test void shouldMergeSchemeAuthorityIfJustAbsolutePath() { PARSE_METHODS_MERGE.forEach( pair -> { // Given ParseMethod method1 = pair.getLeft(); ParseMethod method2 = pair.getRight(); UriBuilder uriBuilder = method1.parse("/path"); UriBuilder otherUrlBuilder = method2.parse("https://other.example.com/otherpath/"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method1, method2, uriBuilder, is(equalTo("https")), is(equalTo("other.example.com")), is(equalTo("/path"))); }); } @Test void shouldMergeRelativePath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = UriBuilder.parse("relativePath"); UriBuilder otherUrlBuilder = method.parse("https://other.example.com/otherpath"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method, uriBuilder, is(equalTo("https")), is(equalTo("other.example.com")), is(equalTo("/otherpath/relativePath"))); }); } @Test void shouldMergeRelativePathWithPathEndedWithSlash() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = UriBuilder.parse("relativePath"); UriBuilder otherUrlBuilder = method.parse("https://other.example.com/otherpath/"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method, uriBuilder, is(equalTo("https")), is(equalTo("other.example.com")), is(equalTo("/otherpath/relativePath"))); }); } @Test void shouldMergeRelativePathWithNoPath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = UriBuilder.parse("relativePath"); 
UriBuilder otherUrlBuilder = method.parse("https://other.example.com"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method, uriBuilder, is(equalTo("https")), is(equalTo("other.example.com")), is(equalTo("relativePath"))); }); } @Test void shouldMergeSchemeAuthorityAndPathIfAllNull() { PARSE_METHODS_MERGE.forEach( pair -> { // Given ParseMethod method1 = pair.getLeft(); ParseMethod method2 = pair.getRight(); UriBuilder uriBuilder = method1.parse(null); UriBuilder otherUrlBuilder = method2.parse("https://other.example.com/otherpath/"); // When uriBuilder.merge(otherUrlBuilder); // Then assertUriComponents( method1, method2, uriBuilder, is(equalTo("https")), is(equalTo("other.example.com")), is(equalTo("/otherpath/"))); }); } @Test void shouldSetDefaultPathIfNotAlreadySet() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("http://example.com"); // When uriBuilder.withDefaultPath("path"); // Then assertThat("Parsed with: " + method, uriBuilder.getPath(), is(equalTo("path"))); }); } @Test void shouldNotSetDefaultPathIfAlreadySet() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("http://example.com/path"); // When uriBuilder.withDefaultPath("otherpath"); // Then assertThat( "Parsed with: " + method, uriBuilder.getPath(), is(equalTo("/path"))); }); } @Test void shouldBeEmptyWithoutSchemeAuthorityAndPath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse(""); // When boolean empty = uriBuilder.isEmpty(); // Then assertThat("Parsed with: " + method, empty, is(equalTo(true))); }); } @Test void shouldNotBeEmptyWithScheme() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("http://"); // When boolean empty = uriBuilder.isEmpty(); // Then assertThat("Parsed with: " + method, empty, is(equalTo(false))); }); } @Test void shouldNotBeEmptyWithAuthority() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder 
= method.parse("//example.com"); // When boolean empty = uriBuilder.isEmpty(); // Then assertThat("Parsed with: " + method, empty, is(equalTo(false))); }); } @Test void shouldNotBeEmptyWithPath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("/path"); // When boolean empty = uriBuilder.isEmpty(); // Then assertThat("Parsed with: " + method, empty, is(equalTo(false))); }); } @Test void shouldCopy() { PARSE_METHODS.forEach( method -> { // Given UriBuilder original = method.parse("/path"); // When UriBuilder copy = original.copy(); // Then assertUriComponents( method, copy, is(equalTo(original.getScheme())), is(equalTo(original.getAuthority())), is(equalTo(original.getPath()))); assertThat( "Parsed with: " + method, copy.toString(), is(equalTo(original.toString()))); }); } @Test void shouldBuildWithSchemeAndAuthority() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("http://example.com"); // When String url = uriBuilder.build(); // Then assertThat("Parsed with: " + method, url, is(equalTo("http://example.com"))); }); } @Test void shouldBuildWithSchemeAuthorityAndPath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("http://example.com/path"); // When String url = uriBuilder.build(); // Then assertThat( "Parsed with: " + method, url, is(equalTo("http://example.com/path"))); }); } @Test void shouldBuildAfterMergeRelativePathWithNoPath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = UriBuilder.parse("relativePath"); UriBuilder otherUrlBuilder = method.parse("http://example.com"); // When String url = uriBuilder.merge(otherUrlBuilder).build(); // Then assertThat( "Parsed with: " + method, url, is(equalTo("http://example.com/relativePath"))); }); } @Test void shouldBuildAfterMergeRelativePathWithEmptyPath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = UriBuilder.parse("relativePath"); UriBuilder otherUrlBuilder = 
method.parse("http://example.com/"); // When String url = uriBuilder.merge(otherUrlBuilder).build(); // Then assertThat( "Parsed with: " + method, url, is(equalTo("http://example.com/relativePath"))); }); } @Test void shouldBuildAfterMergeAbsolutePathWithNoPath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = UriBuilder.parse("/absolutePath"); UriBuilder otherUrlBuilder = method.parse("http://example.com"); // When String url = uriBuilder.merge(otherUrlBuilder).build(); // Then assertThat( "Parsed with: " + method, url, is(equalTo("http://example.com/absolutePath"))); }); } @Test void shouldBuildAfterMergeAbsolutePathWithEmptyPath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = UriBuilder.parse("/absolutePath"); UriBuilder otherUrlBuilder = method.parse("http://example.com/"); // When String url = uriBuilder.merge(otherUrlBuilder).build(); // Then assertThat( "Parsed with: " + method, url, is(equalTo("http://example.com/absolutePath"))); }); } @Test void shouldBuildRemovingSlashAtTheEnd() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("http://example.com/path/"); // When String url = uriBuilder.build(); // Then assertThat( "Parsed with: " + method, url, is(equalTo("http://example.com/path"))); }); } @Test void shouldBuildNormalisingPath() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("http://example.com/path/../other/."); // When String url = uriBuilder.build(); // Then assertThat( "Parsed with: " + method, url, is(equalTo("http://example.com/other"))); }); } @Test void shouldFailToBuildWithNoScheme() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("//example.com/"); // When / Then IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> uriBuilder.build()); assertThat("Parsed with: " + method, e.getMessage(), containsString("scheme")); }); } @Test void shouldFailToBuildWithNoAuthority() { 
PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("http://"); // When / Then IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> uriBuilder.build()); assertThat( "Parsed with: " + method, e.getMessage(), containsString("authority")); }); } @Test void shouldFailToBuildWithMalformedUri() { PARSE_METHODS.forEach( method -> { // Given UriBuilder uriBuilder = method.parse("http://x%0"); // When / Then IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> uriBuilder.build()); assertThat( "Parsed with: " + method, e.getMessage(), containsString("normalise")); }); } private static class ParseMethod { final String name; private final Function<String, UriBuilder> method; ParseMethod(String name, Function<String, UriBuilder> method) { this.name = name; this.method = method; } UriBuilder parse(String value) { return method.apply(value); } } }
/*
 * Copyright [2005] [University Corporation for Advanced Internet Development, Inc.]
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 *
 */

package org.opensaml.saml2.core.impl;

import org.opensaml.common.impl.AbstractSAMLObjectBuilder;
import org.opensaml.common.xml.SAMLConstants;
import org.opensaml.saml2.core.AssertionIDRef;

/**
 * Builder for {@link org.opensaml.saml2.core.impl.AssertionIDRefImpl}.
 */
public class AssertionIDRefBuilder extends AbstractSAMLObjectBuilder<AssertionIDRef> {

    /** Constructor. */
    public AssertionIDRefBuilder() {

    }

    /**
     * {@inheritDoc}
     *
     * Builds the element with the SAML 2.0 namespace, default local name, and
     * "saml" prefix.
     */
    public AssertionIDRef buildObject() {
        return buildObject(SAMLConstants.SAML20_NS, AssertionIDRef.DEFAULT_ELEMENT_LOCAL_NAME,
                SAMLConstants.SAML20_PREFIX);
    }

    /** {@inheritDoc} */
    public AssertionIDRef buildObject(String namespaceURI, String localName, String namespacePrefix) {
        return new AssertionIDRefImpl(namespaceURI, localName, namespacePrefix);
    }
}
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package org.apache.isis.viewer.restfulobjects.server.resources;

import org.apache.isis.applib.services.health.Health;
import org.apache.isis.viewer.restfulobjects.applib.JsonRepresentation;
import org.apache.isis.viewer.restfulobjects.applib.RepresentationType;
import org.apache.isis.viewer.restfulobjects.rendering.LinkFollowSpecs;
import org.apache.isis.viewer.restfulobjects.rendering.RendererContext;
import org.apache.isis.viewer.restfulobjects.rendering.ReprRendererAbstract;

/**
 * Renders a {@link Health} check result as a JSON representation:
 * always an "ok" boolean, plus a "message" only when the check failed.
 */
public class HealthReprRenderer extends ReprRendererAbstract<HealthReprRenderer, Health> {

    HealthReprRenderer(
            final RendererContext resourceContext,
            final LinkFollowSpecs linkFollower,
            final JsonRepresentation representation) {
        super(resourceContext, linkFollower, RepresentationType.HEALTH, representation);
    }

    /**
     * Populates the representation from the health result; the failure
     * message is included only for an unhealthy result.
     */
    @Override
    public HealthReprRenderer with(final Health health) {
        final boolean result = health.getResult();
        representation.mapPut("ok", result);
        if(!result) {
            representation.mapPut("message", health.getMessage());
        }
        return this;
    }

    /** Returns the representation as-is; all state was set in {@link #with(Health)}. */
    @Override
    public JsonRepresentation render() {
        return representation;
    }
}
package microbrowser.data.model;

/**
 * Domain wrapper around a prefuse graph node representing a discussion.
 * Copies the node's {@code id}, {@code title} and {@code body} columns into
 * fields and keeps a reference to the backing prefuse node.
 */
public class Discussion extends Node {

    private String title;
    private String body;
    private prefuse.data.Node backingNode;

    public Discussion(prefuse.data.Node source) {
        this.id = source.getInt("id");
        this.title = source.getString("title");
        this.body = source.getString("body");
        this.backingNode = source;
    }

    public String getTitle() { return title; }

    public void setTitle(String title) { this.title = title; }

    public String getBody() { return body; }

    public void setBody(String body) { this.body = body; }

    public prefuse.data.Node getOriginalNode() { return backingNode; }

    public void setOriginalNode(prefuse.data.Node node) { this.backingNode = node; }
}
/* * Copyright (c) 2012-2015 iWave Software LLC * All Rights Reserved */ package com.emc.sa.service.vipr.file.tasks; import java.net.URI; import com.emc.sa.service.vipr.tasks.WaitForTask; import com.emc.storageos.model.file.FileShareRestRep; import com.emc.storageos.model.file.FileSystemExpandParam; import com.emc.vipr.client.Task; public class ExpandFileSystem extends WaitForTask<FileShareRestRep> { private URI fileSystemId; private String newSize; public ExpandFileSystem(String fileSystemId, String newSize) { this(uri(fileSystemId), newSize); } public ExpandFileSystem(URI fileSystemId, String newSize) { super(); this.fileSystemId = fileSystemId; this.newSize = newSize; provideDetailArgs(fileSystemId, newSize); } @Override protected Task<FileShareRestRep> doExecute() throws Exception { FileSystemExpandParam expand = new FileSystemExpandParam(); expand.setNewSize(newSize); return getClient().fileSystems().expand(fileSystemId, expand); } }
package comprehensive;

import org.junit.Before;
import org.junit.Test;
import org.mimosaframework.core.json.ModelObject;
import org.mimosaframework.core.utils.RandomUtils;
import org.mimosaframework.orm.*;
import org.mimosaframework.orm.criteria.Criteria;
import org.mimosaframework.orm.exception.ContextException;
import tables.TableOrder;
import tables.TablePay;
import tables.TableUser;

import java.util.List;

/**
 * Integration smoke-tests for mimosa-orm join queries (subjoin): left joins,
 * inner joins, nested joins, join filters and custom table aliases. Results
 * are printed rather than asserted, so these require a configured datasource
 * (see {@code RunDataSourceBuilder}) and manual inspection.
 */
public class RunJoinSession {

    // Shared ORM session template, created lazily once per test run.
    private SessionTemplate template;

    @Before
    public void init() throws ContextException {
        if (template == null) {
            template = RunDataSourceBuilder.currTemplate();
        }
    }

    /** Left-joins pays onto users; joined rows appear under the "pays" alias. */
    @Test
    public void testLeftJoin() {
        List<ModelObject> objects = template.list(Criteria.query(TableUser.class)
                .subjoin(Criteria.left(TablePay.class).on(TablePay.userId, TableUser.id).aliasName("pays"))
                .limit(0, 3));
        System.out.println(objects);
    }

    /** Seeds 20 users with two pay rows each, then inner-joins with single() + limit. */
    @Test
    public void testInnerJoin() {
        for (int i = 0; i < 20; i++) {
            ModelObject user = new ModelObject(TableUser.class);
            user.put(TableUser.userName, RandomUtils.randomIgnoreCaseAlphanumeric(10));
            template.save(user);
            ModelObject pay = new ModelObject(TablePay.class);
            pay.put(TablePay.userId, user.getIntValue(TableUser.id));
            template.save(pay);
            // Drop the generated id and save again so each user gets a second pay row.
            pay.remove(TablePay.id);
            template.save(pay);
        }
        List<ModelObject> objects = template.list(Criteria.query(TableUser.class)
                .subjoin(Criteria.inner(TablePay.class).on(TableUser.id, TablePay.userId).single().aliasName("pays"))
                .limit(0, 20));
        System.out.println(objects.size());
        System.out.println(objects);
    }

    /** Nested join: orders left-joined under pays, pays inner-joined under users. */
    @Test
    public void testChildJoin() {
        List<ModelObject> objects = template.list(Criteria.query(TableUser.class)
                .subjoin(
                        Criteria.inner(TablePay.class)
                                .subjoin(Criteria.left(TableOrder.class).on(TableOrder.userId, TablePay.userId).aliasName("orders"))
                                .on(TablePay.userId, TableUser.id).single().aliasName("pays")
                )
                .limit(0, 20));
        System.out.println(objects);
    }

    /** Inner join constrained by an id range (gte/lte) instead of limit(). */
    @Test
    public void testInnerJoinNoLimit() {
        List<ModelObject> objects = template.list(Criteria.query(TableUser.class)
                .subjoin(Criteria.inner(TablePay.class).on(TablePay.userId, TableUser.id).single().aliasName("pays"))
                .gte(TableUser.id, 1).lte(TableUser.id, 20));
        System.out.println(objects.size());
        System.out.println(objects);
    }

    /**
     * Exercises ignore() on the joined pays table — presumably it excludes that
     * table's columns from the select while keeping the join condition; confirm
     * against the mimosa Criteria documentation.
     */
    @Test
    public void testIgnore() {
        List<ModelObject> objects = template.list(Criteria.query(TableUser.class)
                .subjoin(
                        Criteria.inner(TablePay.class)
                                .subjoin(Criteria.left(TableOrder.class).on(TablePay.userId, TableOrder.userId).aliasName("orders"))
                                .on(TablePay.userId, TableUser.id).ignore().single().aliasName("pays")
                )
                .limit(0, 3));
        System.out.println(objects);
    }

    /** Custom SQL table aliases via as(), plus a filter applied to the "pays" alias. */
    @Test
    public void testCustomTableAlias() {
        List<ModelObject> objects = template.list(Criteria.query(TableUser.class)
                .subjoin(
                        Criteria.inner(TablePay.class)
                                .subjoin(
                                        Criteria.left(TableOrder.class)
                                                .on(TablePay.userId, TableOrder.userId)
                                                .aliasName("orders").as("order")
                                ).on(TablePay.userId, TableUser.id).aliasName("pays").as("pay")
                )
                .filter(Criteria.filter()
                        .eq("status", "10")
                        // .eq("id", "1")
                        .as("pays"))
                .limit(0, 3));
        System.out.println(objects);
    }
}
/**
 * Copyright 2009-2020 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ibatis.builder;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.ibatis.mapping.ParameterMapping;
import org.apache.ibatis.mapping.SqlSource;
import org.apache.ibatis.parsing.GenericTokenParser;
import org.apache.ibatis.parsing.TokenHandler;
import org.apache.ibatis.reflection.MetaClass;
import org.apache.ibatis.reflection.MetaObject;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.type.JdbcType;

/**
 * Converts SQL containing #{...} parameter placeholders into a
 * {@link StaticSqlSource}: each placeholder is replaced by a JDBC "?" and a
 * corresponding {@link ParameterMapping} is collected in placeholder order.
 *
 * @author Clinton Begin
 */
public class SqlSourceBuilder extends BaseBuilder {

    // Names that are valid inside a #{...} expression besides the property itself.
    private static final String PARAMETER_PROPERTIES = "javaType,jdbcType,mode,numericScale,resultMap,typeHandler,jdbcTypeName";

    public SqlSourceBuilder(Configuration configuration) {
        super(configuration);
    }

    /**
     * @param originalSql the raw SQL string (containing #{} placeholders)
     * @param parameterType class of the statement's input parameter
     * @param additionalParameters map of additional (aliased) parameter values
     * @return a static SqlSource with "?" placeholders and ordered parameter mappings
     */
    public SqlSource parse(String originalSql, Class<?> parameterType, Map<String, Object> additionalParameters) {
        ParameterMappingTokenHandler handler = new ParameterMappingTokenHandler(configuration, parameterType, additionalParameters);
        // Scan the SQL for #{} placeholders; the handler records a mapping per token.
        GenericTokenParser parser = new GenericTokenParser("#{", "}", handler);
        String sql = parser.parse(originalSql);
        return new StaticSqlSource(configuration, sql, handler.getParameterMappings());
    }

    /**
     * TokenHandler that replaces each #{...} token with "?" and builds the
     * matching {@link ParameterMapping} from the token's content.
     */
    private static class ParameterMappingTokenHandler extends BaseBuilder implements TokenHandler {

        // Mappings accumulated in the order the placeholders appear in the SQL.
        private List<ParameterMapping> parameterMappings = new ArrayList<>();
        private Class<?> parameterType;
        // Meta view over additionalParameters, used for property-type lookup.
        private MetaObject metaParameters;

        public ParameterMappingTokenHandler(Configuration configuration, Class<?> parameterType, Map<String, Object> additionalParameters) {
            super(configuration);
            this.parameterType = parameterType;
            this.metaParameters = configuration.newMetaObject(additionalParameters);
        }

        public List<ParameterMapping> getParameterMappings() {
            return parameterMappings;
        }

        @Override
        public String handleToken(String content) {
            // Record the mapping for this placeholder and substitute the JDBC marker.
            parameterMappings.add(buildParameterMapping(content));
            return "?";
        }

        /**
         * Builds a ParameterMapping from a placeholder body such as
         * "prop,javaType=int,jdbcType=NUMERIC". The property's Java type is
         * resolved from (in order): additional params, a registered type handler
         * for the whole parameter, a CURSOR jdbcType, a Map parameter, or the
         * parameter class's getter; otherwise Object.
         */
        private ParameterMapping buildParameterMapping(String content) {
            Map<String, String> propertiesMap = parseParameterMapping(content);
            String property = propertiesMap.get("property");
            Class<?> propertyType;
            if (metaParameters.hasGetter(property)) { // issue #448 get type from additional params
                propertyType = metaParameters.getGetterType(property);
            } else if (typeHandlerRegistry.hasTypeHandler(parameterType)) {
                propertyType = parameterType;
            } else if (JdbcType.CURSOR.name().equals(propertiesMap.get("jdbcType"))) {
                propertyType = java.sql.ResultSet.class;
            } else if (property == null || Map.class.isAssignableFrom(parameterType)) {
                propertyType = Object.class;
            } else {
                MetaClass metaClass = MetaClass.forClass(parameterType, configuration.getReflectorFactory());
                if (metaClass.hasGetter(property)) {
                    propertyType = metaClass.getGetterType(property);
                } else {
                    propertyType = Object.class;
                }
            }
            ParameterMapping.Builder builder = new ParameterMapping.Builder(configuration, property, propertyType);
            Class<?> javaType = propertyType;
            String typeHandlerAlias = null;
            // Apply any explicit attributes from the placeholder, e.g. jdbcType=....
            for (Map.Entry<String, String> entry : propertiesMap.entrySet()) {
                String name = entry.getKey();
                String value = entry.getValue();
                if ("javaType".equals(name)) {
                    javaType = resolveClass(value);
                    builder.javaType(javaType);
                } else if ("jdbcType".equals(name)) {
                    builder.jdbcType(resolveJdbcType(value));
                } else if ("mode".equals(name)) {
                    builder.mode(resolveParameterMode(value));
                } else if ("numericScale".equals(name)) {
                    builder.numericScale(Integer.valueOf(value));
                } else if ("resultMap".equals(name)) {
                    builder.resultMapId(value);
                } else if ("typeHandler".equals(name)) {
                    // Deferred: the handler may depend on the final javaType.
                    typeHandlerAlias = value;
                } else if ("jdbcTypeName".equals(name)) {
                    builder.jdbcTypeName(value);
                } else if ("property".equals(name)) {
                    // Do Nothing
                } else if ("expression".equals(name)) {
                    throw new BuilderException("Expression based parameters are not supported yet");
                } else {
                    throw new BuilderException("An invalid property '" + name + "' was found in mapping #{" + content + "}.  Valid properties are " + PARAMETER_PROPERTIES);
                }
            }
            if (typeHandlerAlias != null) {
                builder.typeHandler(resolveTypeHandler(javaType, typeHandlerAlias));
            }
            return builder.build();
        }

        /**
         * Parses the placeholder body into a name/value map; wraps any parse
         * failure (other than BuilderException) with a syntax hint.
         */
        private Map<String, String> parseParameterMapping(String content) {
            try {
                return new ParameterExpression(content);
            } catch (BuilderException ex) {
                throw ex;
            } catch (Exception ex) {
                throw new BuilderException("Parsing error was found in mapping #{" + content + "}.  Check syntax #{property|(expression), var1=value1, var2=value2, ...} ", ex);
            }
        }
    }
}
package org.deeplearning4j.nearestneighbor.server;

import com.beust.jcommander.JCommander;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.deeplearning4j.clustering.sptree.DataPoint;
import org.deeplearning4j.clustering.vptree.VPTree;
import org.deeplearning4j.clustering.vptree.VPTreeFillSearch;
import org.deeplearning4j.exception.DL4JInvalidInputException;
import org.deeplearning4j.nearestneighbor.model.*;
import org.jboss.netty.util.internal.ByteBufferUtil;
import org.nd4j.linalg.api.buffer.DataBuffer;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.shape.Shape;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.indexing.NDArrayIndex;
import org.nd4j.serde.base64.Nd4jBase64;
import org.nd4j.serde.binary.BinarySerde;
import play.Mode;
import play.libs.Json;
import play.routing.RoutingDsl;
import play.server.Server;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static play.mvc.Controller.request;
import static play.mvc.Results.badRequest;
import static play.mvc.Results.internalServerError;
import static play.mvc.Results.ok;

/**
 * A rest server for using an
 * {@link VPTree} based on loading an ndarray containing
 * the data points for the path
 * The input values are an {@link CSVRecord}
 * which (based on the input schema) will automatically
 * have their values transformed.
 *
 * @author Adam Gibson
 */
@Slf4j
public class NearestNeighborsServer {

    // Comma-separated list of binary ndarray chunk files to load (required).
    @Parameter(names = {"--ndarrayPath"}, arity = 1, required = true)
    private String ndarrayPath = null;

    @Parameter(names = {"--nearestNeighborsPort"}, arity = 1)
    private int port = 9000;

    // Distance function passed to the VPTree, e.g. "euclidean".
    @Parameter(names = {"--similarityFunction"}, arity = 1)
    private String similarityFunction = "euclidean";

    @Parameter(names = {"--invert"}, arity = 1)
    private boolean invert = false;

    private Server server;

    /**
     * Parses CLI args, loads all ndarray chunks into a single matrix, builds
     * the VP-tree, registers the /knn and /knnnew POST routes, and starts the
     * Play server. Exits the JVM on invalid arguments.
     */
    public void runMain(String... args) throws Exception {
        JCommander jcmdr = new JCommander(this);

        try {
            jcmdr.parse(args);
        } catch (ParameterException e) {
            //User provides invalid input -> print the usage info
            jcmdr.usage();
            if (ndarrayPath == null)
                System.err.println("Json path parameter is missing.");
            // Give the usage output a moment to flush before exiting.
            try {
                Thread.sleep(500);
            } catch (Exception e2) {
            }
            System.exit(1);
        }

        String[] pathArr = ndarrayPath.split(",");
        //INDArray[] pointsArr = new INDArray[pathArr.length];
        // first of all we reading shapes of saved eariler files
        int rows = 0;
        int cols = 0;
        for (int i = 0; i < pathArr.length; i++) {
            DataBuffer shape = BinarySerde.readShapeFromDisk(new File(pathArr[i]));

            log.info("Loading shape {} of {}; Shape: [{} x {}]", i+1, pathArr.length, Shape.size(shape, 0), Shape.size(shape, 1));

            if (Shape.rank(shape) != 2)
                throw new DL4JInvalidInputException("NearestNeighborsServer assumes 2D chunks");

            // Rows accumulate across chunks; all chunks must share a column count.
            rows += Shape.size(shape, 0);

            if (cols == 0)
                cols = Shape.size(shape, 1);
            else if (cols != Shape.size(shape, 1))
                throw new DL4JInvalidInputException("NearestNeighborsServer requires equal 2D chunks. Got columns mismatch.");
        }

        final INDArray points = Nd4j.createUninitialized(rows, cols);

        // Copy each chunk into its row interval of the combined matrix.
        int lastPosition = 0;
        for (int i = 0; i < pathArr.length; i++) {
            log.info("Loading chunk {} of {}", i+1, pathArr.length);
            INDArray pointsArr = BinarySerde.readFromDisk(new File(pathArr[i]));

            points.get(NDArrayIndex.interval(lastPosition, lastPosition + pointsArr.rows())).assign(pointsArr);
            lastPosition += pointsArr.rows();

            // let's ensure we don't bring too much stuff in next loop
            System.gc();
        }

        VPTree tree = new VPTree(points, similarityFunction, invert);

        RoutingDsl routingDsl = new RoutingDsl();
        //return the host information for a given id
        routingDsl.POST("/knn").routeTo(FunctionUtil.function0((() -> {
            try {
                NearestNeighborRequest record = Json.fromJson(request().body().asJson(), NearestNeighborRequest.class);

                NearestNeighbor nearestNeighbor =
                                NearestNeighbor.builder().points(points).record(record).tree(tree).build();

                // NOTE(review): this null check happens AFTER record is passed to the
                // builder above; a null record likely never reaches this branch — verify.
                if (record == null)
                    return badRequest(Json.toJson(Collections.singletonMap("status", "invalid json passed.")));

                NearstNeighborsResults results =
                                NearstNeighborsResults.builder().results(nearestNeighbor.search()).build();


                return ok(Json.toJson(results));

            } catch (Exception e) {
                e.printStackTrace();
                return internalServerError();
            }
        })));

        // Variant that accepts a base64-encoded query ndarray in the request body.
        routingDsl.POST("/knnnew").routeTo(FunctionUtil.function0((() -> {
            try {
                Base64NDArrayBody record = Json.fromJson(request().body().asJson(), Base64NDArrayBody.class);
                if (record == null)
                    return badRequest(Json.toJson(Collections.singletonMap("status", "invalid json passed.")));

                INDArray arr = Nd4jBase64.fromBase64(record.getNdarray());
                List<DataPoint> results;
                List<Double> distances;

                if (record.isForceFillK()) {
                    // Fill search guarantees exactly k results even near tree boundaries.
                    VPTreeFillSearch vpTreeFillSearch = new VPTreeFillSearch(tree, record.getK(), arr);
                    vpTreeFillSearch.search();
                    results = vpTreeFillSearch.getResults();
                    distances = vpTreeFillSearch.getDistances();
                } else {
                    results = new ArrayList<>();
                    distances = new ArrayList<>();
                    tree.search(arr, record.getK(), results, distances);
                }

                List<NearestNeighborsResult> nnResult = new ArrayList<>();
                for (DataPoint dataPoint : results) {
                    nnResult.add(new NearestNeighborsResult(dataPoint.getIndex()));
                }

                NearstNeighborsResults results2 = NearstNeighborsResults.builder().results(nnResult).build();
                return ok(Json.toJson(results2));

            } catch (Exception e) {
                e.printStackTrace();
                return internalServerError();
            }
        })));

        server = Server.forRouter(routingDsl.build(), Mode.DEV, port);
    }

    /**
     * Stop the server
     */
    public void stop() {
        if (server != null)
            server.stop();
    }

    public static void main(String[] args) throws Exception {
        new NearestNeighborsServer().runMain(args);
    }
}
package com.suver.nate.patientscheduler;

import com.suver.nate.patientscheduler.Helpers.TaskDescriptionLookup;
import com.suver.nate.patientscheduler.Models.OfficeSettings;
import com.suver.nate.patientscheduler.Models.Token;
import com.suver.nate.patientscheduler.Models.UserSetting;

/**
 * Process-wide holder for session/application state (tenant, user and office
 * settings, auth token, task lookup).
 *
 * Created by nates on 11/22/2017.
 */
public class ApplicationData {

    // volatile + double-checked locking: the original unsynchronized lazy check
    // could construct two instances (and publish a half-built one) under
    // concurrent first access.
    private static volatile ApplicationData instance = null;

    /** Subclass-friendly constructor; callers should use {@link #getInstance()}. */
    protected ApplicationData() {
    }

    /**
     * Returns the shared instance, lazily creating it on first use.
     * Thread-safe; the fast path reads the volatile field once without locking.
     */
    public static ApplicationData getInstance() {
        ApplicationData result = instance;
        if (result == null) {
            synchronized (ApplicationData.class) {
                result = instance;
                if (result == null) {
                    instance = result = new ApplicationData();
                }
            }
        }
        return result;
    }

    // NOTE(review): these public mutable statics bypass the singleton instance
    // entirely; kept as-is for backward compatibility with existing callers,
    // but they are not thread-safe and should eventually become instance fields.
    public static String tenant;
    public static UserSetting userSetting;
    public static OfficeSettings officeSettings;
    public static Token token;
    public static TaskDescriptionLookup tasks;
}
/**
 * Copyright (c) 2011-2014, hubin (jobob@qq.com).
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.baomidou.mybatisplus.test;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.UUID;

import org.apache.ibatis.session.RowBounds;
import org.apache.ibatis.session.SqlSession;
import org.junit.Assert;

import com.baomidou.mybatisplus.mapper.Condition;
import com.baomidou.mybatisplus.mapper.EntityWrapper;
import com.baomidou.mybatisplus.mapper.Wrapper;
import com.baomidou.mybatisplus.test.mysql.entity.NotPK;
import com.baomidou.mybatisplus.test.mysql.entity.Test;
import com.baomidou.mybatisplus.test.mysql.mapper.NotPKMapper;
import com.baomidou.mybatisplus.test.mysql.mapper.RoleMapper;
import com.baomidou.mybatisplus.test.mysql.mapper.TestMapper;

/**
 * <p>
 * Global configuration test: exercises inserts with empty-string filtering,
 * CRUD on an entity without a primary key, wrapper/condition queries, paging,
 * and batch-id selects. Requires a live MySQL datasource from {@code CrudTest}.
 * </p>
 *
 * @author Caratacus
 * @Date 2016-12-22
 */
public class GlobalConfigurationTest extends CrudTest {

    /**
     * Global configuration test.
     */
    @org.junit.Test
    public void testGlobalConfig() {
        SqlSession session = this.sqlSessionFactory().openSession(false);
        TestMapper testMapper = session.getMapper(TestMapper.class);
        /*Wrapper type = Condition.instance().eq("id",1).or().in("type", new Object[]{1, 2, 3, 4, 5, 6});
        List list = testMapper.selectList(type);
        System.out.println(list.toString());*/
        Test test = new Test();
        test.setCreateTime(new Date());
        // With global string validation enabled, empty strings are ignored on insert.
        test.setType("");
        testMapper.insert(test);
        SqlSession sqlSession = this.sqlSessionFactory().openSession(false);
        NotPKMapper pkMapper = sqlSession.getMapper(NotPKMapper.class);
        NotPK notPK = new NotPK();
        notPK.setUuid(UUID.randomUUID().toString());
        notPK.setIs(true);
        notPK.setIsis(false);
        int num = pkMapper.insert(notPK);
        Assert.assertTrue(num > 0);
        NotPK notPK1 = pkMapper.selectOne(notPK);
        Assert.assertNotNull(notPK1);
        List<NotPK> notPKS = pkMapper.selectPage(RowBounds.DEFAULT, new EntityWrapper<>(notPK));
        System.out.println(notPKS);
        // Condition-built wrapper should produce a non-empty WHERE segment.
        Wrapper type = Condition.create().eq("type", 12121212);
        Assert.assertFalse(type.isEmptyOfWhere());
        System.out.println(type.getSqlSegment());
        Assert.assertFalse(type.isEmptyOfWhere());
        pkMapper.selectPage(RowBounds.DEFAULT, type);
        // selectById is expected to fail: the entity has no primary key, so the
        // by-id statement is not injected.
        NotPK notPK2 = null;
        try {
            notPK2 = pkMapper.selectById("1");
        } catch (Exception e) {
            System.out.println("因为没有主键,所以没有注入该方法");
        }
        Assert.assertNull(notPK2);
        int count = pkMapper.selectCount(Condition.EMPTY);
        pkMapper.selectList(Condition.<NotPK>wrapper().orderBy("uuid"));
        pkMapper.selectList(Condition.<NotPK>wrapper().eq("uuid", "uuid").orderBy("uuid"));
        Assert.assertTrue(count > 0);
        int deleteCount = pkMapper.delete(null);
        Assert.assertTrue(deleteCount > 0);
        // Batch select by ids, from both a List and a Set.
        List<String> list = new ArrayList<>();
        list.add("1");
        list.add("2");
        list.add("3");
        RoleMapper mapper = sqlSession.getMapper(RoleMapper.class);
        System.out.println(mapper.selectBatchIds(list));
        System.out.println(mapper.selectBatchIds(new HashSet<>(list)));
        sqlSession.commit();
    }
}
/*
 *
 * Copyright (c) 2013 - 2020 Lijun Liao
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.xipki.ca.api.mgmt;

import com.alibaba.fastjson.JSON;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xipki.ca.api.CaUris;
import org.xipki.ca.api.NameId;
import org.xipki.ca.api.mgmt.entry.*;
import org.xipki.ca.api.mgmt.entry.CaEntry.CaSignerConf;
import org.xipki.security.*;
import org.xipki.security.util.X509Util;
import org.xipki.util.Base64;
import org.xipki.util.*;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.security.cert.CertificateException;
import java.util.*;
import java.util.Map.Entry;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

/**
 * CA configuration. Loaded from a ZIP stream whose "caconf.json" entry holds
 * the {@code CaConfType.CaSystem} description; other ZIP entries supply
 * referenced files (certs, signer configs). Parsed signers, requestors, users,
 * publishers, profiles and CAs are exposed via lookup maps.
 *
 * @author Lijun Liao
 * @since 2.1.0
 */
public class CaConf {

    /** Parameters for generating a self-issued (root) certificate for a CA. */
    public static class GenSelfIssued {

        private final String profile;

        private final String subject;

        private final String serialNumber;

        public GenSelfIssued(String profile, String subject, String serialNumber) {
            this.profile = Args.notBlank(profile, "profile");
            this.subject = Args.notBlank(subject, "subject");
            this.serialNumber = serialNumber;
        }

        public String getProfile() {
            return profile;
        }

        public String getSubject() {
            return subject;
        }

        public String getSerialNumber() {
            return serialNumber;
        }

    } // class GenSelfIssued

    /** One CA's parsed configuration: entry, aliases, profiles, requestors, users, publishers. */
    public static class SingleCa {

        private final String name;

        private final GenSelfIssued genSelfIssued;

        private final CaEntry caEntry;

        private final List<String> aliases;

        private final List<String> profileNames;

        private final List<CaHasRequestorEntry> requestors;

        private final List<CaHasUserEntry> users;

        private final List<String> publisherNames;

        public SingleCa(String name, GenSelfIssued genSelfIssued, CaEntry caEntry,
                List<String> aliases, List<String> profileNames, List<CaHasRequestorEntry> requestors,
                List<CaHasUserEntry> users, List<String> publisherNames) {
            this.name = Args.notBlank(name, "name");
            // A self-issued generation request needs a caEntry without a preset cert.
            if (genSelfIssued != null) {
                if (caEntry == null) {
                    throw new IllegalArgumentException(
                            "caEntry may not be null if genSelfIssued is non-null");
                }

                if ((caEntry).getCert() != null) {
                    throw new IllegalArgumentException(
                            "caEntry.cert may not be null if genSelfIssued is non-null");
                }
            }
            this.genSelfIssued = genSelfIssued;
            this.caEntry = caEntry;
            this.aliases = aliases;
            this.profileNames = profileNames;
            this.requestors = requestors;
            this.users = users;
            this.publisherNames = publisherNames;
        } // constructor

        public String getName() {
            return name;
        }

        public CaEntry getCaEntry() {
            return caEntry;
        }

        public List<String> getAliases() {
            return aliases;
        }

        public GenSelfIssued getGenSelfIssued() {
            return genSelfIssued;
        }

        public List<String> getProfileNames() {
            return profileNames;
        }

        public List<CaHasRequestorEntry> getRequestors() {
            return requestors;
        }

        public List<CaHasUserEntry> getUsers() {
            return users;
        }

        public List<String> getPublisherNames() {
            return publisherNames;
        }

    } // class SingleCa

    private static final Logger LOG = LoggerFactory.getLogger(CaConf.class);

    // ${name} placeholders in configuration values are expanded from this map.
    private final Map<String, String> properties = new HashMap<>();

    private final Map<String, SignerEntry> signers = new HashMap<>();

    private final Map<String, RequestorEntry> requestors = new HashMap<>();

    // Values are either UserEntry (hashed password) or AddUserEntry (plain password).
    private final Map<String, Object> users = new HashMap<>();

    private final Map<String, PublisherEntry> publishers = new HashMap<>();

    private final Map<String, CertprofileEntry> certprofiles = new HashMap<>();

    private final Map<String, SingleCa> cas = new HashMap<>();

    public CaConf(File confFile, SecurityFactory securityFactory)
            throws IOException, InvalidConfException, CaMgmtException {
        Args.notNull(securityFactory, "securityFactory");
        confFile = IoUtil.expandFilepath(Args.notNull(confFile, "confFile"), true);

        init(Files.newInputStream(confFile.toPath()), securityFactory);
    }

    public CaConf(InputStream confFileZipStream, SecurityFactory securityFactory)
            throws IOException, InvalidConfException, CaMgmtException {
        Args.notNull(confFileZipStream, "confFileZipStream");
        Args.notNull(securityFactory, "securityFactory");
        init(confFileZipStream, securityFactory);
    }

    /**
     * Reads all ZIP entries into memory, parses "caconf.json" as the
     * configuration root, validates it, and populates the lookup maps.
     */
    private void init(InputStream zipFileStream, SecurityFactory securityFactory)
            throws IOException, InvalidConfException, CaMgmtException {
        ZipInputStream zipStream = new ZipInputStream(zipFileStream);
        try {
            Map<String, byte[]> zipEntries = new HashMap<>();

            ZipEntry zipEntry;
            while ((zipEntry = zipStream.getNextEntry()) != null) {
                byte[] zipEntryBytes = read(zipStream);
                zipEntries.put(zipEntry.getName(), zipEntryBytes);
            }

            CaConfType.CaSystem root =
                    JSON.parseObject(zipEntries.get("caconf.json"), CaConfType.CaSystem.class);
            root.validate();
            init0(root, zipEntries, securityFactory);
        } finally {
            // Best-effort close of both streams; failures are only logged.
            try {
                zipFileStream.close();
            } catch (IOException ex) {
                LOG.info("could not close zipFileStream: {}", ex.getMessage());
            }

            try {
                zipStream.close();
            } catch (IOException ex) {
                LOG.info("could not close zipStream: {}", ex.getMessage());
            }
        }
    } // method init

    /**
     * Translates the parsed JSON tree into entry objects, resolving file/value
     * references against the ZIP entries and expanding ${...} properties.
     */
    private void init0(CaConfType.CaSystem root, Map<String, byte[]> zipEntries,
            SecurityFactory securityFactory)
            throws IOException, InvalidConfException, CaMgmtException {
        if (root.getProperties() != null) {
            properties.putAll(root.getProperties());
        }

        // Signers
        if (root.getSigners() != null) {
            for (CaConfType.Signer m : root.getSigners()) {
                SignerEntry en = new SignerEntry(m.getName(), expandConf(m.getType()),
                        getValue(m.getConf(), zipEntries), getBase64Binary(m.getCert(), zipEntries));
                addSigner(en);
            }
        }

        // Requestors
        if (root.getRequestors() != null) {
            for (CaConfType.Requestor m : root.getRequestors()) {
                String conf;
                if (m.getConf() != null) {
                    conf = getValue(m.getConf(), zipEntries);
                } else {
                    conf = getBase64Binary(m.getBinaryConf(), zipEntries);
                }

                RequestorEntry en = new RequestorEntry(new NameId(null, m.getName()),
                        m.getType(), conf);
                addRequestor(en);
            }
        }

        // Users
        if (root.getUsers() != null) {
            for (CaConfType.User m : root.getUsers()) {
                boolean active = m.isActive();
                String password = m.getPassword();
                // Plain password -> AddUserEntry; otherwise the stored hashed password.
                if (password != null) {
                    AddUserEntry en = new AddUserEntry(new NameId(null, m.getName()),
                            active, password);
                    addUser(en);
                } else {
                    UserEntry en = new UserEntry(new NameId(null, m.getName()), active,
                            m.getHashedPassword());
                    addUser(en);
                }
            }
        }

        // Publishers
        if (root.getPublishers() != null) {
            for (CaConfType.NameTypeConf m : root.getPublishers()) {
                PublisherEntry en = new PublisherEntry(new NameId(null, m.getName()),
                        expandConf(m.getType()), getValue(m.getConf(), zipEntries));
                addPublisher(en);
            }
        }

        // Profiles
        if (root.getProfiles() != null) {
            for (CaConfType.NameTypeConf m : root.getProfiles()) {
                CertprofileEntry en = new CertprofileEntry(new NameId(null, m.getName()),
                        expandConf(m.getType()), getValue(m.getConf(), zipEntries));
                addProfile(en);
            }
        }

        // CAs
        if (root.getCas() != null) {
            for (CaConfType.Ca m : root.getCas()) {
                String name = m.getName();
                GenSelfIssued genSelfIssued = null;
                CaEntry caEntry = null;

                if (m.getCaInfo() != null) {
                    CaConfType.CaInfo ci = m.getCaInfo();
                    if (ci.getGenSelfIssued() != null) {
                        // Generating a self-issued cert excludes a preconfigured cert file.
                        if (ci.getCert() != null) {
                            throw new InvalidConfException("cert.file of CA " + name + " may not be set");
                        }

                        String subject = ci.getGenSelfIssued().getSubject();
                        String serialNumber = ci.getGenSelfIssued().getSerialNumber();

                        genSelfIssued = new GenSelfIssued(ci.getGenSelfIssued().getProfile(),
                                subject, serialNumber);
                    }

                    CaUris caUris;
                    if (ci.getCaUris() == null) {
                        caUris = CaUris.EMPTY_INSTANCE;
                    } else {
                        CaConfType.CaUris uris = ci.getCaUris();
                        caUris = new CaUris(uris.getCacertUris(), uris.getOcspUris(),
                                uris.getCrlUris(), uris.getDeltacrlUris());
                    }

                    // Defaults: 365-day expiration period, keep 30 CRLs.
                    int exprirationPeriod = (ci.getExpirationPeriod() == null) ? 365
                            : ci.getExpirationPeriod();

                    int numCrls = (ci.getNumCrls() == null) ? 30 : ci.getNumCrls();

                    caEntry = new CaEntry(new NameId(null, name), ci.getSnSize(),
                            ci.getNextCrlNo(), expandConf(ci.getSignerType()),
                            getValue(ci.getSignerConf(), zipEntries),
                            caUris, numCrls, exprirationPeriod);

                    if (CollectionUtil.isNotEmpty(ci.getCmpControl())) {
                        caEntry.setCmpControl(new CmpControl(
                                new ConfPairs(ci.getCmpControl()).getEncoded()));
                    }

                    if (ci.getCrlControl() != null) {
                        caEntry.setCrlControl(new CrlControl(
                                new ConfPairs(ci.getCrlControl()).getEncoded()));
                    }

                    if (ci.getScepControl() != null) {
                        caEntry.setScepControl(new ScepControl(
                                new ConfPairs(ci.getScepControl()).getEncoded()));
                    }

                    if (ci.getCtlogControl() != null) {
                        caEntry.setCtlogControl(new CtlogControl(
                                new ConfPairs(ci.getCtlogControl()).getEncoded()));
                    }

                    caEntry.setCmpResponderName(ci.getCmpResponderName());
                    caEntry.setScepResponderName(ci.getScepResponderName());
                    caEntry.setCrlSignerName(ci.getCrlSignerName());

                    if (ci.getExtraControl() != null) {
                        caEntry.setExtraControl(new ConfPairs(ci.getExtraControl()).unmodifiable());
                    }

                    // -1 means "keep expired certificates indefinitely" — TODO confirm.
                    int keepExpiredCertDays = (ci.getKeepExpiredCertDays() == null) ? -1
                            : ci.getKeepExpiredCertDays();
                    caEntry.setKeepExpiredCertInDays(keepExpiredCertDays);

                    caEntry.setMaxValidity(Validity.getInstance(ci.getMaxValidity()));
                    caEntry.setPermission(getIntPermission(ci.getPermissions()));

                    if (ci.getProtocolSupport() != null) {
                        caEntry.setProtocolSupport(new ProtocolSupport(ci.getProtocolSupport()));
                    }

                    if (ci.getDhpocControl() != null) {
                        caEntry.setDhpocControl(getValue(ci.getDhpocControl(), zipEntries));
                    }

                    if (ci.getRevokeSuspendedControl() != null) {
                        caEntry.setRevokeSuspendedControl(
                                new RevokeSuspendedControl(
                                        new ConfPairs(ci.getRevokeSuspendedControl())));
                    }

                    caEntry.setSaveRequest(ci.isSaveReq());
                    caEntry.setStatus(CaStatus.forName(ci.getStatus()));

                    if (ci.getValidityMode() != null) {
                        caEntry.setValidityMode(ValidityMode.forName(ci.getValidityMode()));
                    }

                    if (ci.getGenSelfIssued() == null) {
                        X509Cert caCert;

                        if (ci.getCert() != null) {
                            byte[] bytes = getBinary(ci.getCert(), zipEntries);
                            try {
                                caCert = X509Util.parseCert(bytes);
                            } catch (CertificateException ex) {
                                throw new InvalidConfException("invalid certificate of CA " + name, ex);
                            }
                        } else {
                            // extract from the signer configuration
                            ConcurrentContentSigner signer;
                            try {
                                List<CaSignerConf> signerConfs = CaEntry.splitCaSignerConfs(
                                        getValue(ci.getSignerConf(), zipEntries));
                                SignerConf signerConf = new SignerConf(signerConfs.get(0).getConf());

                                signer = securityFactory.createSigner(expandConf(ci.getSignerType()),
                                        signerConf, (X509Cert) null);
                            } catch (ObjectCreationException | XiSecurityException ex) {
                                throw new InvalidConfException("could not create CA signer for CA "
                                        + name, ex);
                            }
                            caCert = signer.getCertificate();
                        }

                        caEntry.setCert(caCert);

                        // certchain
                        if (CollectionUtil.isNotEmpty(ci.getCertchain())) {
                            List<X509Cert> certchain = new LinkedList<>();
                            for (FileOrBinary cc : ci.getCertchain()) {
                                byte[] bytes = getBinary(cc, zipEntries);
                                try {
                                    certchain.add(X509Util.parseCert(bytes));
                                } catch (CertificateException ex) {
                                    throw new InvalidConfException("invalid certchain for CA " + name, ex);
                                }
                            }

                            caEntry.setCertchain(certchain);
                        }
                    }
                }

                List<CaHasRequestorEntry> caHasRequestors = null;
                if (m.getRequestors() != null) {
                    caHasRequestors = new LinkedList<>();
                    for (CaConfType.CaHasRequestor req : m.getRequestors()) {
                        CaHasRequestorEntry en = new CaHasRequestorEntry(
                                new NameId(null, req.getRequestorName()));
                        en.setRa(req.isRa());

                        if (req.getProfiles() != null && !req.getProfiles().isEmpty()) {
                            en.setProfiles(new HashSet<>(req.getProfiles()));
                        }

                        en.setPermission(getIntPermission(req.getPermissions()));
                        caHasRequestors.add(en);
                    }
                }

                List<CaHasUserEntry> caHasUsers = null;
                if (m.getUsers() != null) {
                    caHasUsers = new LinkedList<>();
                    for (CaConfType.CaHasUser req : m.getUsers()) {
                        CaHasUserEntry en = new CaHasUserEntry(new NameId(null, req.getUserName()));
                        en.setPermission(getIntPermission(req.getPermissions()));
                        if (req.getProfiles() != null && !req.getProfiles().isEmpty()) {
                            en.setProfiles(new HashSet<>(req.getProfiles()));
                        }
                        caHasUsers.add(en);
                    }
                }

                List<String> aliases = null;
                if (m.getAliases() != null && !m.getAliases().isEmpty()) {
                    aliases = m.getAliases();
                }
                List<String> profileNames = null;
                if (m.getProfiles() != null && !m.getProfiles().isEmpty()) {
                    profileNames = m.getProfiles();
                }

                List<String> publisherNames = null;
                if (m.getPublishers() != null && !m.getPublishers().isEmpty()) {
                    publisherNames = m.getPublishers();
                }

                SingleCa singleCa = new SingleCa(name, genSelfIssued, caEntry, aliases,
                        profileNames, caHasRequestors, caHasUsers, publisherNames);
                addSingleCa(singleCa);
            }
        }
    } // method init0

    public void addSigner(SignerEntry signer) {
        Args.notNull(signer, "signer");
        this.signers.put(signer.getName(), signer);
    }

    public Set<String> getSignerNames() {
        return Collections.unmodifiableSet(signers.keySet());
    }

    public SignerEntry getSigner(String name) {
        return signers.get(Args.notNull(name, "name"));
    }

    public void addRequestor(RequestorEntry requestor) {
        Args.notNull(requestor, "requestor");
        this.requestors.put(requestor.getIdent().getName(), requestor);
    }

    public void addUser(UserEntry user) {
        Args.notNull(user, "user");
        this.users.put(user.getIdent().getName(), user);
    }

    public void addUser(AddUserEntry user) {
        Args.notNull(user, "user");
        this.users.put(user.getIdent().getName(), user);
    }

    public Set<String> getRequestorNames() {
        return Collections.unmodifiableSet(requestors.keySet());
    }

    public RequestorEntry getRequestor(String name) {
        return requestors.get(Args.notNull(name, "name"));
    }

    public Set<String> getUserNames() {
        return Collections.unmodifiableSet(users.keySet());
    }

    /** Returns either a UserEntry or an AddUserEntry for the given name. */
    public Object getUser(String name) {
        return users.get(Args.notNull(name, "name"));
    }

    public void addPublisher(PublisherEntry publisher) {
        Args.notNull(publisher, "publisher");
        this.publishers.put(publisher.getIdent().getName(), publisher);
    }

    public Set<String> getPublisherNames() {
        return Collections.unmodifiableSet(publishers.keySet());
    }

    public PublisherEntry getPublisher(String name) {
        return publishers.get(Args.notNull(name, "name"));
    }

    public void addProfile(CertprofileEntry profile) {
        Args.notNull(profile, "profile");
        this.certprofiles.put(profile.getIdent().getName(), profile);
    }

    public Set<String> getCertprofileNames() {
        return Collections.unmodifiableSet(certprofiles.keySet());
    }

    public CertprofileEntry getCertprofile(String name) {
        return certprofiles.get(Args.notNull(name, "name"));
    }

    public void addSingleCa(SingleCa singleCa) {
        Args.notNull(singleCa, "singleCa");
        this.cas.put(singleCa.getName(), singleCa);
    }

    public Set<String> getCaNames() {
        return Collections.unmodifiableSet(cas.keySet());
    }

    public SingleCa getCa(String name) {
        return cas.get(Args.notNull(name, "name"));
    }

    /**
     * Resolves a FileOrValue to text: inline value, or the named ZIP entry's
     * UTF-8 content; ${...} properties are expanded in either case.
     */
    private String getValue(FileOrValue fileOrValue, Map<String, byte[]> zipEntries)
            throws IOException {
        if (fileOrValue == null) {
            return null;
        }

        if (fileOrValue.getValue() != null) {
            return expandConf(fileOrValue.getValue());
        }

        String fileName = fileOrValue.getFile();
        byte[] binary = zipEntries.get(fileName);
        if (binary == null) {
            throw new IOException("could not find ZIP entry " + fileName);
        }

        return expandConf(StringUtil.toUtf8String(binary));
    } // method getValue

    /** Resolves a FileOrBinary and returns its content Base64-encoded (null-safe). */
    private String getBase64Binary(FileOrBinary fileOrBinary, Map<String, byte[]> zipEntries)
            throws IOException {
        byte[] binary = getBinary(fileOrBinary, zipEntries);
        return (binary == null) ? null : Base64.encodeToString(binary);
    }

    /** Reads the current ZIP entry's bytes fully (does not close the stream). */
    private static byte[] read(InputStream in) throws IOException {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        int readed;
        byte[] buffer = new byte[2048];
        while ((readed = in.read(buffer)) != -1) {
            bout.write(buffer, 0, readed);
        }

        return bout.toByteArray();
    } // method read

    /** Resolves a FileOrBinary to raw bytes: inline binary or the named ZIP entry. */
    private byte[] getBinary(FileOrBinary fileOrBinary, Map<String, byte[]> zipEntries)
            throws IOException {
        if (fileOrBinary == null) {
            return null;
        }

        if (fileOrBinary.getBinary() != null) {
            return fileOrBinary.getBinary();
        }

        String fileName = fileOrBinary.getFile();
        byte[] binary = zipEntries.get(fileName);
        if (binary == null) {
            throw new IOException("could not find ZIP entry " + fileName);
        }

        return binary;
    } //method getBinary

    /** Replaces every ${name} placeholder with its value from {@link #properties}. */
    private String expandConf(String confStr) {
        if (confStr == null || !confStr.contains("${") || confStr.indexOf('}') == -1) {
            return confStr;
        }

        for (Entry<String, String> entry : properties.entrySet()) {
            String name = entry.getKey();
            String placeHolder = "${" + name + "}";
            while (confStr.contains(placeHolder)) {
                confStr = confStr.replace(placeHolder, entry.getValue());
            }
        }

        return confStr;
    } // method expandConf

    /** ORs the permission bits for the given permission names. */
    private static int getIntPermission(List<String> permissions) throws InvalidConfException {
        int ret = 0;
        for (String permission : permissions) {
            Integer ii = PermissionConstants.getPermissionForText(permission);
            if (ii == null) {
                throw new InvalidConfException("invalid permission " + permission);
            }
            ret |= ii;
        }
        return ret;
    } // method getIntPermission

}
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.program.database.symbol; import db.DBRecord; import ghidra.program.database.DBObjectCache; import ghidra.program.model.address.Address; import ghidra.program.model.lang.Register; import ghidra.program.model.symbol.Namespace; import ghidra.program.model.symbol.SymbolType; import ghidra.program.util.ProgramLocation; /** * Symbols for global registers. */ public class GlobalRegisterSymbol extends SymbolDB { /** * Construct a new GlobalRegisterSymbol. * @param mgr the symbol manager * @param cache symbol object cache * @param addr the address for this symbol. * @param record the record for this symbol. 
*/ public GlobalRegisterSymbol(SymbolManager mgr, DBObjectCache<SymbolDB> cache, Address addr, DBRecord record) { super(mgr, cache, addr, record); } /** * @see ghidra.program.model.symbol.Symbol#getSymbolType() */ public SymbolType getSymbolType() { return SymbolType.GLOBAL_VAR; } @Override public boolean isExternal() { return false; } /** * @see ghidra.program.model.symbol.Symbol#getObject() */ public Object getObject() { Register reg = symbolMgr.getProgram().getRegister(getAddress()); return reg; } /** * @see ghidra.program.model.symbol.Symbol#isPrimary() */ @Override public boolean isPrimary() { return true; } /** * @see ghidra.program.model.symbol.Symbol#getProgramLocation() */ public ProgramLocation getProgramLocation() { return null; } /** * @see ghidra.program.model.symbol.Symbol#isValidParent(ghidra.program.model.symbol.Namespace) */ @Override public boolean isValidParent(Namespace parent) { return SymbolType.GLOBAL_VAR.isValidParent(symbolMgr.getProgram(), parent, address, isExternal()); } }
/* * Copyright (C) 2016 Red Hat, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.syndesis.server.controller.integration.online; import java.util.Arrays; import java.util.Collections; import java.util.List; import io.syndesis.server.controller.ControllersConfigurationProperties; import io.syndesis.server.controller.StateChangeHandler; import io.syndesis.server.controller.StateChangeHandlerProvider; import io.syndesis.server.controller.integration.online.customizer.ExposureDeploymentDataCustomizer; import io.syndesis.server.dao.manager.DataManager; import io.syndesis.integration.api.IntegrationProjectGenerator; import io.syndesis.server.openshift.OpenShiftService; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; @Component @ConditionalOnProperty(value = "controllers.integration.enabled", havingValue = "true", matchIfMissing = true) public class OnlineHandlerProvider extends BaseHandler implements StateChangeHandlerProvider { private final DataManager dataManager; private final IntegrationProjectGenerator projectGenerator; private final ControllersConfigurationProperties properties; public OnlineHandlerProvider( DataManager dataManager, OpenShiftService openShiftService, IntegrationProjectGenerator projectGenerator, ControllersConfigurationProperties properties) { super(openShiftService); this.dataManager = dataManager; this.projectGenerator = projectGenerator; this.properties = 
properties; } @Override public List<StateChangeHandler> getStatusChangeHandlers() { return Arrays.asList( new PublishHandler( dataManager, openShiftService(), projectGenerator, properties, Collections.singletonList(new ExposureDeploymentDataCustomizer(properties)) ), new UnpublishHandler(openShiftService())); } }
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */

package io.opentelemetry.api.metrics;

import javax.annotation.concurrent.ThreadSafe;

/**
 * Util class that can be used to atomically record measurements associated with a set of Metrics.
 *
 * <p>This class is equivalent to individually calling record on every Measure, but has the
 * advantage that all these operations are recorded atomically and it is more efficient.
 */
@ThreadSafe
public interface BatchRecorder {

  /**
   * Associates the {@link LongValueRecorder} with the given value. Subsequent updates to the same
   * {@link LongValueRecorder} will overwrite the previous value.
   *
   * @param valueRecorder the {@link LongValueRecorder}.
   * @param value the value to be associated with {@code valueRecorder}.
   * @return this.
   */
  BatchRecorder put(LongValueRecorder valueRecorder, long value);

  /**
   * Associates the {@link DoubleValueRecorder} with the given value. Subsequent updates to the same
   * {@link DoubleValueRecorder} will overwrite the previous value.
   *
   * @param valueRecorder the {@link DoubleValueRecorder}.
   * @param value the value to be associated with {@code valueRecorder}.
   * @return this.
   */
  BatchRecorder put(DoubleValueRecorder valueRecorder, double value);

  /**
   * Associates the {@link LongCounter} with the given value. Subsequent updates to the same {@link
   * LongCounter} will overwrite the previous value.
   *
   * @param counter the {@link LongCounter}.
   * @param value the value to be associated with {@code counter}.
   * @return this.
   */
  BatchRecorder put(LongCounter counter, long value);

  /**
   * Associates the {@link DoubleCounter} with the given value. Subsequent updates to the same
   * {@link DoubleCounter} will overwrite the previous value.
   *
   * @param counter the {@link DoubleCounter}.
   * @param value the value to be associated with {@code counter}.
   * @return this.
   */
  BatchRecorder put(DoubleCounter counter, double value);

  /**
   * Associates the {@link LongUpDownCounter} with the given value. Subsequent updates to the same
   * {@link LongUpDownCounter} will overwrite the previous value.
   *
   * @param upDownCounter the {@link LongUpDownCounter}.
   * @param value the value to be associated with {@code upDownCounter}.
   * @return this.
   */
  BatchRecorder put(LongUpDownCounter upDownCounter, long value);

  /**
   * Associates the {@link DoubleUpDownCounter} with the given value. Subsequent updates to the same
   * {@link DoubleUpDownCounter} will overwrite the previous value.
   *
   * @param upDownCounter the {@link DoubleUpDownCounter}.
   * @param value the value to be associated with {@code upDownCounter}.
   * @return this.
   */
  BatchRecorder put(DoubleUpDownCounter upDownCounter, double value);

  /**
   * Records all of the measurements at the same time.
   *
   * <p>This method records all measurements every time it is called, so make sure it is not called
   * twice if not needed.
   */
  void record();
}
package org.sk.vtracker.simulator;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
import springfox.documentation.swagger2.annotations.EnableSwagger2;

/**
 * Spring Boot entry point for the vehicle tracker simulator.
 *
 * <p>Enables asynchronous method execution, scheduled tasks, and Swagger 2 API
 * documentation for the application context.
 */
@EnableAsync
@EnableSwagger2
@EnableScheduling
@SpringBootApplication
public class VehicleSimulatorApplication {

    /**
     * Boots the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring Boot
     */
    public static void main(String[] args) {
        SpringApplication.run(VehicleSimulatorApplication.class, args);
    }
}
package com.openvpn.vpn.Activities;

import android.content.Intent;
import android.os.Bundle;
import android.widget.CompoundButton;
import android.widget.RadioButton;

import androidx.appcompat.app.AppCompatActivity;
import androidx.lifecycle.MutableLiveData;
import androidx.lifecycle.Observer;

import com.anjlab.android.iab.v3.BillingProcessor;
import com.anjlab.android.iab.v3.TransactionDetails;
import com.openvpn.vpn.R;
import com.openvpn.vpn.Config;

import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;

/**
 * Subscription purchase screen. Shows four mutually exclusive radio buttons
 * (one month, three months, six months, one year) and starts an in-app
 * subscription for the selected period via the anjlab {@link BillingProcessor}.
 */
public class UnlockAllActivity extends AppCompatActivity implements BillingProcessor.IBillingHandler {

    // In-app billing processor; created in onCreate, released in onDestroy.
    private BillingProcessor bp;

    // Index of the currently selected plan:
    // -1 = nothing selected, 0 = one month, 1 = three months, 2 = six months, 3 = one year.
    // LiveData is used so the observer below can enforce radio-button mutual exclusion.
    private MutableLiveData<Integer> all_check = new MutableLiveData<>();

    @BindView(R.id.one_month)
    RadioButton oneMonth;
    @BindView(R.id.three_month)
    RadioButton threeMonth;
    @BindView(R.id.six_month)
    RadioButton sixMonth;
    @BindView(R.id.one_year)
    RadioButton oneYear;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_unlock_all);
        ButterKnife.bind(this);
        // Start with no plan selected.
        all_check.setValue( -1);
        // Whenever the selection changes, uncheck the three buttons that were
        // NOT selected. The selected button itself is left alone (it was just
        // checked by the user).
        all_check.observe(this, new Observer<Integer>() {
            @Override
            public void onChanged(Integer integer) {
                switch (integer) {
                    case 0:
                        threeMonth.setChecked(false);
                        sixMonth.setChecked(false);
                        oneYear.setChecked(false);
                        break;
                    case 1:
                        oneMonth.setChecked(false);
                        sixMonth.setChecked(false);
                        oneYear.setChecked(false);
                        break;
                    case 2:
                        threeMonth.setChecked(false);
                        oneMonth.setChecked(false);
                        oneYear.setChecked(false);
                        break;
                    case 3:
                        threeMonth.setChecked(false);
                        sixMonth.setChecked(false);
                        oneMonth.setChecked(false);
                        break;
                }
            }
        });
        // Initialize billing with the app's license key; callbacks arrive on
        // the IBillingHandler methods implemented below.
        bp = new BillingProcessor(this, Config.IAP_LISENCE_KEY, this);
        bp.initialize();
        // Each checkbox publishes its own plan index when it becomes checked.
        oneMonth.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (isChecked) all_check.postValue(0);
            }
        });
        threeMonth.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (isChecked) all_check.postValue(1);
            }
        });
        sixMonth.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (isChecked) all_check.postValue(2);
            }
        });
        oneYear.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                if (isChecked) all_check.postValue(3);
            }
        });
    }

    @Override
    public void onDestroy() {
        // Release the billing connection to avoid leaking the service binding.
        if (bp != null) {
            bp.release();
        }
        super.onDestroy();
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Give the billing library first chance at the result; fall through to
        // the default handling only if it was not a billing request.
        if (!bp.handleActivityResult(requestCode, resultCode, data)) {
            super.onActivityResult(requestCode, resultCode, data);
        }
    }

    // IBillingHandler callbacks: intentionally no-ops in this screen.
    // NOTE(review): onProductPurchased gives no user feedback and onBillingError
    // is silent — confirm whether purchase confirmation/error UI is handled elsewhere.
    @Override
    public void onProductPurchased(String productId, TransactionDetails details) {
    }

    @Override
    public void onPurchaseHistoryRestored() {
    }

    @Override
    public void onBillingError(int errorCode, Throwable error) {
    }

    @Override
    public void onBillingInitialized() {
    }

    /**
     * Starts the subscription purchase flow for the plan at index {@code i}
     * (0 = monthly, 1 = three months, 2 = six months, 3 = yearly).
     */
    private void unlock_all(int i) {
        switch (i) {
            case 0:
                bp.subscribe(UnlockAllActivity.this, Config.all_month_id);
                break;
            case 1:
                bp.subscribe(UnlockAllActivity.this, Config.all_threemonths_id);
                break;
            case 2:
                bp.subscribe(UnlockAllActivity.this, Config.all_sixmonths_id);
                break;
            case 3:
                bp.subscribe(UnlockAllActivity.this, Config.all_yearly_id);
                break;
        }
    }

    // Purchase button: subscribe to the currently selected plan, if any.
    @OnClick(R.id.all_pur)
    void unlockAll() {
        if (all_check.getValue() != null) unlock_all(all_check.getValue());
    }
}
/*
 * Copyright 1999-2015 dangdang.com.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * </p>
 */

package com.dangdang.ddframe.job.lite.lifecycle.internal.settings;

import com.dangdang.ddframe.job.api.JobType;
import com.dangdang.ddframe.job.config.dataflow.DataflowJobConfiguration;
import com.dangdang.ddframe.job.config.script.ScriptJobConfiguration;
import com.dangdang.ddframe.job.executor.handler.JobProperties.JobPropertiesEnum;
import com.dangdang.ddframe.job.lite.config.LiteJobConfiguration;
import com.dangdang.ddframe.job.lite.internal.config.LiteJobConfigurationGsonFactory;
import com.dangdang.ddframe.job.lite.internal.storage.JobNodePath;
import com.dangdang.ddframe.job.lite.lifecycle.api.JobSettingsAPI;
import com.dangdang.ddframe.job.lite.lifecycle.domain.JobSettings;
import com.dangdang.ddframe.job.reg.base.CoordinatorRegistryCenter;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import lombok.RequiredArgsConstructor;

/**
 * Implementation of the job settings API, backed by the coordinator registry
 * center (ZooKeeper). Reads and writes the per-job configuration node as JSON.
 *
 * @author zhangliang
 * @author caohao
 */
@RequiredArgsConstructor
public final class JobSettingsAPIImpl implements JobSettingsAPI {

    // Registry center holding the job configuration nodes.
    private final CoordinatorRegistryCenter regCenter;

    /**
     * Loads the stored configuration for {@code jobName} and maps it into a
     * {@link JobSettings} DTO. Dataflow- and script-specific fields are only
     * filled in for jobs of the matching type.
     */
    @Override
    public JobSettings getJobSettings(final String jobName) {
        JobSettings result = new JobSettings();
        JobNodePath jobNodePath = new JobNodePath(jobName);
        // NOTE(review): regCenter.get(...) can presumably return null for an
        // unknown job, in which case fromJson/liteJobConfig would NPE below —
        // confirm callers only pass names of existing jobs.
        LiteJobConfiguration liteJobConfig = LiteJobConfigurationGsonFactory.fromJson(regCenter.get(jobNodePath.getConfigNodePath()));
        String jobType = liteJobConfig.getTypeConfig().getJobType().name();
        buildSimpleJobSettings(jobName, result, liteJobConfig);
        if (JobType.DATAFLOW.name().equals(jobType)) {
            buildDataflowJobSettings(result, (DataflowJobConfiguration) liteJobConfig.getTypeConfig());
        }
        if (JobType.SCRIPT.name().equals(jobType)) {
            buildScriptJobSettings(result, (ScriptJobConfiguration) liteJobConfig.getTypeConfig());
        }
        return result;
    }

    // Copies the fields common to every job type from the loaded configuration
    // into the settings DTO, including the two configurable job properties.
    private void buildSimpleJobSettings(final String jobName, final JobSettings result, final LiteJobConfiguration liteJobConfig) {
        result.setJobName(jobName);
        result.setJobType(liteJobConfig.getTypeConfig().getJobType().name());
        result.setJobClass(liteJobConfig.getTypeConfig().getJobClass());
        result.setShardingTotalCount(liteJobConfig.getTypeConfig().getCoreConfig().getShardingTotalCount());
        result.setCron(liteJobConfig.getTypeConfig().getCoreConfig().getCron());
        result.setShardingItemParameters(liteJobConfig.getTypeConfig().getCoreConfig().getShardingItemParameters());
        result.setJobParameter(liteJobConfig.getTypeConfig().getCoreConfig().getJobParameter());
        result.setMonitorExecution(liteJobConfig.isMonitorExecution());
        result.setMaxTimeDiffSeconds(liteJobConfig.getMaxTimeDiffSeconds());
        result.setMonitorPort(liteJobConfig.getMonitorPort());
        result.setFailover(liteJobConfig.getTypeConfig().getCoreConfig().isFailover());
        result.setMisfire(liteJobConfig.getTypeConfig().getCoreConfig().isMisfire());
        result.setJobShardingStrategyClass(liteJobConfig.getJobShardingStrategyClass());
        result.setDescription(liteJobConfig.getTypeConfig().getCoreConfig().getDescription());
        result.setReconcileIntervalMinutes(liteJobConfig.getReconcileIntervalMinutes());
        result.getJobProperties().put(JobPropertiesEnum.EXECUTOR_SERVICE_HANDLER.getKey(), liteJobConfig.getTypeConfig().getCoreConfig().getJobProperties().get(JobPropertiesEnum.EXECUTOR_SERVICE_HANDLER));
        result.getJobProperties().put(JobPropertiesEnum.JOB_EXCEPTION_HANDLER.getKey(), liteJobConfig.getTypeConfig().getCoreConfig().getJobProperties().get(JobPropertiesEnum.JOB_EXCEPTION_HANDLER));
    }

    // Dataflow-only field: whether the job processes data as a stream.
    private void buildDataflowJobSettings(final JobSettings result, final DataflowJobConfiguration config) {
        result.setStreamingProcess(config.isStreamingProcess());
    }

    // Script-only field: the command line the script job executes.
    private void buildScriptJobSettings(final JobSettings result, final ScriptJobConfiguration config) {
        result.setScriptCommandLine(config.getScriptCommandLine());
    }

    /**
     * Validates the mandatory fields (job name, cron, positive sharding count)
     * and overwrites the job's configuration node with the serialized settings.
     */
    @Override
    public void updateJobSettings(final JobSettings jobSettings) {
        Preconditions.checkArgument(!Strings.isNullOrEmpty(jobSettings.getJobName()), "jobName can not be empty.");
        Preconditions.checkArgument(!Strings.isNullOrEmpty(jobSettings.getCron()), "cron can not be empty.");
        Preconditions.checkArgument(jobSettings.getShardingTotalCount() > 0, "shardingTotalCount should larger than zero.");
        JobNodePath jobNodePath = new JobNodePath(jobSettings.getJobName());
        regCenter.update(jobNodePath.getConfigNodePath(), LiteJobConfigurationGsonFactory.toJsonForObject(jobSettings));
    }

    /**
     * Removes the job's entire registry subtree (not just the config node).
     */
    @Override
    public void removeJobSettings(final String jobName) {
        regCenter.remove("/" + jobName);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.planner.plan.nodes.exec.spec;

import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonIgnore;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonProperty;

import java.util.Objects;

/**
 * IntervalJoinSpec describes how two tables will be joined in interval join.
 *
 * <p>This class corresponds to {@link org.apache.calcite.rel.core.Join} rel node. The join
 * condition is split into two parts: WindowBounds and JoinSpec: 1. WindowBounds contains the time
 * range condition. 2. JoinSpec contains rest of the join condition except windowBounds.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class IntervalJoinSpec {
    public static final String FIELD_NAME_WINDOW_BOUNDS = "windowBounds";
    public static final String FIELD_NAME_JOIN_SPEC = "joinSpec";

    // Time-range part of the join condition.
    @JsonProperty(FIELD_NAME_WINDOW_BOUNDS)
    private final WindowBounds windowBounds;

    // Remaining (non time-range) part of the join condition.
    @JsonProperty(FIELD_NAME_JOIN_SPEC)
    private final JoinSpec joinSpec;

    @JsonCreator
    public IntervalJoinSpec(
            @JsonProperty(FIELD_NAME_JOIN_SPEC) JoinSpec joinSpec,
            @JsonProperty(FIELD_NAME_WINDOW_BOUNDS) WindowBounds windowBounds) {
        this.windowBounds = windowBounds;
        this.joinSpec = joinSpec;
    }

    @JsonIgnore
    public WindowBounds getWindowBounds() {
        return windowBounds;
    }

    @JsonIgnore
    public JoinSpec getJoinSpec() {
        return joinSpec;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        IntervalJoinSpec that = (IntervalJoinSpec) o;
        return Objects.equals(windowBounds, that.windowBounds)
                && Objects.equals(joinSpec, that.joinSpec);
    }

    @Override
    public int hashCode() {
        return Objects.hash(windowBounds, joinSpec);
    }

    /** WindowBounds describes the time range condition of an interval join. */
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class WindowBounds {
        public static final String FIELD_NAME_IS_EVENT_TIME = "isEventTime";
        public static final String FIELD_NAME_LEFT_LOWER_BOUND = "leftLowerBound";
        public static final String FIELD_NAME_LEFT_UPPER_BOUND = "leftUpperBound";
        public static final String FIELD_NAME_LEFT_TIME_IDX = "leftTimeIndex";
        public static final String FIELD_NAME_RIGHT_TIME_IDX = "rightTimeIndex";

        // True for event-time bounds, false for processing-time bounds.
        @JsonProperty(FIELD_NAME_IS_EVENT_TIME)
        private final boolean isEventTime;

        @JsonProperty(FIELD_NAME_LEFT_LOWER_BOUND)
        private final long leftLowerBound;

        @JsonProperty(FIELD_NAME_LEFT_UPPER_BOUND)
        private final long leftUpperBound;

        // Index of the time attribute in the left input's row.
        @JsonProperty(FIELD_NAME_LEFT_TIME_IDX)
        private final int leftTimeIdx;

        // Index of the time attribute in the right input's row.
        @JsonProperty(FIELD_NAME_RIGHT_TIME_IDX)
        private final int rightTimeIdx;

        @JsonCreator
        public WindowBounds(
                @JsonProperty(FIELD_NAME_IS_EVENT_TIME) boolean isEventTime,
                @JsonProperty(FIELD_NAME_LEFT_LOWER_BOUND) long leftLowerBound,
                @JsonProperty(FIELD_NAME_LEFT_UPPER_BOUND) long leftUpperBound,
                @JsonProperty(FIELD_NAME_LEFT_TIME_IDX) int leftTimeIdx,
                @JsonProperty(FIELD_NAME_RIGHT_TIME_IDX) int rightTimeIdx) {
            this.isEventTime = isEventTime;
            this.leftLowerBound = leftLowerBound;
            this.leftUpperBound = leftUpperBound;
            this.leftTimeIdx = leftTimeIdx;
            this.rightTimeIdx = rightTimeIdx;
        }

        @JsonIgnore
        public boolean isEventTime() {
            return isEventTime;
        }

        @JsonIgnore
        public long getLeftLowerBound() {
            return leftLowerBound;
        }

        @JsonIgnore
        public long getLeftUpperBound() {
            return leftUpperBound;
        }

        @JsonIgnore
        public int getLeftTimeIdx() {
            return leftTimeIdx;
        }

        @JsonIgnore
        public int getRightTimeIdx() {
            return rightTimeIdx;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) {
                return true;
            }
            if (o == null || getClass() != o.getClass()) {
                return false;
            }
            WindowBounds that = (WindowBounds) o;
            return isEventTime == that.isEventTime
                    && leftLowerBound == that.leftLowerBound
                    && leftUpperBound == that.leftUpperBound
                    && leftTimeIdx == that.leftTimeIdx
                    && rightTimeIdx == that.rightTimeIdx;
        }

        @Override
        public int hashCode() {
            return Objects.hash(
                    isEventTime, leftLowerBound, leftUpperBound, leftTimeIdx, rightTimeIdx);
        }
    }
}
/* $Id$
 *******************************************************************************
 * Copyright (c) 2009 Contributors - see below
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *    mvw
 *******************************************************************************
 *
 * Some portions of this file was previously release using the BSD License:
 */

// Copyright (c) 2004-2006 The Regents of the University of California. All
// Rights Reserved. Permission to use, copy, modify, and distribute this
// software and its documentation without fee, and without a written
// agreement is hereby granted, provided that the above copyright notice
// and this paragraph appear in all copies. This software program and
// documentation are copyrighted by The Regents of the University of
// California. The software program and documentation are supplied "AS
// IS", without any accompanying services from The Regents. The Regents
// does not warrant that the operation of the program will be
// uninterrupted or error-free. The end-user understands that the program
// was developed for research purposes and is advised not to rely
// exclusively on the program for any reason. IN NO EVENT SHALL THE
// UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
// SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS,
// ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF
// THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY OF
// SUCH DAMAGE. THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY
// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE
// PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THE UNIVERSITY OF
// CALIFORNIA HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT,
// UPDATES, ENHANCEMENTS, OR MODIFICATIONS.

package org.argouml.model;

/**
 * An interface to the OMG Diagram Interchange Model. Only implemented
 * by model implementations that wrap a repository that is aware of such
 * a model.<p>
 *
 * This interface is open for change as DI requirements become more clearly
 * understood.
 * @author Bob Tarling
 */
public interface DiagramInterchangeModel {

    /**
     * Create a new DI diagram.
     *
     * @param type the type of diagram to create
     * @param owner the model element that owns the diagram
     * @return the newly created diagram
     */
    DiDiagram createDiagram(Class type, Object owner);

    /**
     * Delete the given DI diagram.
     *
     * @param diagram the diagram to delete
     */
    void deleteDiagram(DiDiagram diagram);

    /**
     * Create a DI element on the given diagram for a model element.
     *
     * @param diagram the diagram the element belongs to
     * @param modelElement the model element the new DI element represents
     * @return the newly created DI element
     */
    DiElement createElement(DiDiagram diagram, Object modelElement);

    /**
     * Delete the given DI element.
     *
     * @param diagram the element to delete
     *     (NOTE(review): the parameter is a DiElement — the name "diagram"
     *     looks like a copy-paste slip; consider renaming it)
     */
    void deleteElement(DiElement diagram);

    // These methods are based on the GraphEvents. These need replacing by
    // more specific meaningful requests of the diagram interface model

    /**
     * Notification that a node was added to a graph.
     *
     * @param source the event source
     * @param arg the added node
     */
    void nodeAdded(Object source, Object arg);

    /**
     * Notification that an edge was added to a graph.
     *
     * @param source the event source
     * @param arg the added edge
     */
    void edgeAdded(Object source, Object arg);

    /**
     * Notification that a node was removed from a graph.
     *
     * @param source the event source
     * @param arg the removed node
     */
    void nodeRemoved(Object source, Object arg);

    /**
     * Notification that an edge was removed from a graph.
     *
     * @param source the event source
     * @param arg the removed edge
     */
    void edgeRemoved(Object source, Object arg);

    /**
     * Notification that a graph changed in some other way.
     *
     * @param source the event source
     * @param arg event-specific argument
     */
    void graphChanged(Object source, Object arg);
}
package br.com.livroandroid.carros.fragments;

import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.view.ActionMode;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;

import com.squareup.otto.Subscribe;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import br.com.livroandroid.carros.CarrosApplication;
import br.com.livroandroid.carros.R;
import br.com.livroandroid.carros.activity.CarroActivity;
import br.com.livroandroid.carros.adapter.CarroAdapter;
import br.com.livroandroid.carros.domain.Carro;
import br.com.livroandroid.carros.domain.CarroDB;
import br.com.livroandroid.carros.domain.CarroService;
import livroandroid.lib.task.TaskListener;
import livroandroid.lib.utils.AndroidUtils;
import livroandroid.lib.utils.IOUtils;
import livroandroid.lib.utils.SDCardUtils;

/**
 * Fragment that shows a list of cars of a given type in a RecyclerView,
 * with pull-to-refresh, a contextual action bar (CAB) for multi-selection
 * (delete / share), and bus-driven list refresh.
 */
public class CarrosFragment extends BaseFragment {
    protected RecyclerView recyclerView;
    // Car type this fragment displays (read from the fragment arguments).
    private int tipo;
    // Cars currently shown by the adapter.
    private List<Carro> carros;
    private SwipeRefreshLayout swipeLayout;
    // Non-null while the contextual action bar (CAB) is active.
    private ActionMode actionMode;
    // NOTE(review): only assigned in the commented-out ShareActionProvider code
    // below, so updateShareIntent() is currently a no-op — confirm intent.
    private Intent shareIntent;

    /**
     * Factory method to instantiate this fragment for a given car type.
     *
     * @param tipo the car type to display
     * @return a new fragment with the type stored in its arguments
     */
    public static CarrosFragment newInstance(int tipo) {
        Bundle args = new Bundle();
        args.putInt("tipo", tipo);
        CarrosFragment f = new CarrosFragment();
        f.setArguments(args);
        return f;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            // Read the type from the arguments.
            this.tipo = getArguments().getInt("tipo");
        }
        // Register this class to receive bus events.
        CarrosApplication.getInstance().getBus().register(this);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Stop receiving bus events.
        CarrosApplication.getInstance().getBus().unregister(this);
    }

    /**
     * Bus event handler: refreshes the list when a refresh event is received.
     *
     * @param refresh event payload (unused; the event itself triggers the refresh)
     */
    @Subscribe
    public void onBusAtualizarListaCarros(String refresh) {
        // Event received: update the list.
        taskCarros(false);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_carros, container, false);
        recyclerView = (RecyclerView) view.findViewById(R.id.recyclerView);
        recyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
        recyclerView.setItemAnimator(new DefaultItemAnimator());
        recyclerView.setHasFixedSize(true);
        // Swipe to Refresh
        swipeLayout = (SwipeRefreshLayout) view.findViewById(R.id.swipeToRefresh);
        swipeLayout.setOnRefreshListener(OnRefreshListener());
        swipeLayout.setColorSchemeResources(
                R.color.refresh_progress_1,
                R.color.refresh_progress_2,
                R.color.refresh_progress_3);
        return view;
    }

    /**
     * Builds the pull-to-refresh listener; refreshes only when the network is available.
     */
    private SwipeRefreshLayout.OnRefreshListener OnRefreshListener() {
        return new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                // Check for connectivity when the Pull to Refresh gesture is made
                if (AndroidUtils.isNetworkAvailable(getContext())) {
                    // Refresh on Pull to Refresh
                    taskCarros(true);
                } else {
                    swipeLayout.setRefreshing(false);
                    snack(recyclerView, R.string.msg_error_conexao_indisponivel);
                }
            }
        };
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        taskCarros(false);
    }

    /**
     * Fires the background task that fetches the cars.
     *
     * @param pullToRefresh true when triggered by the pull-to-refresh gesture
     *                      (shows the swipe spinner instead of the progress view)
     */
    private void taskCarros(boolean pullToRefresh) {
        // Fetch the cars: start the task
        startTask("carros", new GetCarrosTask(pullToRefresh), pullToRefresh ?
                R.id.swipeToRefresh : R.id.progress);
    }

    /**
     * Click listener for list rows: opens details, or toggles selection while
     * the contextual action bar (CAB) is active; long-click starts the CAB.
     */
    private CarroAdapter.CarroOnClickListener onClickCarro() {
        return new CarroAdapter.CarroOnClickListener() {
            @Override
            public void onClickCarro(View view, int idx) {
                Carro c = carros.get(idx);
                if (actionMode == null) {
                    Intent intent = new Intent(getContext(), CarroActivity.class);
                    intent.putExtra("carro", c);
                    startActivity(intent);
                } else {
                    // CAB is active
                    // Toggle the car selection
                    c.selected = !c.selected;
                    // Update the title with the number of selected cars
                    updateActionModeTitle();
                    // Redraw the list
                    recyclerView.getAdapter().notifyDataSetChanged();
                }
            }

            @Override
            public void onLongClickCarro(View view, int idx) {
                if (actionMode != null) {
                    return;
                }
                // Start the contextual action bar (CAB)
                actionMode = getAppCompatActivity().
                        startSupportActionMode(getActionModeCallback());
                Carro c = carros.get(idx);
                c.selected = true; // Select the car
                // Ask Android to redraw the list
                recyclerView.getAdapter().notifyDataSetChanged();
                // Update the title to show the number of selected cars
                updateActionModeTitle();
            }
        };
    }

    /**
     * Updates the CAB title/subtitle with the current selection count
     * and refreshes the share intent payload.
     */
    private void updateActionModeTitle() {
        if (actionMode != null) {
            actionMode.setTitle("Selecione os carros.");
            actionMode.setSubtitle(null);
            List<Carro> selectedCarros = getSelectedCarros();
            if (selectedCarros.size() == 1) {
                actionMode.setSubtitle("1 carro selecionado");
            } else if (selectedCarros.size() > 1) {
                actionMode.setSubtitle(selectedCarros.size() + " carros selecionados");
            }
            updateShareIntent(selectedCarros);
        }
    }

    /**
     * Updates the share intent (if any) with the selected cars.
     *
     * @param selectedCarros the currently selected cars
     */
    private void updateShareIntent(List<Carro> selectedCarros) {
        if (shareIntent != null) {
            // Text with the cars
            shareIntent.putExtra(Intent.EXTRA_TEXT, "Carros: " + selectedCarros);
        }
    }

    /**
     * @return the cars currently marked as selected
     */
    private List<Carro> getSelectedCarros() {
        List<Carro> list = new ArrayList<Carro>();
        for (Carro c : carros) {
            if (c.selected) {
                list.add(c);
            }
        }
        return list;
    }

    /**
     * Callback for the contextual action bar: inflates the CAB menu and
     * handles delete/share actions over the selected cars.
     */
    private ActionMode.Callback getActionModeCallback() {
        return new ActionMode.Callback() {
            @Override
            public boolean onCreateActionMode(ActionMode mode, Menu menu) {
                // Inflate the menu specific to the contextual action bar (CAB)
                MenuInflater inflater = getActivity().getMenuInflater();
                inflater.inflate(R.menu.menu_frag_carros_cab, menu);
                MenuItem shareItem = menu.findItem(R.id.action_share);
                // ShareActionProvider share = (ShareActionProvider) MenuItemCompat.getActionProvider(shareItem);
                // shareIntent = new Intent(Intent.ACTION_SEND);
                // shareIntent.putExtra(android.content.Intent.EXTRA_SUBJECT, getString(R.string.app_name));
                // shareIntent.setType("text/plain");
                // share.setShareIntent(shareIntent);
                return true;
            }

            @Override
            public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
                return true;
            }

            @Override
            public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
                List<Carro> selectedCarros = getSelectedCarros();
                if (item.getItemId() == R.id.action_remove) {
                    CarroDB db = new CarroDB(getContext());
                    try {
                        for (Carro c : selectedCarros) {
                            db.delete(c); // Delete the car from the database
                            carros.remove(c); // Remove it from the list
                        }
                    } finally {
                        db.close();
                    }
                    snack(recyclerView, "Carros excluídos com sucesso.");
                } else if (item.getItemId() == R.id.action_share) {
                    // Start the task that downloads the photos
                    startTask("compartilhar", new CompartilharTask(selectedCarros));
                }
                // Finish the action mode
                mode.finish();
                return true;
            }

            @Override
            public void onDestroyActionMode(ActionMode mode) {
                // Clear the state
                actionMode = null;
                // Mark all cars as not selected
                for (Carro c : carros) {
                    c.selected = false;
                }
                recyclerView.getAdapter().notifyDataSetChanged();
            }
        };
    }

    /**
     * Background task that fetches the cars and populates the adapter.
     */
    private class GetCarrosTask implements TaskListener<List<Carro>> {
        private boolean refresh;

        public GetCarrosTask(boolean refresh) {
            this.refresh = refresh;
        }

        @Override
        public List<Carro> execute() throws Exception {
            // Fetch the cars in a background (Thread)
            return CarroService.getCarros(getContext(), tipo, refresh);
        }

        @Override
        public void updateView(List<Carro> carros) {
            if (carros != null) {
                // Save the car list in the CarrosFragment attribute
                CarrosFragment.this.carros = carros;
                // Update the view on the UI Thread
                recyclerView.setAdapter(new CarroAdapter(getContext(), carros, onClickCarro()));
            }
        }

        @Override
        public void onError(Exception e) {
            // Any exception thrown by execute() lands here.
            alert("Ocorreu algum erro ao buscar os dados.");
        }

        @Override
        public void onCancelled(String s) {
        }
    }

    /**
     * Background task that downloads the selected cars' photos to the SD card
     * and then opens a share chooser with the image Uris.
     */
    private class CompartilharTask implements TaskListener {
        private final List<Carro> selectedCarros;
        // Files to share
        ArrayList<Uri> imageUris = new ArrayList<Uri>();

        public CompartilharTask(List<Carro> selectedCarros) {
            this.selectedCarros = selectedCarros;
        }

        @Override
        public Object execute() throws Exception {
            if (selectedCarros != null) {
                for (Carro c : selectedCarros) {
                    // Download the car photo to a file
                    String url = c.urlFoto;
                    // NOTE(review): substring(lastIndexOf("/")) keeps the leading
                    // '/' in the file name — presumably tolerated by SDCardUtils; confirm.
                    String fileName = url.substring(url.lastIndexOf("/"));
                    // Create the file on the SD card
                    File file = SDCardUtils.getPrivateFile(getContext(), "carros", fileName);
                    IOUtils.downloadToFile(c.urlFoto, file);
                    // Save the Uri to share the photo
                    imageUris.add(Uri.fromFile(file));
                }
            }
            return null;
        }

        @Override
        public void updateView(Object o) {
            // Build the intent with the car photos.
            // (Local variable renamed from shareIntent to avoid shadowing the field;
            // the redundant setAction(ACTION_SEND) that was immediately overwritten
            // by ACTION_SEND_MULTIPLE has been removed.)
            Intent intent = new Intent();
            intent.setAction(Intent.ACTION_SEND_MULTIPLE);
            intent.putExtra(android.content.Intent.EXTRA_SUBJECT, getString(R.string.app_name));
            intent.putParcelableArrayListExtra(Intent.EXTRA_STREAM, imageUris);
            intent.setType("image/*");
            // Open the Intent Chooser with the share options
            startActivity(Intent.createChooser(intent, "Enviar Carros"));
        }

        @Override
        public void onError(Exception e) {
            alert("Ocorreu algum erro ao compartilhar.");
        }

        @Override
        public void onCancelled(String s) {
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.junit.Before;

import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;

import static org.hamcrest.Matchers.containsString;

/**
 * Shared scaffolding for numeric field mapper tests. Subclasses populate
 * {@code TYPES} (all numeric types under test) and {@code WHOLE_TYPES}
 * (integer-valued types only) via {@link #setTypeList()}; each public
 * {@code testXxx} here simply drives the matching {@code doTestXxx} hook
 * once per type.
 */
public abstract class AbstractNumericFieldMapperTestCase<T extends FieldMapper.Builder<?>> extends FieldMapperTestCase<T> {
    // All numeric type names exercised by the generic tests below.
    protected Set<String> TYPES;
    // Subset of TYPES holding whole-number types (used for decimal coercion).
    protected Set<String> WHOLE_TYPES;
    protected IndexService indexService;
    protected DocumentMapperParser parser;

    /** Creates the "test" index, caches its parser, and lets the subclass fill the type sets. */
    @Before
    public void setup() {
        indexService = createIndex("test");
        parser = indexService.mapperService().documentMapperParser();
        setTypeList();
    }

    @Override
    protected Collection<Class<? extends Plugin>> getPlugins() {
        return pluginList(InternalSettingsPlugin.class);
    }

    /** Populates {@code TYPES} and {@code WHOLE_TYPES}; called once from {@link #setup()}. */
    protected abstract void setTypeList();

    public void testDefaults() throws Exception {
        for (String type : TYPES) {
            doTestDefaults(type);
        }
    }

    protected abstract void doTestDefaults(String type) throws Exception;

    public void testNotIndexed() throws Exception {
        for (String type : TYPES) {
            doTestNotIndexed(type);
        }
    }

    protected abstract void doTestNotIndexed(String type) throws Exception;

    public void testNoDocValues() throws Exception {
        for (String type : TYPES) {
            doTestNoDocValues(type);
        }
    }

    protected abstract void doTestNoDocValues(String type) throws Exception;

    public void testStore() throws Exception {
        for (String type : TYPES) {
            doTestStore(type);
        }
    }

    protected abstract void doTestStore(String type) throws Exception;

    public void testCoerce() throws Exception {
        for (String type : TYPES) {
            doTestCoerce(type);
        }
    }

    protected abstract void doTestCoerce(String type) throws IOException;

    public void testDecimalCoerce() throws Exception {
        for (String type : WHOLE_TYPES) {
            doTestDecimalCoerce(type);
        }
    }

    protected abstract void doTestDecimalCoerce(String type) throws IOException;

    public void testNullValue() throws IOException {
        for (String type : TYPES) {
            doTestNullValue(type);
        }
    }

    protected abstract void doTestNullValue(String type) throws IOException;

    /** An empty field name must be rejected by the parser (post-5.x behavior). */
    public void testEmptyName() throws IOException {
        // after version 5
        for (String type : TYPES) {
            String emptyNameMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
                    .startObject("properties").startObject("").field("type", type).endObject().endObject()
                    .endObject().endObject());

            IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
                    () -> parser.parse("type", new CompressedXContent(emptyNameMapping)));
            assertThat(ex.getMessage(), containsString("name cannot be empty string"));
        }
    }

    /**
     * Verifies that the {@code meta} attribute round-trips: it can be set,
     * removed again, and replaced with different content via mapping merges.
     */
    public void testMeta() throws Exception {
        for (String type : TYPES) {
            IndexService typeIndexService = createIndex("test-" + type);

            String withMeta = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
                    .startObject("properties").startObject("field").field("type", type)
                    .field("meta", Collections.singletonMap("foo", "bar"))
                    .endObject().endObject().endObject().endObject());
            DocumentMapper mapper = typeIndexService.mapperService().merge("_doc",
                    new CompressedXContent(withMeta), MergeReason.MAPPING_UPDATE);
            assertEquals(withMeta, mapper.mappingSource().toString());

            String withoutMeta = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
                    .startObject("properties").startObject("field").field("type", type)
                    .endObject().endObject().endObject().endObject());
            mapper = typeIndexService.mapperService().merge("_doc",
                    new CompressedXContent(withoutMeta), MergeReason.MAPPING_UPDATE);
            assertEquals(withoutMeta, mapper.mappingSource().toString());

            String replacedMeta = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc")
                    .startObject("properties").startObject("field").field("type", type)
                    .field("meta", Collections.singletonMap("baz", "quux"))
                    .endObject().endObject().endObject().endObject());
            mapper = typeIndexService.mapperService().merge("_doc",
                    new CompressedXContent(replacedMeta), MergeReason.MAPPING_UPDATE);
            assertEquals(replacedMeta, mapper.mappingSource().toString());
        }
    }
}
/*
 * Entagged Audio Tag library
 * Copyright (c) 2004-2005 Christian Laireiter <liree@web.de>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */
package org.jaudiotagger.audio.asf;

import org.jaudiotagger.audio.AudioFile;
import org.jaudiotagger.audio.asf.data.AsfHeader;
import org.jaudiotagger.audio.asf.data.AudioStreamChunk;
import org.jaudiotagger.audio.asf.data.MetadataContainer;
import org.jaudiotagger.audio.asf.data.MetadataDescriptor;
import org.jaudiotagger.audio.asf.io.*;
import org.jaudiotagger.audio.asf.util.TagConverter;
import org.jaudiotagger.audio.asf.util.Utils;
import org.jaudiotagger.audio.exceptions.CannotReadException;
import org.jaudiotagger.audio.exceptions.InvalidAudioFrameException;
import org.jaudiotagger.audio.exceptions.ReadOnlyFileException;
import org.jaudiotagger.audio.generic.AudioFileReader;
import org.jaudiotagger.audio.generic.GenericAudioHeader;
import org.jaudiotagger.logging.ErrorMessage;
import org.jaudiotagger.tag.TagException;
import org.jaudiotagger.tag.asf.AsfTag;

import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;

/**
 * This reader can read ASF files containing any content (stream type). <br>
 *
 * @author Christian Laireiter
 */
public class AsfFileReader extends AudioFileReader {

    /**
     * Logger instance
     */
    private final static Logger LOGGER = Logger.getLogger("org.jaudiotagger.audio.asf");

    /**
     * This reader will be configured to read tag and audio header information.<br>
     */
    private final static AsfHeaderReader HEADER_READER;

    static {
        final List<Class<? extends ChunkReader>> readers = new ArrayList<Class<? extends ChunkReader>>();
        readers.add(ContentDescriptionReader.class);
        readers.add(ContentBrandingReader.class);
        readers.add(MetadataReader.class);
        readers.add(LanguageListReader.class);

        // Create the header extension object reader with just content
        // description reader as well
        // as extended content description reader.
        final AsfExtHeaderReader extReader = new AsfExtHeaderReader(readers, true);

        readers.add(FileHeaderReader.class);
        readers.add(StreamChunkReader.class);

        HEADER_READER = new AsfHeaderReader(readers, true);
        HEADER_READER.setExtendedHeaderReader(extReader);
    }

    /**
     * Determines if the &quot;isVbr&quot; field is set in the extended content
     * description.<br>
     *
     * @param header the header to look up.
     * @return <code>true</code> if &quot;isVbr&quot; is present with a
     *         <code>true</code> value.
     */
    private boolean determineVariableBitrate(final AsfHeader header) {
        assert header != null;
        boolean result = false;
        final MetadataContainer extDesc = header.findExtendedContentDescription();
        if (extDesc != null) {
            final List<MetadataDescriptor> descriptors = extDesc.getDescriptorsByName("IsVBR");
            if (descriptors != null && !descriptors.isEmpty()) {
                result = Boolean.TRUE.toString().equals(descriptors.get(0).getString());
            }
        }
        return result;
    }

    /**
     * Creates a generic audio header instance with provided data from header.
     *
     * @param header ASF header which contains the information.
     * @return generic audio header representation.
     * @throws CannotReadException If header does not contain mandatory information. (Audio
     *                             stream chunk and file header chunk)
     */
    private GenericAudioHeader getAudioHeader(final AsfHeader header) throws CannotReadException {
        final GenericAudioHeader info = new GenericAudioHeader();
        if (header.getFileHeader() == null) {
            throw new CannotReadException("Invalid ASF/WMA file. File header object not available.");
        }
        if (header.getAudioStreamChunk() == null) {
            throw new CannotReadException("Invalid ASF/WMA file. No audio stream contained.");
        }
        info.setBitRate(header.getAudioStreamChunk().getKbps());
        info.setChannelNumber((int) header.getAudioStreamChunk().getChannelCount());
        info.setEncodingType("ASF (audio): " + header.getAudioStreamChunk().getCodecDescription());
        info.setLossless(header.getAudioStreamChunk().getCompressionFormat() == AudioStreamChunk.WMA_LOSSLESS);
        info.setPreciseLength(header.getFileHeader().getPreciseDuration());
        info.setSamplingRate((int) header.getAudioStreamChunk().getSamplingRate());
        info.setVariableBitRate(determineVariableBitrate(header));
        info.setBitsPerSample(header.getAudioStreamChunk().getBitsPerSample());
        return info;
    }

    /**
     * (overridden)
     *
     * @see org.jaudiotagger.audio.generic.AudioFileReader#getEncodingInfo(RandomAccessFile)
     */
    @Override
    protected GenericAudioHeader getEncodingInfo(final RandomAccessFile raf) throws CannotReadException, IOException {
        raf.seek(0);
        GenericAudioHeader info;
        try {
            final AsfHeader header = AsfHeaderReader.readInfoHeader(raf);
            if (header == null) {
                throw new CannotReadException("Some values must have been " + "incorrect for interpretation as asf with wma content.");
            }
            info = getAudioHeader(header);
        } catch (final Exception e) {
            // Re-throw known types unchanged; wrap everything else with the cause preserved.
            if (e instanceof IOException) {
                throw (IOException) e;
            } else if (e instanceof CannotReadException) {
                throw (CannotReadException) e;
            } else {
                throw new CannotReadException("Failed to read. Cause: " + e.getMessage(), e);
            }
        }
        return info;
    }

    /**
     * Creates a tag instance with provided data from header.
     *
     * @param header ASF header which contains the information.
     * @return tag representation of the header's metadata.
     */
    private AsfTag getTag(final AsfHeader header) {
        return TagConverter.createTagOf(header);
    }

    /**
     * (overridden)
     *
     * @see org.jaudiotagger.audio.generic.AudioFileReader#getTag(RandomAccessFile)
     */
    @Override
    protected AsfTag getTag(final RandomAccessFile raf) throws CannotReadException, IOException {
        raf.seek(0);
        AsfTag tag;
        try {
            final AsfHeader header = AsfHeaderReader.readTagHeader(raf);
            if (header == null) {
                throw new CannotReadException("Some values must have been " + "incorrect for interpretation as asf with wma content.");
            }
            tag = TagConverter.createTagOf(header);
        } catch (final Exception e) {
            logger.severe(e.getMessage());
            if (e instanceof IOException) {
                throw (IOException) e;
            } else if (e instanceof CannotReadException) {
                throw (CannotReadException) e;
            } else {
                // FIX: pass the original exception as the cause (was dropped here,
                // unlike the identical handler in getEncodingInfo above).
                throw new CannotReadException("Failed to read. Cause: " + e.getMessage(), e);
            }
        }
        return tag;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public AudioFile read(final File f) throws CannotReadException, IOException, TagException, ReadOnlyFileException, InvalidAudioFrameException {
        if (!f.canRead()) {
            throw new CannotReadException(ErrorMessage.GENERAL_READ_FAILED_DO_NOT_HAVE_PERMISSION_TO_READ_FILE.getMsg(f.getAbsolutePath()));
        }
        InputStream stream = null;
        try {
            stream = new FullRequestInputStream(new BufferedInputStream(new FileInputStream(f)));
            final AsfHeader header = HEADER_READER.read(Utils.readGUID(stream), stream, 0);
            if (header == null) {
                throw new CannotReadException(ErrorMessage.ASF_HEADER_MISSING.getMsg(f.getAbsolutePath()));
            }
            if (header.getFileHeader() == null) {
                throw new CannotReadException(ErrorMessage.ASF_FILE_HEADER_MISSING.getMsg(f.getAbsolutePath()));
            }

            // Just log a warning because file seems to play okay
            if (header.getFileHeader().getFileSize().longValue() != f.length()) {
                logger.warning(ErrorMessage.ASF_FILE_HEADER_SIZE_DOES_NOT_MATCH_FILE_SIZE.getMsg(f.getAbsolutePath(), header.getFileHeader().getFileSize().longValue(), f.length()));
            }

            return new AudioFile(f, getAudioHeader(header), getTag(header));
        } catch (final CannotReadException e) {
            throw e;
        } catch (final Exception e) {
            throw new CannotReadException("\"" + f + "\" :" + e, e);
        } finally {
            try {
                if (stream != null) {
                    stream.close();
                }
            } catch (final Exception ex) {
                LOGGER.severe("\"" + f + "\" :" + ex);
            }
        }
    }
}
package de.hsmainz.cs.semgis.arqextension.test.unit;

import static org.junit.Assert.assertEquals;

import org.apache.jena.sparql.expr.NodeValue;
import org.junit.jupiter.api.Test;

import de.hsmainz.cs.semgis.arqextension.unit.LinkToMeter;

/**
 * Unit test for {@link LinkToMeter}, the function converting a length given
 * in links to meters.
 */
public class LinkToMeterTest {

    /**
     * Converting 1.0 link must yield exactly 0.201168 (meters).
     */
    @Test
    public void testlinkToMeter() {
        NodeValue unitamount = NodeValue.makeDouble(1.);
        LinkToMeter instance = new LinkToMeter();
        NodeValue expResult = NodeValue.makeDouble(0.201168);
        NodeValue result = instance.exec(unitamount);
        // FIX: removed leftover System.out.println debug output; the assertion
        // alone verifies the result.
        // NOTE(review): JUnit 4's Assert.assertEquals is mixed with JUnit 5's
        // @Test here — works, but consider org.junit.jupiter.api.Assertions.
        assertEquals(expResult, result);
    }
}
package com.volokh.danylo.video_player_manager.manager;

import android.content.res.AssetFileDescriptor;
import android.view.View;

import com.volokh.danylo.video_player_manager.ui.VideoPlayerView;
import com.volokh.danylo.video_player_manager.meta.MetaData;

/**
 * This is a general interface for VideoPlayerManager
 * It supports :
 * 1. Start playback of new video by calling:
 *  a) {@link #playNewVideo(MetaData, VideoPlayerView, String)} if you have direct url or path to video source
 *  b) {@link #playNewVideo(MetaData, VideoPlayerView, AssetFileDescriptor)} if your video file is in the assets directory
 * 2. Stop existing playback. {@link #stopAnyPlayback()}
 * 3. Reset Media Player if it's no longer needed. {@link #resetMediaPlayer()}
 */
public interface VideoPlayerManager<T extends MetaData> {

    /**
     * Call it if you have direct url or path to video source
     * @param metaData - optional Meta Data
     * @param videoPlayerView - the actual video player
     * @param videoUrl - the link to the video source
     */
    void playNewVideo(T metaData, VideoPlayerView videoPlayerView, String videoUrl);

    /**
     * Call it if you have video source in assets directory
     * @param metaData - optional Meta Data
     * @param videoPlayerView - the actual video player
     * @param assetFileDescriptor - the asset descriptor of the video file
     */
    void playNewVideo(T metaData, VideoPlayerView videoPlayerView, AssetFileDescriptor assetFileDescriptor);

    /**
     * Call it if you need to stop any playback that is currently playing
     */
    void stopAnyPlayback();

    /**
     * Call it if you no longer need the player
     */
    void resetMediaPlayer();
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.tugraz.sysds.runtime.instructions.gpu.context; import static jcuda.runtime.JCuda.cudaMemGetInfo; import static jcuda.runtime.JCuda.cudaMemset; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.LongAdder; import java.util.stream.Collectors; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.tugraz.sysds.api.DMLScript; import org.tugraz.sysds.conf.DMLConfig; import org.tugraz.sysds.hops.OptimizerUtils; import org.tugraz.sysds.runtime.DMLRuntimeException; import org.tugraz.sysds.runtime.instructions.gpu.GPUInstruction; import org.tugraz.sysds.utils.GPUStatistics; import jcuda.Pointer; /** * - All cudaFree and cudaMalloc in SystemML should go through this class to avoid OOM or incorrect results. * - This class can be refactored in future to accept a chunk of memory ahead of time rather than while execution. This will only thow memory-related errors during startup. 
 */
public class GPUMemoryManager {
	protected static final Log LOG = LogFactory.getLog(GPUMemoryManager.class.getName());

	// Developer flag: Use this flag to check for GPU memory leak in SystemML.
	// This has an additional overhead of maintaining stack trace of all the allocated GPU pointers via PointerInfo class.
	private static final boolean DEBUG_MEMORY_LEAK = false;
	// Stack-trace depths printed when leak-debugging; restricted to avoid printing too much text while debugging.
	private static final int [] DEBUG_MEMORY_LEAK_STACKTRACE_DEPTH = {5, 6, 7, 8, 9, 10};

	// Underlying allocator (cuda or unified_memory), selected in the constructor from configuration.
	protected final GPUMemoryAllocator allocator;
	/*****************************************************************************************/
	// GPU Memory is divided into three major sections:
	// 1. Matrix Memory: Memory allocated to matrices in SystemML and addressable by GPUObjects.
	// This memory section is divided into three minor sections:
	// 1.1 Locked Matrix Memory
	// 1.2 UnLocked + Non-Dirty Matrix Memory
	// 1.3 UnLocked + Dirty Matrix Memory
	// To get the GPUObjects/Pointers in this section, please use getGPUObjects and getPointers methods of GPUMatrixMemoryManager.
	// To clear GPUObjects/Pointers in this section, please use clear and clearAll methods of GPUMatrixMemoryManager.
	// Both these methods allow to get/clear unlocked/locked and dirty/non-dirty objects of a certain size.
	protected final GPUMatrixMemoryManager matrixMemoryManager;
	public GPUMatrixMemoryManager getGPUMatrixMemoryManager() {
		return matrixMemoryManager;
	}

	// 2. Rmvar-ed pointers: If sysml.gpu.eager.cudaFree is set to false,
	// then this manager caches pointers of the GPUObject on which rmvar instruction has been executed for future reuse.
	// We observe 2-3x improvement with this approach and hence recommend to set this flag to false.
	protected final GPULazyCudaFreeMemoryManager lazyCudaFreeMemoryManager;
	public GPULazyCudaFreeMemoryManager getGPULazyCudaFreeMemoryManager() {
		return lazyCudaFreeMemoryManager;
	}

	// 3.
	// Non-matrix locked pointers: Other pointers (required for execution of an instruction that are not memory). For example: workspace
	// These pointers are not explicitly tracked by a memory manager but one can get them by using getNonMatrixLockedPointers
	private Set<Pointer> getNonMatrixLockedPointers() {
		// Everything in allPointers that neither the matrix manager nor the
		// lazy-cudaFree cache claims is, by elimination, a non-matrix locked pointer.
		Set<Pointer> managedPointers = matrixMemoryManager.getPointers();
		managedPointers.addAll(lazyCudaFreeMemoryManager.getAllPointers());
		return nonIn(allPointers.keySet(), managedPointers);
	}

	/**
	 * To record size of all allocated pointers allocated by above memory managers
	 */
	protected final HashMap<Pointer, PointerInfo> allPointers = new HashMap<>();

	/*****************************************************************************************/

	/**
	 * Get size of allocated GPU Pointer
	 * @param ptr pointer to get size of
	 * @return either the size or -1 if no such pointer exists
	 */
	public long getSizeAllocatedGPUPointer(Pointer ptr) {
		if(allPointers.containsKey(ptr)) {
			return allPointers.get(ptr).getSizeInBytes();
		}
		return -1;
	}

	/**
	 * Utility to debug memory leaks
	 */
	static class PointerInfo {
		private long sizeInBytes;
		// Captured only when DEBUG_MEMORY_LEAK is enabled; identifies the allocation site.
		private StackTraceElement[] stackTraceElements;
		public PointerInfo(long sizeInBytes) {
			if(DEBUG_MEMORY_LEAK) {
				this.stackTraceElements = Thread.currentThread().getStackTrace();
			}
			this.sizeInBytes = sizeInBytes;
		}
		public long getSizeInBytes() {
			return sizeInBytes;
		}
	}

	// If the available free size is less than this factor, GPUMemoryManager will warn users of multiple programs grabbing onto GPU memory.
	// This often happens if user tries to use both TF and SystemML, and TF grabs onto 90% of the memory ahead of time.
private static final double WARN_UTILIZATION_FACTOR = 0.7; public GPUMemoryManager(GPUContext gpuCtx) { matrixMemoryManager = new GPUMatrixMemoryManager(this); lazyCudaFreeMemoryManager = new GPULazyCudaFreeMemoryManager(this); if(DMLScript.GPU_MEMORY_ALLOCATOR.equals("cuda")) { allocator = new CudaMemoryAllocator(); } else if(DMLScript.GPU_MEMORY_ALLOCATOR.equals("unified_memory")) { allocator = new UnifiedMemoryAllocator(); } else { throw new RuntimeException("Unsupported value (" + DMLScript.GPU_MEMORY_ALLOCATOR + ") for the configuration " + DMLConfig.GPU_MEMORY_ALLOCATOR + ". Supported values are cuda, unified_memory."); } long free[] = { 0 }; long total[] = { 0 }; cudaMemGetInfo(free, total); if(free[0] < WARN_UTILIZATION_FACTOR*total[0]) { LOG.warn("Potential under-utilization: GPU memory - Total: " + (total[0] * (1e-6)) + " MB, Available: " + (free[0] * (1e-6)) + " MB on " + gpuCtx + ". This can happen if there are other processes running on the GPU at the same time."); } else { LOG.info("GPU memory - Total: " + (total[0] * (1e-6)) + " MB, Available: " + (free[0] * (1e-6)) + " MB on " + gpuCtx); } if (GPUContextPool.initialGPUMemBudget() > OptimizerUtils.getLocalMemBudget()) { LOG.warn("Potential under-utilization: GPU memory (" + GPUContextPool.initialGPUMemBudget() + ") > driver memory budget (" + OptimizerUtils.getLocalMemBudget() + "). " + "Consider increasing the driver memory budget."); } } /** * Invoke cudaMalloc * * @param A pointer * @param size size in bytes * @param printDebugMessage debug message * @return allocated pointer */ private Pointer cudaMallocNoWarn(Pointer A, long size, String printDebugMessage) { long t0 = DMLScript.STATISTICS ? 
System.nanoTime() : 0; try { allocator.allocate(A, size); allPointers.put(A, new PointerInfo(size)); if(DMLScript.STATISTICS) { long totalTime = System.nanoTime() - t0; GPUStatistics.cudaAllocSuccessTime.add(totalTime); GPUStatistics.cudaAllocSuccessCount.increment(); GPUStatistics.cudaAllocTime.add(totalTime); GPUStatistics.cudaAllocCount.increment(); } if(printDebugMessage != null && (DMLScript.PRINT_GPU_MEMORY_INFO || LOG.isTraceEnabled()) ) { LOG.info("Success: " + printDebugMessage + ":" + byteCountToDisplaySize(size)); } return A; } catch(jcuda.CudaException e) { if(DMLScript.STATISTICS) { long totalTime = System.nanoTime() - t0; GPUStatistics.cudaAllocFailedTime.add(System.nanoTime() - t0); GPUStatistics.cudaAllocFailedCount.increment(); GPUStatistics.cudaAllocTime.add(totalTime); GPUStatistics.cudaAllocCount.increment(); } if(printDebugMessage != null && (DMLScript.PRINT_GPU_MEMORY_INFO || LOG.isTraceEnabled()) ) { LOG.info("Failed: " + printDebugMessage + ":" + byteCountToDisplaySize(size)); LOG.info("GPU Memory info " + printDebugMessage + ":" + toString()); } return null; } } /** * Pretty printing utility to debug OOM error * * @param stackTrace stack trace * @param index call depth * @return pretty printed string */ private String getCallerInfo(StackTraceElement [] stackTrace, int index) { if(stackTrace.length <= index) return "->"; else return "->" + stackTrace[index].getClassName() + "." 
+ stackTrace[index].getMethodName() + "(" + stackTrace[index].getFileName() + ":" + stackTrace[index].getLineNumber() + ")"; } /** * Pretty printing utility to print bytes * * @param numBytes number of bytes * @return a human-readable display value */ private String byteCountToDisplaySize(long numBytes) { // return org.apache.commons.io.FileUtils.byteCountToDisplaySize(bytes); // performs rounding if (numBytes < 1024) { return numBytes + " bytes"; } else { int exp = (int) (Math.log(numBytes) / 6.931471805599453); return String.format("%.3f %sB", ((double)numBytes) / Math.pow(1024, exp), "KMGTP".charAt(exp-1)); } } /** * Allocate pointer of the given size in bytes. * * @param opcode instruction name * @param size size in bytes * @return allocated pointer */ public Pointer malloc(String opcode, long size) { if(size < 0) { throw new DMLRuntimeException("Cannot allocate memory of size " + byteCountToDisplaySize(size)); } if(DEBUG_MEMORY_LEAK) { LOG.info("GPU Memory info during malloc:" + toString()); } // Step 1: First try reusing exact match in rmvarGPUPointers to avoid holes in the GPU memory Pointer A = lazyCudaFreeMemoryManager.getRmvarPointer(opcode, size); Pointer tmpA = (A == null) ? new Pointer() : null; // Step 2: Allocate a new pointer in the GPU memory (since memory is available) // Step 3 has potential to create holes as well as limit future reuse, hence perform this step before step 3. 
if(A == null && allocator.canAllocate(size)) { // This can fail in case of fragmented memory, so don't issue any warning A = cudaMallocNoWarn(tmpA, size, "allocate a new pointer"); } // Step 3: Try reusing non-exact match entry of rmvarGPUPointers if(A == null) { A = lazyCudaFreeMemoryManager.getRmvarPointerMinSize(opcode, size); if(A != null) { guardedCudaFree(A); A = cudaMallocNoWarn(tmpA, size, "reuse non-exact match of rmvarGPUPointers"); if(A == null) LOG.warn("cudaMalloc failed after clearing one of rmvarGPUPointers."); } } // Step 4: Eagerly free-up rmvarGPUPointers and check if memory is available on GPU // Evictions of matrix blocks are expensive (as they might lead them to be written to disk in case of smaller CPU budget) // than doing cuda free/malloc/memset. So, rmvar-ing every blocks (step 4) is preferred to eviction (step 5). if(A == null) { lazyCudaFreeMemoryManager.clearAll(); if(allocator.canAllocate(size)) { // This can fail in case of fragmented memory, so don't issue any warning A = cudaMallocNoWarn(tmpA, size, "allocate a new pointer after eager free"); } } // Step 5: Try eviction/clearing exactly one with size restriction if(A == null) { long t0 = DMLScript.STATISTICS ? 
System.nanoTime() : 0; Optional<GPUObject> sizeBasedUnlockedGPUObjects = matrixMemoryManager.gpuObjects.stream() .filter(gpuObj -> !gpuObj.isLocked() && matrixMemoryManager.getWorstCaseContiguousMemorySize(gpuObj) >= size) .min((o1, o2) -> worstCaseContiguousMemorySizeCompare(o1, o2)); if(sizeBasedUnlockedGPUObjects.isPresent()) { evictOrClear(sizeBasedUnlockedGPUObjects.get(), opcode); A = cudaMallocNoWarn(tmpA, size, null); if(A == null) LOG.warn("cudaMalloc failed after clearing/evicting based on size."); if(DMLScript.STATISTICS) { long totalTime = System.nanoTime() - t0; GPUStatistics.cudaEvictTime.add(totalTime); GPUStatistics.cudaEvictSizeTime.add(totalTime); GPUStatistics.cudaEvictCount.increment(); GPUStatistics.cudaEvictSizeCount.increment(); } } } // Step 6: Try eviction/clearing one-by-one based on the given policy without size restriction if(A == null) { long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0; long currentAvailableMemory = allocator.getAvailableMemory(); boolean canFit = false; // --------------------------------------------------------------- // Evict unlocked GPU objects one-by-one and try malloc List<GPUObject> unlockedGPUObjects = matrixMemoryManager.gpuObjects.stream() .filter(gpuObj -> !gpuObj.isLocked()).collect(Collectors.toList()); Collections.sort(unlockedGPUObjects, new EvictionPolicyBasedComparator(size)); while(A == null && unlockedGPUObjects.size() > 0) { GPUObject evictedGPUObject = unlockedGPUObjects.remove(unlockedGPUObjects.size()-1); evictOrClear(evictedGPUObject, opcode); if(!canFit) { currentAvailableMemory += evictedGPUObject.getSizeOnDevice(); if(currentAvailableMemory >= size) canFit = true; } if(canFit) { // Checking before invoking cudaMalloc reduces the time spent in unnecessary cudaMalloc. 
// This was the bottleneck for ResNet200 experiments with batch size > 32 on P100+Intel A = cudaMallocNoWarn(tmpA, size, null); } if(DMLScript.STATISTICS) GPUStatistics.cudaEvictCount.increment(); } if(DMLScript.STATISTICS) { long totalTime = System.nanoTime() - t0; GPUStatistics.cudaEvictTime.add(totalTime); } } // Step 7: Handle defragmentation if(A == null) { LOG.warn("Potential fragmentation of the GPU memory. Forcibly evicting all ..."); LOG.info("Before clearAllUnlocked, GPU Memory info:" + toString()); matrixMemoryManager.clearAllUnlocked(opcode); LOG.info("GPU Memory info after evicting all unlocked matrices:" + toString()); A = cudaMallocNoWarn(tmpA, size, null); } if(A == null) { throw new DMLRuntimeException("There is not enough memory on device for this matrix, requested = " + byteCountToDisplaySize(size) + ". \n " + toString()); } long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0; cudaMemset(A, 0, size); addMiscTime(opcode, GPUStatistics.cudaMemSet0Time, GPUStatistics.cudaMemSet0Count, GPUInstruction.MISC_TIMER_SET_ZERO, t0); return A; } private int worstCaseContiguousMemorySizeCompare(GPUObject o1, GPUObject o2) { long ret = matrixMemoryManager.getWorstCaseContiguousMemorySize(o1) - matrixMemoryManager.getWorstCaseContiguousMemorySize(o2); return ret < 0 ? -1 : (ret == 0 ? 
0 : 1); } private void evictOrClear(GPUObject gpuObj, String opcode) { boolean eagerDelete = true; if(gpuObj.isDirty()) { // Eviction gpuObj.copyFromDeviceToHost(opcode, true, eagerDelete); } else { // Clear without copying gpuObj.clearData(opcode, eagerDelete); } } // --------------- Developer Utilities to debug potential memory leaks ------------------------ private void printPointers(Set<Pointer> pointers, StringBuilder sb) { HashMap<String, Integer> frequency = new HashMap<>(); for(Pointer ptr : pointers) { PointerInfo ptrInfo = allPointers.get(ptr); String key = ""; for(int index : DEBUG_MEMORY_LEAK_STACKTRACE_DEPTH) { key += getCallerInfo(ptrInfo.stackTraceElements, index); } if(frequency.containsKey(key)) { frequency.put(key, frequency.get(key)+1); } else { frequency.put(key, 1); } } for(Entry<String, Integer> kv : frequency.entrySet()) { sb.append(">>" + kv.getKey() + " => " + kv.getValue() + "\n"); } } // -------------------------------------------------------------------------------------------- /** * Note: This method should not be called from an iterator as it removes entries from allocatedGPUPointers and rmvarGPUPointers * * @param toFree pointer to call cudaFree method on */ void guardedCudaFree(Pointer toFree) { if(allPointers.containsKey(toFree)) { long size = allPointers.get(toFree).getSizeInBytes(); if(LOG.isTraceEnabled()) { LOG.trace("Free-ing up the pointer of size " + byteCountToDisplaySize(size)); } allPointers.remove(toFree); lazyCudaFreeMemoryManager.removeIfPresent(size, toFree); allocator.free(toFree); if(DMLScript.SYNCHRONIZE_GPU) jcuda.runtime.JCuda.cudaDeviceSynchronize(); // Force a device synchronize after free-ing the pointer for debugging } else { throw new RuntimeException("Attempting to free an unaccounted pointer:" + toFree); } } /** * Deallocate the pointer * * @param opcode instruction name * @param toFree pointer to free * @param eager whether to deallocate eagerly * @throws DMLRuntimeException if error occurs */ public void 
free(String opcode, Pointer toFree, boolean eager) throws DMLRuntimeException { if(LOG.isTraceEnabled()) LOG.trace("Free-ing the pointer with eager=" + eager); if (eager) { long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0; guardedCudaFree(toFree); addMiscTime(opcode, GPUStatistics.cudaDeAllocTime, GPUStatistics.cudaDeAllocCount, GPUInstruction.MISC_TIMER_CUDA_FREE, t0); } else { if (!allPointers.containsKey(toFree)) { LOG.info("GPU memory info before failure:" + toString()); throw new RuntimeException("ERROR : Internal state corrupted, cache block size map is not aware of a block it trying to free up"); } long size = allPointers.get(toFree).getSizeInBytes(); lazyCudaFreeMemoryManager.add(size, toFree); } } /** * Removes the GPU object from the memory manager * * @param gpuObj the handle to the GPU object */ public void removeGPUObject(GPUObject gpuObj) { if(LOG.isDebugEnabled()) LOG.debug("Removing the GPU object: " + gpuObj); matrixMemoryManager.gpuObjects.remove(gpuObj); } /** * Clear the allocated GPU objects */ public void clearMemory() { // First deallocate all the GPU objects for(GPUObject gpuObj : matrixMemoryManager.gpuObjects) { if(gpuObj.isDirty()) { if(LOG.isDebugEnabled()) LOG.debug("Attempted to free GPU Memory when a block[" + gpuObj + "] is still on GPU memory, copying it back to host."); gpuObj.copyFromDeviceToHost(null, true, true); } else gpuObj.clearData(null, true); } matrixMemoryManager.gpuObjects.clear(); // Then clean up remaining allocated GPU pointers Set<Pointer> remainingPtr = new HashSet<>(allPointers.keySet()); for(Pointer toFree : remainingPtr) { guardedCudaFree(toFree); // cleans up allocatedGPUPointers and rmvarGPUPointers as well } allPointers.clear(); } /** * Performs a non-in operation * * @param superset superset of pointer * @param subset subset of pointer * @return pointers such that: superset - subset */ private Set<Pointer> nonIn(Set<Pointer> superset, Set<Pointer> subset) { Set<Pointer> ret = new HashSet<Pointer>(); 
for(Pointer superPtr : superset) { if(!subset.contains(superPtr)) { ret.add(superPtr); } } return ret; } /** * Clears up the memory used by non-dirty pointers. */ public void clearTemporaryMemory() { // To record the cuda block sizes needed by allocatedGPUObjects, others are cleared up. Set<Pointer> unlockedDirtyPointers = matrixMemoryManager.getPointers(false, true); Set<Pointer> temporaryPointers = nonIn(allPointers.keySet(), unlockedDirtyPointers); for(Pointer tmpPtr : temporaryPointers) { guardedCudaFree(tmpPtr); } } /** * Convenient method to add misc timers * * @param opcode opcode * @param globalGPUTimer member of GPUStatistics * @param globalGPUCounter member of GPUStatistics * @param instructionLevelTimer member of GPUInstruction * @param startTime start time */ private void addMiscTime(String opcode, LongAdder globalGPUTimer, LongAdder globalGPUCounter, String instructionLevelTimer, long startTime) { if(DMLScript.STATISTICS) { long totalTime = System.nanoTime() - startTime; globalGPUTimer.add(totalTime); globalGPUCounter.add(1); if (opcode != null && DMLScript.FINEGRAINED_STATISTICS) GPUStatistics.maintainCPMiscTimes(opcode, instructionLevelTimer, totalTime); } } /** * Convenient method to add misc timers * * @param opcode opcode * @param instructionLevelTimer member of GPUInstruction * @param startTime start time */ void addMiscTime(String opcode, String instructionLevelTimer, long startTime) { if (opcode != null && DMLScript.FINEGRAINED_STATISTICS) GPUStatistics.maintainCPMiscTimes(opcode, instructionLevelTimer, System.nanoTime() - startTime); } /** * Print debugging information */ @SuppressWarnings("unused") public String toString() { long sizeOfLockedGPUObjects = 0; int numLockedGPUObjects = 0; int numLockedPointers = 0; long sizeOfUnlockedDirtyGPUObjects = 0; int numUnlockedDirtyGPUObjects = 0; int numUnlockedDirtyPointers = 0; long sizeOfUnlockedNonDirtyGPUObjects = 0; int numUnlockedNonDirtyGPUObjects = 0; int numUnlockedNonDirtyPointers = 0; 
for(GPUObject gpuObj : matrixMemoryManager.gpuObjects) { if(gpuObj.isLocked()) { numLockedGPUObjects++; sizeOfLockedGPUObjects += gpuObj.getSizeOnDevice(); numLockedPointers += matrixMemoryManager.getPointers(gpuObj).size(); } else { if(gpuObj.isDirty()) { numUnlockedDirtyGPUObjects++; sizeOfUnlockedDirtyGPUObjects += gpuObj.getSizeOnDevice(); numUnlockedDirtyPointers += matrixMemoryManager.getPointers(gpuObj).size(); } else { numUnlockedNonDirtyGPUObjects++; sizeOfUnlockedNonDirtyGPUObjects += gpuObj.getSizeOnDevice(); numUnlockedNonDirtyPointers += matrixMemoryManager.getPointers(gpuObj).size(); } } } long totalMemoryAllocated = 0; for(PointerInfo ptrInfo : allPointers.values()) { totalMemoryAllocated += ptrInfo.getSizeInBytes(); } Set<Pointer> potentiallyLeakyPointers = getNonMatrixLockedPointers(); List<Long> sizePotentiallyLeakyPointers = potentiallyLeakyPointers.stream(). map(ptr -> allPointers.get(ptr).sizeInBytes).collect(Collectors.toList()); long totalSizePotentiallyLeakyPointers = 0; for(long size : sizePotentiallyLeakyPointers) { totalSizePotentiallyLeakyPointers += size; } StringBuilder ret = new StringBuilder(); if(DEBUG_MEMORY_LEAK && potentiallyLeakyPointers.size() > 0) { ret.append("Non-matrix pointers were allocated by:\n"); printPointers(potentiallyLeakyPointers, ret); } ret.append("\n====================================================\n"); ret.append(String.format("%-35s%-15s%-15s%-15s\n", "", "Num Objects", "Num Pointers", "Size")); ret.append(String.format("%-35s%-15s%-15s%-15s\n", "Unlocked Dirty GPU objects", numUnlockedDirtyGPUObjects, numUnlockedDirtyPointers, byteCountToDisplaySize(sizeOfUnlockedDirtyGPUObjects))); ret.append(String.format("%-35s%-15s%-15s%-15s\n", "Unlocked NonDirty GPU objects", numUnlockedNonDirtyGPUObjects, numUnlockedNonDirtyPointers, byteCountToDisplaySize(sizeOfUnlockedNonDirtyGPUObjects))); ret.append(String.format("%-35s%-15s%-15s%-15s\n", "Locked GPU objects", numLockedGPUObjects, numLockedPointers, 
byteCountToDisplaySize(sizeOfLockedGPUObjects))); ret.append(String.format("%-35s%-15s%-15s%-15s\n", "Cached rmvar-ed pointers", "-", lazyCudaFreeMemoryManager.getNumPointers(), byteCountToDisplaySize(lazyCudaFreeMemoryManager.getTotalMemoryAllocated()))); ret.append(String.format("%-35s%-15s%-15s%-15s\n", "Non-matrix/non-cached pointers", "-", potentiallyLeakyPointers.size(), byteCountToDisplaySize(totalSizePotentiallyLeakyPointers))); ret.append(String.format("%-35s%-15s%-15s%-15s\n", "All pointers", "-", allPointers.size(), byteCountToDisplaySize(totalMemoryAllocated))); long free[] = { 0 }; long total[] = { 0 }; cudaMemGetInfo(free, total); ret.append(String.format("%-35s%-15s%-15s%-15s\n", "Free mem (from cudaMemGetInfo)", "-", "-", byteCountToDisplaySize(free[0]))); ret.append(String.format("%-35s%-15s%-15s%-15s\n", "Total mem (from cudaMemGetInfo)", "-", "-", byteCountToDisplaySize(total[0]))); ret.append("====================================================\n"); return ret.toString(); } /** * Class that governs the eviction policy */ public static class EvictionPolicyBasedComparator implements Comparator<GPUObject> { public EvictionPolicyBasedComparator(long neededSize) { } @Override public int compare(GPUObject p1, GPUObject p2) { if (p1.isLocked() && p2.isLocked()) { // Both are locked, so don't sort return 0; } else if (p1.isLocked()) { // Put the unlocked one to RHS // a value less than 0 if x < y; and a value greater than 0 if x > y return -1; } else if (p2.isLocked()) { // Put the unlocked one to RHS // a value less than 0 if x < y; and a value greater than 0 if x > y return 1; } else { // Both are unlocked return Long.compare(p2.timestamp.get(), p1.timestamp.get()); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.isis.viewer.wicket.model.models; import org.apache.isis.core.runtime.context.IsisAppCommonContext; import org.apache.isis.viewer.common.model.menu.MenuUiModel; /** * Backing model for actions of application services menu bar (typically, as * displayed along the top or side of the page). */ public class ServiceActionsModel extends ModelAbstract<MenuUiModel> { private static final long serialVersionUID = 1L; private final MenuUiModel menuUiModel; /** * @param commonContext * @param menuUiModel - may be null in special case of rendering the tertiary menu on the error page. */ public ServiceActionsModel( final IsisAppCommonContext commonContext, final MenuUiModel menuUiModel) { super(commonContext); this.menuUiModel = menuUiModel; } @Override protected MenuUiModel load() { return menuUiModel; } }
package daybreak.nerfedshield.config;

import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.configuration.file.YamlConstructor;
import org.bukkit.configuration.file.YamlRepresenter;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.representer.Representer;

/**
 * A {@link YamlConfiguration} that can attach comment blocks to configuration paths
 * and re-inserts them into the file on every save (Bukkit's YAML dump discards comments).
 *
 * @author Developed by dumptruckman, LlmDL & Articdive, and modified by
 *         Daybreak 새벽
 */
public class CommentedConfiguration extends YamlConfiguration {

	private static final Logger logger = Logger.getLogger(CommentedConfiguration.class.getName());

	private final DumperOptions yamlOptions = new DumperOptions();
	private final Representer yamlRepresenter = new YamlRepresenter();
	private final Yaml yaml = new Yaml(new YamlConstructor(), yamlRepresenter, yamlOptions);
	// Maps a dotted configuration path (e.g. "section.key") to the full comment block to emit above it.
	private final HashMap<String, String> comments = new HashMap<>();
	private final File file;

	public CommentedConfiguration(File file) throws IOException, InvalidConfigurationException {
		super.load(file);
		this.file = file;
	}

	/**
	 * Pass a file and it will return it's contents as a string.
	 *
	 * @param file File to read.
	 * @return Contents of file. String will be empty in case of any errors.
	 */
	public static String convertFileToString(File file) {
		if (file != null && file.exists() && file.canRead() && !file.isDirectory()) {
			Writer writer = new StringWriter();
			char[] buffer = new char[1024];
			// try-with-resources closes the reader (and the underlying stream) even if read() throws.
			try (InputStream is = new FileInputStream(file);
					Reader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
				int n;
				while ((n = reader.read(buffer)) != -1) {
					writer.write(buffer, 0, n);
				}
			} catch (IOException e) {
				logger.log(Level.SEVERE, "An error has occurred while converting the File into String.");
			}
			return writer.toString();
		} else {
			return "";
		}
	}

	/**
	 * Writes the contents of a string to a file.
	 *
	 * @param source String to write.
	 * @param file File to write to.
	 */
	public static void stringToFile(String source, File file) {
		// try-with-resources ensures the writer is closed even when write() fails,
		// avoiding a leaked file handle.
		try (OutputStreamWriter out = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8)) {
			out.write(source);
		} catch (IOException e) {
			logger.log(Level.SEVERE, "An error has occurred while writing the String to the File.");
		}
	}

	/**
	 * Reloads this configuration from the file passed at construction.
	 *
	 * @throws IOException if the file cannot be read
	 * @throws InvalidConfigurationException if the file is not valid YAML
	 */
	public void load() throws IOException, InvalidConfigurationException {
		super.load(file);
	}

	/**
	 * Saves the configuration and then rewrites the file, inserting each registered
	 * comment block directly above its configuration path.
	 * <p>
	 * The loop below re-derives the dotted path of every YAML line by tracking
	 * indentation depth (2 spaces per level); comments registered via
	 * {@link #addComment(String, String...)} are spliced in when the path matches.
	 *
	 * @throws IOException if writing fails
	 */
	public void save() throws IOException {
		this.save(file);
		if (!comments.isEmpty()) {
			String[] yamlContents = convertFileToString(file).split("[" + System.lineSeparator() + "]");
			StringBuilder newContents = new StringBuilder();
			String currentPath = "";
			boolean node;
			int depth = 0;
			for (String line : yamlContents) {
				// A "node" line declares a key ("key: value" or a bare "key:" section header).
				if (line.contains(": ") || (line.length() > 1 && line.charAt(line.length() - 1) == ':')) {
					node = true;
					int index;
					index = line.indexOf(": ");
					if (index < 0) {
						index = line.length() - 1;
					}
					if (currentPath.isEmpty()) {
						currentPath = line.substring(0, index);
					} else {
						// Leading spaces encode nesting depth (2 spaces per level).
						int whiteSpace = 0;
						for (int n = 0; n < line.length(); n++) {
							if (line.charAt(n) == ' ') {
								whiteSpace++;
							} else {
								break;
							}
						}
						if (whiteSpace / 2 > depth) {
							// Descended one level: append this key to the path.
							currentPath += "." + line.substring(whiteSpace, index);
							depth++;
						} else if (whiteSpace / 2 < depth) {
							// Climbed up: strip trailing path segments until depths match.
							int newDepth = whiteSpace / 2;
							for (int i = 0; i < depth - newDepth; i++) {
								currentPath = currentPath.replace(currentPath.substring(Math.max(0, currentPath.lastIndexOf("."))), "");
							}
							int lastIndex = currentPath.lastIndexOf(".");
							if (lastIndex < 0) {
								currentPath = "";
							} else {
								currentPath = currentPath.replace(currentPath.substring(Math.max(0, currentPath.lastIndexOf("."))), "");
								currentPath += ".";
							}
							currentPath += line.substring(whiteSpace, index);
							depth = newDepth;
						} else {
							// Same level: replace the last path segment with this key.
							int lastIndex = currentPath.lastIndexOf(".");
							if (lastIndex < 0) {
								currentPath = "";
							} else {
								currentPath = currentPath.replace(currentPath.substring(Math.max(0, currentPath.lastIndexOf("."))), "");
								currentPath += ".";
							}
							currentPath += line.substring(whiteSpace, index);
						}
					}
				} else
					node = false;
				if (node) {
					String comment;
					comment = comments.get(currentPath);
					if (comment != null) {
						// Emit the comment block directly above the node line.
						line = comment + System.lineSeparator() + line + System.lineSeparator();
					} else {
						line += System.lineSeparator();
					}
				}
				newContents.append(line).append((!node) ? System.lineSeparator() : "");
			}
			// Trim blank leading lines introduced by the rewrite.
			while (newContents.toString().startsWith(System.lineSeparator()))
				newContents = new StringBuilder(
						newContents.toString().replaceFirst(System.lineSeparator(), ""));
			stringToFile(newContents.toString(), file);
		}
	}

	/**
	 * Adds a comment just before the specified path. The comment can be multiple
	 * lines. An empty string will indicate a blank line.
	 *
	 * @param path Configuration path to add comment.
	 * @param commentLines Comments to add. One String per line.
	 */
	public void addComment(String path, String... commentLines) {
		StringBuilder commentstring = new StringBuilder();
		StringBuilder leadingSpaces = new StringBuilder();
		// Indent the comment to match the nesting depth implied by the dotted path.
		for (int n = 0; n < path.length(); n++) {
			if (path.charAt(n) == '.') {
				leadingSpaces.append("  ");
			}
		}
		for (String line : commentLines) {
			line = line.trim();
			if (!line.startsWith("#")) {
				line = "# " + line;
			}
			if (!line.isEmpty()) {
				line = leadingSpaces + line;
			} else {
				line = "";
			}
			if (commentstring.length() > 0) {
				commentstring.append(System.lineSeparator());
			}
			commentstring.append(line);
		}
		comments.put(path, commentstring.toString());
	}

	/**
	 * Saves the raw (comment-less) YAML to the given file.
	 *
	 * @param file target file; parent directories are created if missing
	 * @throws IOException if writing fails
	 */
	public void save(File file) throws IOException {
		Preconditions.checkNotNull(file, "File cannot be null");
		// BUGFIX: previously this called file.mkdirs(), which created a DIRECTORY named
		// like the target file and made the subsequent FileOutputStream fail.
		// Create the parent directories instead.
		File parent = file.getAbsoluteFile().getParentFile();
		if (parent != null) {
			parent.mkdirs();
		}
		String data = this.saveToString();
		try (Writer writer = new OutputStreamWriter(new FileOutputStream(file), Charsets.UTF_8)) {
			writer.write(data);
		}
	}

	/**
	 * Dumps the configuration as block-style YAML with a very wide line limit so
	 * values are never wrapped (wrapping would break the comment re-insertion in save()).
	 */
	@Override
	public String saveToString() {
		yamlOptions.setIndent(options().indent());
		yamlOptions.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
		yamlOptions.setWidth(10000);
		yamlRepresenter.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
		String dump = yaml.dump(getValues(false));
		if (dump.equals(BLANK_CONFIG)) {
			dump = "";
		}
		return dump;
	}
}
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */

package io.opentelemetry.javaagent.instrumentation.undertow;

import io.opentelemetry.context.Context;
import io.opentelemetry.context.propagation.TextMapGetter;
import io.opentelemetry.instrumentation.api.servlet.AppServerBridge;
import io.opentelemetry.instrumentation.api.servlet.ServletSpanNaming;
import io.opentelemetry.instrumentation.api.tracer.HttpServerTracer;
import io.opentelemetry.javaagent.instrumentation.api.undertow.KeyHolder;
import io.undertow.server.HttpServerExchange;
import io.undertow.util.AttachmentKey;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import org.checkerframework.checker.nullness.qual.Nullable;

/**
 * Server tracer for Undertow: extracts request/response attributes from the
 * {@link HttpServerExchange} and stores the tracing {@link Context} as an exchange attachment.
 */
public class UndertowHttpServerTracer
    extends HttpServerTracer<
        HttpServerExchange, HttpServerExchange, HttpServerExchange, HttpServerExchange> {

  private static final UndertowHttpServerTracer TRACER = new UndertowHttpServerTracer();

  public static UndertowHttpServerTracer tracer() {
    return TRACER;
  }

  @Override
  protected String getInstrumentationName() {
    return "io.opentelemetry.javaagent.undertow";
  }

  public Context startServerSpan(HttpServerExchange exchange, Method instrumentedMethod) {
    // Span name is "HTTP <method>"; the exchange serves as request, carrier, and connection.
    String spanName = "HTTP " + exchange.getRequestMethod().toString();
    return startSpan(exchange, exchange, exchange, spanName);
  }

  @Override
  protected Context customizeContext(Context context, HttpServerExchange exchange) {
    // Layer servlet span naming and the app-server bridge onto the context.
    return AppServerBridge.init(ServletSpanNaming.init(context));
  }

  @SuppressWarnings("unchecked")
  @Override
  public @Nullable Context getServerContext(HttpServerExchange exchange) {
    AttachmentKey<Context> contextKey =
        (AttachmentKey<Context>) KeyHolder.contextKeys.get(AttachmentKey.class);
    // No key registered yet means no context was ever attached.
    return contextKey == null ? null : exchange.getAttachment(contextKey);
  }

  @Override
  protected @Nullable Integer peerPort(HttpServerExchange exchange) {
    return exchange.getConnection().getPeerAddress(InetSocketAddress.class).getPort();
  }

  @Override
  protected @Nullable String peerHostIP(HttpServerExchange exchange) {
    return exchange.getConnection().getPeerAddress(InetSocketAddress.class).getHostString();
  }

  @Override
  protected String flavor(HttpServerExchange exchange, HttpServerExchange exchange2) {
    return exchange.getProtocol().toString();
  }

  @Override
  protected TextMapGetter<HttpServerExchange> getGetter() {
    return UndertowExchangeGetter.GETTER;
  }

  @Override
  protected String url(HttpServerExchange exchange) {
    String base = exchange.getRequestURL();
    String query = exchange.getQueryString();
    // Re-append the query string, which getRequestURL() does not include.
    return (query == null || query.isEmpty()) ? base : base + "?" + query;
  }

  @Override
  protected String method(HttpServerExchange exchange) {
    return exchange.getRequestMethod().toString();
  }

  @Override
  protected @Nullable String requestHeader(HttpServerExchange exchange, String name) {
    return exchange.getRequestHeaders().getFirst(name);
  }

  @Override
  protected int responseStatus(HttpServerExchange exchange) {
    return exchange.getStatusCode();
  }

  @SuppressWarnings("unchecked")
  @Override
  protected void attachServerContext(Context context, HttpServerExchange exchange) {
    // Lazily create one shared attachment key per agent, then stash the context on the exchange.
    AttachmentKey<Context> contextKey =
        (AttachmentKey<Context>)
            KeyHolder.contextKeys.computeIfAbsent(
                AttachmentKey.class, key -> AttachmentKey.create(Context.class));
    exchange.putAttachment(contextKey, context);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.kstream.internals; import org.apache.kafka.common.serialization.Serde; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.internals.ApiUtils; import org.apache.kafka.streams.kstream.BranchedKStream; import org.apache.kafka.streams.kstream.ForeachAction; import org.apache.kafka.streams.kstream.GlobalKTable; import org.apache.kafka.streams.kstream.Grouped; import org.apache.kafka.streams.kstream.JoinWindows; import org.apache.kafka.streams.kstream.Joined; import org.apache.kafka.streams.kstream.KGroupedStream; import org.apache.kafka.streams.kstream.KStream; import org.apache.kafka.streams.kstream.KTable; import org.apache.kafka.streams.kstream.KeyValueMapper; import org.apache.kafka.streams.kstream.Materialized; import org.apache.kafka.streams.kstream.Named; import org.apache.kafka.streams.kstream.Predicate; import org.apache.kafka.streams.kstream.Printed; import org.apache.kafka.streams.kstream.Produced; import org.apache.kafka.streams.kstream.Repartitioned; import org.apache.kafka.streams.kstream.StreamJoined; import org.apache.kafka.streams.kstream.TransformerSupplier; import 
org.apache.kafka.streams.kstream.ValueJoiner; import org.apache.kafka.streams.kstream.ValueJoinerWithKey; import org.apache.kafka.streams.kstream.ValueMapper; import org.apache.kafka.streams.kstream.ValueMapperWithKey; import org.apache.kafka.streams.kstream.ValueTransformerSupplier; import org.apache.kafka.streams.kstream.ValueTransformerWithKeySupplier; import org.apache.kafka.streams.kstream.internals.graph.BaseRepartitionNode; import org.apache.kafka.streams.kstream.internals.graph.BaseRepartitionNode.BaseRepartitionNodeBuilder; import org.apache.kafka.streams.kstream.internals.graph.OptimizableRepartitionNode; import org.apache.kafka.streams.kstream.internals.graph.OptimizableRepartitionNode.OptimizableRepartitionNodeBuilder; import org.apache.kafka.streams.kstream.internals.graph.ProcessorGraphNode; import org.apache.kafka.streams.kstream.internals.graph.ProcessorParameters; import org.apache.kafka.streams.kstream.internals.graph.StatefulProcessorNode; import org.apache.kafka.streams.kstream.internals.graph.StreamSinkNode; import org.apache.kafka.streams.kstream.internals.graph.StreamTableJoinNode; import org.apache.kafka.streams.kstream.internals.graph.StreamToTableNode; import org.apache.kafka.streams.kstream.internals.graph.GraphNode; import org.apache.kafka.streams.kstream.internals.graph.UnoptimizableRepartitionNode; import org.apache.kafka.streams.kstream.internals.graph.UnoptimizableRepartitionNode.UnoptimizableRepartitionNodeBuilder; import org.apache.kafka.streams.processor.FailOnInvalidTimestamp; import org.apache.kafka.streams.processor.api.ProcessorSupplier; import org.apache.kafka.streams.processor.StreamPartitioner; import org.apache.kafka.streams.processor.TopicNameExtractor; import org.apache.kafka.streams.kstream.ForeachProcessor; import org.apache.kafka.streams.processor.internals.InternalTopicProperties; import org.apache.kafka.streams.processor.internals.StaticTopicNameExtractor; import org.apache.kafka.streams.state.KeyValueStore; import 
java.lang.reflect.Array; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Objects; import java.util.Set; import static org.apache.kafka.streams.kstream.internals.graph.OptimizableRepartitionNode.optimizableRepartitionNodeBuilder; public class KStreamImpl<K, V> extends AbstractStream<K, V> implements KStream<K, V> { static final String JOINTHIS_NAME = "KSTREAM-JOINTHIS-"; static final String JOINOTHER_NAME = "KSTREAM-JOINOTHER-"; static final String JOIN_NAME = "KSTREAM-JOIN-"; static final String LEFTJOIN_NAME = "KSTREAM-LEFTJOIN-"; static final String MERGE_NAME = "KSTREAM-MERGE-"; static final String OUTERTHIS_NAME = "KSTREAM-OUTERTHIS-"; static final String OUTEROTHER_NAME = "KSTREAM-OUTEROTHER-"; static final String WINDOWED_NAME = "KSTREAM-WINDOWED-"; static final String OUTERSHARED_NAME = "KSTREAM-OUTERSHARED-"; static final String SOURCE_NAME = "KSTREAM-SOURCE-"; static final String SINK_NAME = "KSTREAM-SINK-"; static final String REPARTITION_TOPIC_SUFFIX = "-repartition"; private static final String BRANCH_NAME = "KSTREAM-BRANCH-"; private static final String BRANCHCHILD_NAME = "KSTREAM-BRANCHCHILD-"; private static final String FILTER_NAME = "KSTREAM-FILTER-"; private static final String PEEK_NAME = "KSTREAM-PEEK-"; private static final String FLATMAP_NAME = "KSTREAM-FLATMAP-"; private static final String FLATMAPVALUES_NAME = "KSTREAM-FLATMAPVALUES-"; private static final String MAP_NAME = "KSTREAM-MAP-"; private static final String MAPVALUES_NAME = "KSTREAM-MAPVALUES-"; private static final String PROCESSOR_NAME = "KSTREAM-PROCESSOR-"; private static final String PRINTING_NAME = "KSTREAM-PRINTER-"; private static final String KEY_SELECT_NAME = "KSTREAM-KEY-SELECT-"; private static final String TRANSFORM_NAME = "KSTREAM-TRANSFORM-"; private static final String TRANSFORMVALUES_NAME = "KSTREAM-TRANSFORMVALUES-"; private static final String FOREACH_NAME = "KSTREAM-FOREACH-"; private static final String 
TO_KTABLE_NAME = "KSTREAM-TOTABLE-";
    private static final String REPARTITION_NAME = "KSTREAM-REPARTITION-";

    // True when an upstream operation may have changed the key (e.g. selectKey/map/flatMap),
    // so a repartition is required before any key-dependent downstream operation.
    private final boolean repartitionRequired;

    // Cached repartition node so repeated join-driven repartitions of this stream can
    // reuse a single node/topic (see repartitionForJoin further down in this class).
    private OptimizableRepartitionNode<K, V> repartitionNode;

    KStreamImpl(final String name,
                final Serde<K> keySerde,
                final Serde<V> valueSerde,
                final Set<String> subTopologySourceNodes,
                final boolean repartitionRequired,
                final GraphNode graphNode,
                final InternalStreamsBuilder builder) {
        super(name, keySerde, valueSerde, subTopologySourceNodes, graphNode, builder);
        this.repartitionRequired = repartitionRequired;
    }

    @Override
    public KStream<K, V> filter(final Predicate<? super K, ? super V> predicate) {
        // Delegate to the named variant with an empty (auto-generated) name.
        return filter(predicate, NamedInternal.empty());
    }

    @Override
    public KStream<K, V> filter(final Predicate<? super K, ? super V> predicate, final Named named) {
        Objects.requireNonNull(predicate, "predicate can't be null");
        Objects.requireNonNull(named, "named can't be null");
        final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, FILTER_NAME);
        // KStreamFilter with filterNot=false keeps records matching the predicate.
        final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(new KStreamFilter<>(predicate, false), name);
        final ProcessorGraphNode<? super K, ? super V> filterProcessorNode = new ProcessorGraphNode<>(name, processorParameters);
        builder.addGraphNode(graphNode, filterProcessorNode);
        // filter() touches neither key nor value, so serdes and the repartition flag carry over.
        return new KStreamImpl<>(
            name,
            keySerde,
            valueSerde,
            subTopologySourceNodes,
            repartitionRequired,
            filterProcessorNode,
            builder);
    }

    @Override
    public KStream<K, V> filterNot(final Predicate<? super K, ? super V> predicate) {
        return filterNot(predicate, NamedInternal.empty());
    }

    @Override
    public KStream<K, V> filterNot(final Predicate<? super K, ? super V> predicate, final Named named) {
        Objects.requireNonNull(predicate, "predicate can't be null");
        Objects.requireNonNull(named, "named can't be null");
        final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, FILTER_NAME);
        // Same as filter() but with the predicate inverted (filterNot=true).
        final ProcessorParameters<? super K, ?
super V, ?, ?> processorParameters = new ProcessorParameters<>(new KStreamFilter<>(predicate, true), name); final ProcessorGraphNode<? super K, ? super V> filterNotProcessorNode = new ProcessorGraphNode<>(name, processorParameters); builder.addGraphNode(graphNode, filterNotProcessorNode); return new KStreamImpl<>( name, keySerde, valueSerde, subTopologySourceNodes, repartitionRequired, filterNotProcessorNode, builder); } @Override public <KR> KStream<KR, V> selectKey(final KeyValueMapper<? super K, ? super V, ? extends KR> mapper) { return selectKey(mapper, NamedInternal.empty()); } @Override public <KR> KStream<KR, V> selectKey(final KeyValueMapper<? super K, ? super V, ? extends KR> mapper, final Named named) { Objects.requireNonNull(mapper, "mapper can't be null"); Objects.requireNonNull(named, "named can't be null"); final ProcessorGraphNode<K, V> selectKeyProcessorNode = internalSelectKey(mapper, new NamedInternal(named)); selectKeyProcessorNode.keyChangingOperation(true); builder.addGraphNode(graphNode, selectKeyProcessorNode); // key serde cannot be preserved return new KStreamImpl<>( selectKeyProcessorNode.nodeName(), null, valueSerde, subTopologySourceNodes, true, selectKeyProcessorNode, builder); } private <KR> ProcessorGraphNode<K, V> internalSelectKey(final KeyValueMapper<? super K, ? super V, ? extends KR> mapper, final NamedInternal named) { final String name = named.orElseGenerateWithPrefix(builder, KEY_SELECT_NAME); final KStreamMap<K, V, KR, V> kStreamMap = new KStreamMap<>((key, value) -> new KeyValue<>(mapper.apply(key, value), value)); final ProcessorParameters<K, V, ?, ?> processorParameters = new ProcessorParameters<>(kStreamMap, name); return new ProcessorGraphNode<>(name, processorParameters); } @Override public <KR, VR> KStream<KR, VR> map(final KeyValueMapper<? super K, ? super V, ? extends KeyValue<? extends KR, ? 
extends VR>> mapper) { return map(mapper, NamedInternal.empty()); } @Override public <KR, VR> KStream<KR, VR> map(final KeyValueMapper<? super K, ? super V, ? extends KeyValue<? extends KR, ? extends VR>> mapper, final Named named) { Objects.requireNonNull(mapper, "mapper can't be null"); Objects.requireNonNull(named, "named can't be null"); final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, MAP_NAME); final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(new KStreamMap<>(mapper), name); final ProcessorGraphNode<? super K, ? super V> mapProcessorNode = new ProcessorGraphNode<>(name, processorParameters); mapProcessorNode.keyChangingOperation(true); builder.addGraphNode(graphNode, mapProcessorNode); // key and value serde cannot be preserved return new KStreamImpl<>( name, null, null, subTopologySourceNodes, true, mapProcessorNode, builder); } @Override public <VR> KStream<K, VR> mapValues(final ValueMapper<? super V, ? extends VR> valueMapper) { return mapValues(withKey(valueMapper)); } @Override public <VR> KStream<K, VR> mapValues(final ValueMapper<? super V, ? extends VR> mapper, final Named named) { return mapValues(withKey(mapper), named); } @Override public <VR> KStream<K, VR> mapValues(final ValueMapperWithKey<? super K, ? super V, ? extends VR> valueMapperWithKey) { return mapValues(valueMapperWithKey, NamedInternal.empty()); } @Override public <VR> KStream<K, VR> mapValues(final ValueMapperWithKey<? super K, ? super V, ? extends VR> valueMapperWithKey, final Named named) { Objects.requireNonNull(valueMapperWithKey, "valueMapperWithKey can't be null"); Objects.requireNonNull(named, "named can't be null"); final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, MAPVALUES_NAME); final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(new KStreamMapValues<>(valueMapperWithKey), name); final ProcessorGraphNode<? 
super K, ? super V> mapValuesProcessorNode = new ProcessorGraphNode<>(name, processorParameters); mapValuesProcessorNode.setValueChangingOperation(true); builder.addGraphNode(graphNode, mapValuesProcessorNode); // value serde cannot be preserved return new KStreamImpl<>( name, keySerde, null, subTopologySourceNodes, repartitionRequired, mapValuesProcessorNode, builder); } @Override public <KR, VR> KStream<KR, VR> flatMap(final KeyValueMapper<? super K, ? super V, ? extends Iterable<? extends KeyValue<? extends KR, ? extends VR>>> mapper) { return flatMap(mapper, NamedInternal.empty()); } @Override public <KR, VR> KStream<KR, VR> flatMap(final KeyValueMapper<? super K, ? super V, ? extends Iterable<? extends KeyValue<? extends KR, ? extends VR>>> mapper, final Named named) { Objects.requireNonNull(mapper, "mapper can't be null"); Objects.requireNonNull(named, "named can't be null"); final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, FLATMAP_NAME); final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(new KStreamFlatMap<>(mapper), name); final ProcessorGraphNode<? super K, ? super V> flatMapNode = new ProcessorGraphNode<>(name, processorParameters); flatMapNode.keyChangingOperation(true); builder.addGraphNode(graphNode, flatMapNode); // key and value serde cannot be preserved return new KStreamImpl<>(name, null, null, subTopologySourceNodes, true, flatMapNode, builder); } @Override public <VR> KStream<K, VR> flatMapValues(final ValueMapper<? super V, ? extends Iterable<? extends VR>> mapper) { return flatMapValues(withKey(mapper)); } @Override public <VR> KStream<K, VR> flatMapValues(final ValueMapper<? super V, ? extends Iterable<? extends VR>> mapper, final Named named) { return flatMapValues(withKey(mapper), named); } @Override public <VR> KStream<K, VR> flatMapValues(final ValueMapperWithKey<? super K, ? super V, ? extends Iterable<? 
extends VR>> mapper) { return flatMapValues(mapper, NamedInternal.empty()); } @Override public <VR> KStream<K, VR> flatMapValues(final ValueMapperWithKey<? super K, ? super V, ? extends Iterable<? extends VR>> valueMapper, final Named named) { Objects.requireNonNull(valueMapper, "valueMapper can't be null"); Objects.requireNonNull(named, "named can't be null"); final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, FLATMAPVALUES_NAME); final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(new KStreamFlatMapValues<>(valueMapper), name); final ProcessorGraphNode<? super K, ? super V> flatMapValuesNode = new ProcessorGraphNode<>(name, processorParameters); flatMapValuesNode.setValueChangingOperation(true); builder.addGraphNode(graphNode, flatMapValuesNode); // value serde cannot be preserved return new KStreamImpl<>( name, keySerde, null, subTopologySourceNodes, repartitionRequired, flatMapValuesNode, builder); } @Override public void print(final Printed<K, V> printed) { Objects.requireNonNull(printed, "printed can't be null"); final PrintedInternal<K, V> printedInternal = new PrintedInternal<>(printed); final String name = new NamedInternal(printedInternal.name()).orElseGenerateWithPrefix(builder, PRINTING_NAME); final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(printedInternal.build(this.name), name); final ProcessorGraphNode<? super K, ? super V> printNode = new ProcessorGraphNode<>(name, processorParameters); builder.addGraphNode(graphNode, printNode); } @Override public void foreach(final ForeachAction<? super K, ? super V> action) { foreach(action, NamedInternal.empty()); } @Override public void foreach(final ForeachAction<? super K, ? 
super V> action, final Named named) {
        Objects.requireNonNull(action, "action can't be null");
        Objects.requireNonNull(named, "named can't be null");
        final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, FOREACH_NAME);
        // foreach is terminal: the node is added to the graph but no KStream is returned.
        final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(() -> new ForeachProcessor<>(action), name);
        final ProcessorGraphNode<? super K, ? super V> foreachNode = new ProcessorGraphNode<>(name, processorParameters);
        builder.addGraphNode(graphNode, foreachNode);
    }

    @Override
    public KStream<K, V> peek(final ForeachAction<? super K, ? super V> action) {
        return peek(action, NamedInternal.empty());
    }

    @Override
    public KStream<K, V> peek(final ForeachAction<? super K, ? super V> action, final Named named) {
        Objects.requireNonNull(action, "action can't be null");
        Objects.requireNonNull(named, "named can't be null");
        final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, PEEK_NAME);
        final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(new KStreamPeek<>(action), name);
        final ProcessorGraphNode<? super K, ? super V> peekNode = new ProcessorGraphNode<>(name, processorParameters);
        builder.addGraphNode(graphNode, peekNode);
        // Unlike foreach, peek passes records through unchanged, so the stream continues
        // with the same serdes and repartition flag.
        return new KStreamImpl<>(
            name,
            keySerde,
            valueSerde,
            subTopologySourceNodes,
            repartitionRequired,
            peekNode,
            builder);
    }

    @Deprecated
    @Override
    @SuppressWarnings("unchecked")
    public KStream<K, V>[] branch(final Predicate<? super K, ? super V>... predicates) {
        return doBranch(NamedInternal.empty(), predicates);
    }

    @Deprecated
    @Override
    @SuppressWarnings("unchecked")
    public KStream<K, V>[] branch(final Named named, final Predicate<? super K, ? super V>... predicates) {
        Objects.requireNonNull(named, "named can't be null");
        return doBranch(new NamedInternal(named), predicates);
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    private KStream<K, V>[] doBranch(final NamedInternal named,
                                     final Predicate<? super K, ?
super V>... predicates) {
        Objects.requireNonNull(predicates, "predicates can't be a null array");
        if (predicates.length == 0) {
            throw new IllegalArgumentException("branch() requires at least one predicate");
        }
        for (final Predicate<? super K, ? super V> predicate : predicates) {
            Objects.requireNonNull(predicate, "predicates can't be null");
        }
        // A single branch processor evaluates the predicates; childNames[i] identifies the
        // downstream child node that receives records matching predicate i.
        final String branchName = named.orElseGenerateWithPrefix(builder, BRANCH_NAME);
        final String[] childNames = new String[predicates.length];
        for (int i = 0; i < predicates.length; i++) {
            childNames[i] = named.suffixWithOrElseGet("-predicate-" + i, builder, BRANCHCHILD_NAME);
        }
        final ProcessorParameters processorParameters = new ProcessorParameters<>(new KStreamBranch(Arrays.asList(predicates.clone()), Arrays.asList(childNames)), branchName);
        final ProcessorGraphNode<K, V> branchNode = new ProcessorGraphNode<>(branchName, processorParameters);
        builder.addGraphNode(graphNode, branchNode);
        final KStream<K, V>[] branchChildren = (KStream<K, V>[]) Array.newInstance(KStream.class, predicates.length);
        for (int i = 0; i < predicates.length; i++) {
            // Each branch child is a pass-through node parented to the branch node; key and
            // value are unchanged, so serdes and the repartition flag carry over.
            final ProcessorParameters innerProcessorParameters = new ProcessorParameters<>(new PassThrough<K, V>(), childNames[i]);
            final ProcessorGraphNode<K, V> branchChildNode = new ProcessorGraphNode<>(childNames[i], innerProcessorParameters);
            builder.addGraphNode(branchNode, branchChildNode);
            branchChildren[i] = new KStreamImpl<>(childNames[i], keySerde, valueSerde, subTopologySourceNodes, repartitionRequired, branchChildNode, builder);
        }
        return branchChildren;
    }

    @Override
    public BranchedKStream<K, V> split() {
        return new BranchedKStreamImpl<>(this, repartitionRequired, NamedInternal.empty());
    }

    @Override
    public BranchedKStream<K, V> split(final Named named) {
        Objects.requireNonNull(named, "named can't be null");
        return new BranchedKStreamImpl<>(this, repartitionRequired, new NamedInternal(named));
    }

    @Override
    public KStream<K, V> merge(final KStream<K, V> stream) {
        return merge(stream,
NamedInternal.empty()); }

    @Override
    public KStream<K, V> merge(final KStream<K, V> stream, final Named named) {
        Objects.requireNonNull(stream, "stream can't be null");
        Objects.requireNonNull(named, "named can't be null");
        return merge(builder, stream, new NamedInternal(named));
    }

    private KStream<K, V> merge(final InternalStreamsBuilder builder,
                                final KStream<K, V> stream,
                                final NamedInternal named) {
        final KStreamImpl<K, V> streamImpl = (KStreamImpl<K, V>) stream;
        // The merged stream needs repartitioning if either input does.
        final boolean requireRepartitioning = streamImpl.repartitionRequired || repartitionRequired;
        final String name = named.orElseGenerateWithPrefix(builder, MERGE_NAME);
        // Union of the source nodes feeding both inputs.
        final Set<String> allSubTopologySourceNodes = new HashSet<>();
        allSubTopologySourceNodes.addAll(subTopologySourceNodes);
        allSubTopologySourceNodes.addAll(streamImpl.subTopologySourceNodes);
        final ProcessorParameters<? super K, ? super V, ?, ?> processorParameters = new ProcessorParameters<>(new PassThrough<>(), name);
        final ProcessorGraphNode<? super K, ? super V> mergeNode = new ProcessorGraphNode<>(name, processorParameters);
        mergeNode.setMergeNode(true);
        // The merge node has two parents: this stream's node and the other stream's node.
        builder.addGraphNode(Arrays.asList(graphNode, streamImpl.graphNode), mergeNode);
        // drop the serde as we cannot safely use either one to represent both streams
        return new KStreamImpl<>(
            name,
            null,
            null,
            allSubTopologySourceNodes,
            requireRepartitioning,
            mergeNode,
            builder);
    }

    @Deprecated
    @Override
    public KStream<K, V> through(final String topic) {
        return through(topic, Produced.with(keySerde, valueSerde, null));
    }

    @Deprecated
    @Override
    public KStream<K, V> through(final String topic, final Produced<K, V> produced) {
        Objects.requireNonNull(topic, "topic can't be null");
        Objects.requireNonNull(produced, "produced can't be null");
        final ProducedInternal<K, V> producedInternal = new ProducedInternal<>(produced);
        // Fall back to this stream's serdes when none were supplied via Produced.
        if (producedInternal.keySerde() == null) {
            producedInternal.withKeySerde(keySerde);
        }
        if (producedInternal.valueSerde() == null) {
            producedInternal.withValueSerde(valueSerde);
        }
to(topic, producedInternal);
        // through = write to the topic, then read it back as a fresh stream.
        return builder.stream(
            Collections.singleton(topic),
            new ConsumedInternal<>(
                producedInternal.keySerde(),
                producedInternal.valueSerde(),
                new FailOnInvalidTimestamp(),
                null
            )
        );
    }

    @Override
    public KStream<K, V> repartition() {
        return doRepartition(Repartitioned.as(null));
    }

    @Override
    public KStream<K, V> repartition(final Repartitioned<K, V> repartitioned) {
        return doRepartition(repartitioned);
    }

    private KStream<K, V> doRepartition(final Repartitioned<K, V> repartitioned) {
        Objects.requireNonNull(repartitioned, "repartitioned can't be null");
        final RepartitionedInternal<K, V> repartitionedInternal = new RepartitionedInternal<>(repartitioned);
        // Use the user-supplied name if present, otherwise generate one.
        final String name = repartitionedInternal.name() != null ? repartitionedInternal.name() : builder
            .newProcessorName(REPARTITION_NAME);
        // Serdes from Repartitioned take precedence over this stream's serdes.
        final Serde<V> valueSerde = repartitionedInternal.valueSerde() == null ? this.valueSerde : repartitionedInternal.valueSerde();
        final Serde<K> keySerde = repartitionedInternal.keySerde() == null ?
this.keySerde : repartitionedInternal.keySerde(); final UnoptimizableRepartitionNodeBuilder<K, V> unoptimizableRepartitionNodeBuilder = UnoptimizableRepartitionNode .unoptimizableRepartitionNodeBuilder(); final InternalTopicProperties internalTopicProperties = repartitionedInternal.toInternalTopicProperties(); final String repartitionSourceName = createRepartitionedSource( builder, repartitionedInternal.keySerde(), valueSerde, name, repartitionedInternal.streamPartitioner(), unoptimizableRepartitionNodeBuilder.withInternalTopicProperties(internalTopicProperties) ); final UnoptimizableRepartitionNode<K, V> unoptimizableRepartitionNode = unoptimizableRepartitionNodeBuilder.build(); builder.addGraphNode(graphNode, unoptimizableRepartitionNode); final Set<String> sourceNodes = new HashSet<>(); sourceNodes.add(unoptimizableRepartitionNode.nodeName()); return new KStreamImpl<>( repartitionSourceName, keySerde, valueSerde, Collections.unmodifiableSet(sourceNodes), false, unoptimizableRepartitionNode, builder ); } @Override public void to(final String topic) { to(topic, Produced.with(keySerde, valueSerde, null)); } @Override public void to(final String topic, final Produced<K, V> produced) { Objects.requireNonNull(topic, "topic can't be null"); Objects.requireNonNull(produced, "produced can't be null"); final ProducedInternal<K, V> producedInternal = new ProducedInternal<>(produced); if (producedInternal.keySerde() == null) { producedInternal.withKeySerde(keySerde); } if (producedInternal.valueSerde() == null) { producedInternal.withValueSerde(valueSerde); } to(new StaticTopicNameExtractor<>(topic), producedInternal); } @Override public void to(final TopicNameExtractor<K, V> topicExtractor) { to(topicExtractor, Produced.with(keySerde, valueSerde, null)); } @Override public void to(final TopicNameExtractor<K, V> topicExtractor, final Produced<K, V> produced) { Objects.requireNonNull(topicExtractor, "topicExtractor can't be null"); Objects.requireNonNull(produced, "produced 
can't be null"); final ProducedInternal<K, V> producedInternal = new ProducedInternal<>(produced); if (producedInternal.keySerde() == null) { producedInternal.withKeySerde(keySerde); } if (producedInternal.valueSerde() == null) { producedInternal.withValueSerde(valueSerde); } to(topicExtractor, producedInternal); } private void to(final TopicNameExtractor<K, V> topicExtractor, final ProducedInternal<K, V> produced) { final String name = new NamedInternal(produced.name()).orElseGenerateWithPrefix(builder, SINK_NAME); final StreamSinkNode<K, V> sinkNode = new StreamSinkNode<>( name, topicExtractor, produced ); builder.addGraphNode(graphNode, sinkNode); } @Override public KTable<K, V> toTable() { return toTable(NamedInternal.empty(), Materialized.with(keySerde, valueSerde)); } @Override public KTable<K, V> toTable(final Named named) { return toTable(named, Materialized.with(keySerde, valueSerde)); } @Override public KTable<K, V> toTable(final Materialized<K, V, KeyValueStore<Bytes, byte[]>> materialized) { return toTable(NamedInternal.empty(), materialized); } @Override public KTable<K, V> toTable(final Named named, final Materialized<K, V, KeyValueStore<Bytes, byte[]>> materialized) { Objects.requireNonNull(named, "named can't be null"); Objects.requireNonNull(materialized, "materialized can't be null"); final NamedInternal namedInternal = new NamedInternal(named); final String name = namedInternal.orElseGenerateWithPrefix(builder, TO_KTABLE_NAME); final MaterializedInternal<K, V, KeyValueStore<Bytes, byte[]>> materializedInternal = new MaterializedInternal<>(materialized, builder, TO_KTABLE_NAME); final Serde<K> keySerdeOverride = materializedInternal.keySerde() == null ? keySerde : materializedInternal.keySerde(); final Serde<V> valueSerdeOverride = materializedInternal.valueSerde() == null ? 
valueSerde : materializedInternal.valueSerde(); final Set<String> subTopologySourceNodes; final GraphNode tableParentNode; if (repartitionRequired) { final OptimizableRepartitionNodeBuilder<K, V> repartitionNodeBuilder = optimizableRepartitionNodeBuilder(); final String sourceName = createRepartitionedSource( builder, keySerdeOverride, valueSerdeOverride, name, null, repartitionNodeBuilder ); tableParentNode = repartitionNodeBuilder.build(); builder.addGraphNode(graphNode, tableParentNode); subTopologySourceNodes = Collections.singleton(sourceName); } else { tableParentNode = graphNode; subTopologySourceNodes = this.subTopologySourceNodes; } final KTableSource<K, V> tableSource = new KTableSource<>( materializedInternal.storeName(), materializedInternal.queryableStoreName() ); final ProcessorParameters<K, V, ?, ?> processorParameters = new ProcessorParameters<>(tableSource, name); final GraphNode tableNode = new StreamToTableNode<>( name, processorParameters, materializedInternal ); builder.addGraphNode(tableParentNode, tableNode); return new KTableImpl<K, V, V>( name, keySerdeOverride, valueSerdeOverride, subTopologySourceNodes, materializedInternal.queryableStoreName(), tableSource, tableNode, builder ); } @Override public <KR> KGroupedStream<KR, V> groupBy(final KeyValueMapper<? super K, ? super V, KR> keySelector) { return groupBy(keySelector, Grouped.with(null, valueSerde)); } @Override public <KR> KGroupedStream<KR, V> groupBy(final KeyValueMapper<? super K, ? 
super V, KR> keySelector, final Grouped<KR, V> grouped) { Objects.requireNonNull(keySelector, "keySelector can't be null"); Objects.requireNonNull(grouped, "grouped can't be null"); final GroupedInternal<KR, V> groupedInternal = new GroupedInternal<>(grouped); final ProcessorGraphNode<K, V> selectKeyMapNode = internalSelectKey(keySelector, new NamedInternal(groupedInternal.name())); selectKeyMapNode.keyChangingOperation(true); builder.addGraphNode(graphNode, selectKeyMapNode); return new KGroupedStreamImpl<>( selectKeyMapNode.nodeName(), subTopologySourceNodes, groupedInternal, true, selectKeyMapNode, builder); } @Override public KGroupedStream<K, V> groupByKey() { return groupByKey(Grouped.with(keySerde, valueSerde)); } @Override public KGroupedStream<K, V> groupByKey(final Grouped<K, V> grouped) { Objects.requireNonNull(grouped, "grouped can't be null"); final GroupedInternal<K, V> groupedInternal = new GroupedInternal<>(grouped); return new KGroupedStreamImpl<>( name, subTopologySourceNodes, groupedInternal, repartitionRequired, graphNode, builder); } @Override public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return join(otherStream, toValueJoinerWithKey(joiner), windows); } @Override public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream, final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return join(otherStream, joiner, windows, StreamJoined.with(null, null, null)); } @Override public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows, final StreamJoined<K, V, VO> streamJoined) { return join(otherStream, toValueJoinerWithKey(joiner), windows, streamJoined); } @Override public <VO, VR> KStream<K, VR> join(final KStream<K, VO> otherStream, final ValueJoinerWithKey<? super K, ? 
super V, ? super VO, ? extends VR> joiner, final JoinWindows windows, final StreamJoined<K, V, VO> streamJoined) { return doJoin( otherStream, joiner, windows, streamJoined, new KStreamImplJoin(builder, false, false)); } @Override public <VO, VR> KStream<K, VR> leftJoin(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return leftJoin(otherStream, toValueJoinerWithKey(joiner), windows); } @Override public <VO, VR> KStream<K, VR> leftJoin(final KStream<K, VO> otherStream, final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return leftJoin(otherStream, joiner, windows, StreamJoined.with(null, null, null)); } @Override public <VO, VR> KStream<K, VR> leftJoin(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows, final StreamJoined<K, V, VO> streamJoined) { return doJoin( otherStream, toValueJoinerWithKey(joiner), windows, streamJoined, new KStreamImplJoin(builder, true, false)); } @Override public <VO, VR> KStream<K, VR> leftJoin(final KStream<K, VO> otherStream, final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows, final StreamJoined<K, V, VO> streamJoined) { return doJoin( otherStream, joiner, windows, streamJoined, new KStreamImplJoin(builder, true, false)); } @Override public <VO, VR> KStream<K, VR> outerJoin(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows) { return outerJoin(otherStream, toValueJoinerWithKey(joiner), windows); } @Override public <VO, VR> KStream<K, VR> outerJoin(final KStream<K, VO> otherStream, final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? 
extends VR> joiner, final JoinWindows windows) { return outerJoin(otherStream, joiner, windows, StreamJoined.with(null, null, null)); } @Override public <VO, VR> KStream<K, VR> outerJoin(final KStream<K, VO> otherStream, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows, final StreamJoined<K, V, VO> streamJoined) { return outerJoin(otherStream, toValueJoinerWithKey(joiner), windows, streamJoined); } @Override public <VO, VR> KStream<K, VR> outerJoin(final KStream<K, VO> otherStream, final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows, final StreamJoined<K, V, VO> streamJoined) { return doJoin(otherStream, joiner, windows, streamJoined, new KStreamImplJoin(builder, true, true)); } private <VO, VR> KStream<K, VR> doJoin(final KStream<K, VO> otherStream, final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? extends VR> joiner, final JoinWindows windows, final StreamJoined<K, V, VO> streamJoined, final KStreamImplJoin join) { Objects.requireNonNull(otherStream, "otherStream can't be null"); Objects.requireNonNull(joiner, "joiner can't be null"); Objects.requireNonNull(windows, "windows can't be null"); Objects.requireNonNull(streamJoined, "streamJoined can't be null"); KStreamImpl<K, V> joinThis = this; KStreamImpl<K, VO> joinOther = (KStreamImpl<K, VO>) otherStream; final StreamJoinedInternal<K, V, VO> streamJoinedInternal = new StreamJoinedInternal<>(streamJoined); final NamedInternal name = new NamedInternal(streamJoinedInternal.name()); if (joinThis.repartitionRequired) { final String joinThisName = joinThis.name; final String leftJoinRepartitionTopicName = name.suffixWithOrElseGet("-left", joinThisName); joinThis = joinThis.repartitionForJoin(leftJoinRepartitionTopicName, streamJoinedInternal.keySerde(), streamJoinedInternal.valueSerde()); } if (joinOther.repartitionRequired) { final String joinOtherName = joinOther.name; final String rightJoinRepartitionTopicName 
= name.suffixWithOrElseGet("-right", joinOtherName);
            joinOther = joinOther.repartitionForJoin(rightJoinRepartitionTopicName, streamJoinedInternal.keySerde(), streamJoinedInternal.otherValueSerde());
        }
        // Both inputs of a stream-stream join must be co-partitioned.
        joinThis.ensureCopartitionWith(Collections.singleton(joinOther));
        return join.join(
            joinThis,
            joinOther,
            joiner,
            windows,
            streamJoined);
    }

    /**
     * Repartition a stream. This is required on join operations occurring after
     * an operation that changes the key, i.e., selectKey, map(..), flatMap(..).
     */
    private KStreamImpl<K, V> repartitionForJoin(final String repartitionName,
                                                 final Serde<K> keySerdeOverride,
                                                 final Serde<V> valueSerdeOverride) {
        // Overrides (e.g. from StreamJoined) win over this stream's own serdes.
        final Serde<K> repartitionKeySerde = keySerdeOverride != null ? keySerdeOverride : keySerde;
        final Serde<V> repartitionValueSerde = valueSerdeOverride != null ? valueSerdeOverride : valueSerde;
        final OptimizableRepartitionNodeBuilder<K, V> optimizableRepartitionNodeBuilder = OptimizableRepartitionNode.optimizableRepartitionNodeBuilder();
        // we still need to create the repartitioned source each time
        // as it increments the counter which
        // is needed to maintain topology compatibility
        final String repartitionedSourceName = createRepartitionedSource(
            builder,
            repartitionKeySerde,
            repartitionValueSerde,
            repartitionName,
            null,
            optimizableRepartitionNodeBuilder);
        // Reuse the cached repartition node where possible so repeated joins on the same
        // stream share one repartition topic instead of creating a new node each time.
        if (repartitionNode == null || !name.equals(repartitionName)) {
            repartitionNode = optimizableRepartitionNodeBuilder.build();
            builder.addGraphNode(graphNode, repartitionNode);
        }
        // The repartitioned stream reads from its own source, so it no longer requires
        // repartitioning (flag is false) and its only source node is the new source.
        return new KStreamImpl<>(
            repartitionedSourceName,
            repartitionKeySerde,
            repartitionValueSerde,
            Collections.singleton(repartitionedSourceName),
            false,
            repartitionNode,
            builder);
    }

    /**
     * Wires a repartition sink/topic/source triple into the given builder and returns the
     * name of the generated source node.
     */
    static <K1, V1, RN extends BaseRepartitionNode<K1, V1>> String createRepartitionedSource(final InternalStreamsBuilder builder,
                                                                                             final Serde<K1> keySerde,
                                                                                             final Serde<V1> valueSerde,
                                                                                             final String repartitionTopicNamePrefix,
                                                                                             final StreamPartitioner<K1, V1> streamPartitioner,
                                                                                             final BaseRepartitionNodeBuilder<K1, V1, RN>
baseRepartitionNodeBuilder) { final String repartitionTopicName = repartitionTopicNamePrefix.endsWith(REPARTITION_TOPIC_SUFFIX) ? repartitionTopicNamePrefix : repartitionTopicNamePrefix + REPARTITION_TOPIC_SUFFIX; // Always need to generate the names to burn index counter for compatibility final String genSinkName = builder.newProcessorName(SINK_NAME); final String genNullKeyFilterProcessorName = builder.newProcessorName(FILTER_NAME); final String genSourceName = builder.newProcessorName(SOURCE_NAME); final String sinkName; final String sourceName; final String nullKeyFilterProcessorName; if (repartitionTopicNamePrefix.matches("KSTREAM.*-[0-9]{10}")) { sinkName = genSinkName; sourceName = genSourceName; nullKeyFilterProcessorName = genNullKeyFilterProcessorName; } else { sinkName = repartitionTopicName + "-sink"; sourceName = repartitionTopicName + "-source"; nullKeyFilterProcessorName = repartitionTopicName + "-filter"; } final Predicate<K1, V1> notNullKeyPredicate = (k, v) -> k != null; final ProcessorParameters<K1, V1, ?, ?> processorParameters = new ProcessorParameters<>( new KStreamFilter<>(notNullKeyPredicate, false), nullKeyFilterProcessorName ); baseRepartitionNodeBuilder.withKeySerde(keySerde) .withValueSerde(valueSerde) .withSourceName(sourceName) .withRepartitionTopic(repartitionTopicName) .withSinkName(sinkName) .withProcessorParameters(processorParameters) .withStreamPartitioner(streamPartitioner) // reusing the source name for the graph node name // adding explicit variable as it simplifies logic .withNodeName(sourceName); return sourceName; } @Override public <VO, VR> KStream<K, VR> join(final KTable<K, VO> table, final ValueJoiner<? super V, ? super VO, ? extends VR> joiner) { return join(table, toValueJoinerWithKey(joiner)); } @Override public <VO, VR> KStream<K, VR> join(final KTable<K, VO> table, final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? 
extends VR> joiner) {
    // Delegate to the Joined variant with default serde/name configuration.
    return join(table, joiner, Joined.with(null, null, null));
}

@Override
public <VO, VR> KStream<K, VR> join(final KTable<K, VO> table,
                                    final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
                                    final Joined<K, V, VO> joined) {
    Objects.requireNonNull(table, "table can't be null");
    Objects.requireNonNull(joiner, "joiner can't be null");
    Objects.requireNonNull(joined, "joined can't be null");
    return join(table, toValueJoinerWithKey(joiner), joined);
}

@Override
public <VO, VR> KStream<K, VR> join(final KTable<K, VO> table,
                                    final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? extends VR> joiner,
                                    final Joined<K, V, VO> joined) {
    Objects.requireNonNull(table, "table can't be null");
    Objects.requireNonNull(joiner, "joiner can't be null");
    Objects.requireNonNull(joined, "joined can't be null");
    final JoinedInternal<K, V, VO> joinedInternal = new JoinedInternal<>(joined);
    final String name = joinedInternal.name();
    // Repartition this stream first if an upstream operation may have changed its key.
    if (repartitionRequired) {
        final KStreamImpl<K, V> thisStreamRepartitioned = repartitionForJoin(
            name != null ? name : this.name,
            joined.keySerde(),
            joined.valueSerde()
        );
        return thisStreamRepartitioned.doStreamTableJoin(table, joiner, joined, false);
    } else {
        return doStreamTableJoin(table, joiner, joined, false);
    }
}

@Override
public <VO, VR> KStream<K, VR> leftJoin(final KTable<K, VO> table,
                                        final ValueJoiner<? super V, ? super VO, ? extends VR> joiner) {
    return leftJoin(table, toValueJoinerWithKey(joiner));
}

@Override
public <VO, VR> KStream<K, VR> leftJoin(final KTable<K, VO> table,
                                        final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? extends VR> joiner) {
    return leftJoin(table, joiner, Joined.with(null, null, null));
}

@Override
public <VO, VR> KStream<K, VR> leftJoin(final KTable<K, VO> table,
                                        final ValueJoiner<? super V, ? super VO, ? extends VR> joiner,
                                        final Joined<K, V, VO> joined) {
    Objects.requireNonNull(table, "table can't be null");
    Objects.requireNonNull(joiner, "joiner can't be null");
    Objects.requireNonNull(joined, "joined can't be null");
    return leftJoin(table, toValueJoinerWithKey(joiner), joined);
}

@Override
public <VO, VR> KStream<K, VR> leftJoin(final KTable<K, VO> table,
                                        final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? extends VR> joiner,
                                        final Joined<K, V, VO> joined) {
    Objects.requireNonNull(table, "table can't be null");
    Objects.requireNonNull(joiner, "joiner can't be null");
    Objects.requireNonNull(joined, "joined can't be null");
    final JoinedInternal<K, V, VO> joinedInternal = new JoinedInternal<>(joined);
    final String name = joinedInternal.name();
    // Same repartition-first policy as join(), with the leftJoin flag set.
    if (repartitionRequired) {
        final KStreamImpl<K, V> thisStreamRepartitioned = repartitionForJoin(
            name != null ? name : this.name,
            joined.keySerde(),
            joined.valueSerde()
        );
        return thisStreamRepartitioned.doStreamTableJoin(table, joiner, joined, true);
    } else {
        return doStreamTableJoin(table, joiner, joined, true);
    }
}

@Override
public <KG, VG, VR> KStream<K, VR> join(final GlobalKTable<KG, VG> globalTable,
                                        final KeyValueMapper<? super K, ? super V, ? extends KG> keySelector,
                                        final ValueJoiner<? super V, ? super VG, ? extends VR> joiner) {
    return join(globalTable, keySelector, toValueJoinerWithKey(joiner));
}

@Override
public <KG, VG, VR> KStream<K, VR> join(final GlobalKTable<KG, VG> globalTable,
                                        final KeyValueMapper<? super K, ? super V, ? extends KG> keySelector,
                                        final ValueJoinerWithKey<? super K, ? super V, ? super VG, ? extends VR> joiner) {
    return globalTableJoin(globalTable, keySelector, joiner, false, NamedInternal.empty());
}

@Override
public <KG, VG, VR> KStream<K, VR> join(final GlobalKTable<KG, VG> globalTable,
                                        final KeyValueMapper<? super K, ? super V, ? extends KG> keySelector,
                                        final ValueJoiner<? super V, ? super VG, ? extends VR> joiner,
                                        final Named named) {
    return join(globalTable, keySelector, toValueJoinerWithKey(joiner), named);
}

@Override
public <KG, VG, VR> KStream<K, VR> join(final GlobalKTable<KG, VG> globalTable,
                                        final KeyValueMapper<? super K, ? super V, ? extends KG> keySelector,
                                        final ValueJoinerWithKey<? super K, ? super V, ? super VG, ? extends VR> joiner,
                                        final Named named) {
    return globalTableJoin(globalTable, keySelector, joiner, false, named);
}

@Override
public <KG, VG, VR> KStream<K, VR> leftJoin(final GlobalKTable<KG, VG> globalTable,
                                            final KeyValueMapper<? super K, ? super V, ? extends KG> keySelector,
                                            final ValueJoiner<? super V, ? super VG, ? extends VR> joiner) {
    return leftJoin(globalTable, keySelector, toValueJoinerWithKey(joiner));
}

@Override
public <KG, VG, VR> KStream<K, VR> leftJoin(final GlobalKTable<KG, VG> globalTable,
                                            final KeyValueMapper<? super K, ? super V, ? extends KG> keySelector,
                                            final ValueJoinerWithKey<? super K, ? super V, ? super VG, ? extends VR> joiner) {
    return globalTableJoin(globalTable, keySelector, joiner, true, NamedInternal.empty());
}

@Override
public <KG, VG, VR> KStream<K, VR> leftJoin(final GlobalKTable<KG, VG> globalTable,
                                            final KeyValueMapper<? super K, ? super V, ? extends KG> keySelector,
                                            final ValueJoiner<? super V, ? super VG, ? extends VR> joiner,
                                            final Named named) {
    return leftJoin(globalTable, keySelector, toValueJoinerWithKey(joiner), named);
}

@Override
public <KG, VG, VR> KStream<K, VR> leftJoin(final GlobalKTable<KG, VG> globalTable,
                                            final KeyValueMapper<? super K, ? super V, ? extends KG> keySelector,
                                            final ValueJoinerWithKey<? super K, ? super V, ? super VG, ? extends VR> joiner,
                                            final Named named) {
    return globalTableJoin(globalTable, keySelector, joiner, true, named);
}

// Shared implementation of the stream-globalTable joins. Note: unlike the KTable
// joins above, no repartitioning step is performed here.
private <KG, VG, VR> KStream<K, VR> globalTableJoin(final GlobalKTable<KG, VG> globalTable,
                                                    final KeyValueMapper<? super K, ? super V, ? extends KG> keySelector,
                                                    final ValueJoinerWithKey<? super K, ? super V, ? super VG, ?
extends VR> joiner,
                                                    final boolean leftJoin,
                                                    final Named named) {
    Objects.requireNonNull(globalTable, "globalTable can't be null");
    Objects.requireNonNull(keySelector, "keySelector can't be null");
    Objects.requireNonNull(joiner, "joiner can't be null");
    Objects.requireNonNull(named, "named can't be null");

    final KTableValueGetterSupplier<KG, VG> valueGetterSupplier =
        ((GlobalKTableImpl<KG, VG>) globalTable).valueGetterSupplier();
    final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, LEFTJOIN_NAME);

    // Old PAPI. Needs to be migrated.
    @SuppressWarnings("deprecation")
    final org.apache.kafka.streams.processor.ProcessorSupplier<K, V> processorSupplier = new KStreamGlobalKTableJoin<>(
        valueGetterSupplier,
        joiner,
        keySelector,
        leftJoin);
    final ProcessorParameters<K, V, ?, ?> processorParameters = new ProcessorParameters<>(processorSupplier, name);
    // Empty store-name array and null other-stream name for the global-table case.
    final StreamTableJoinNode<K, V> streamTableJoinNode =
        new StreamTableJoinNode<>(name, processorParameters, new String[] {}, null);

    builder.addGraphNode(graphNode, streamTableJoinNode);

    // do not have serde for joined result
    return new KStreamImpl<>(
        name,
        keySerde,
        null,
        subTopologySourceNodes,
        repartitionRequired,
        streamTableJoinNode,
        builder);
}

// Shared implementation of the stream-table joins; callers must already have
// repartitioned this stream when that was required.
@SuppressWarnings("unchecked")
private <VO, VR> KStream<K, VR> doStreamTableJoin(final KTable<K, VO> table,
                                                  final ValueJoinerWithKey<? super K, ? super V, ? super VO, ? extends VR> joiner,
                                                  final Joined<K, V, VO> joined,
                                                  final boolean leftJoin) {
    Objects.requireNonNull(table, "table can't be null");
    Objects.requireNonNull(joiner, "joiner can't be null");

    // Stream-table joins require co-partitioned inputs.
    final Set<String> allSourceNodes = ensureCopartitionWith(Collections.singleton((AbstractStream<K, VO>) table));

    final JoinedInternal<K, V, VO> joinedInternal = new JoinedInternal<>(joined);
    final NamedInternal renamed = new NamedInternal(joinedInternal.name());

    final String name = renamed.orElseGenerateWithPrefix(builder, leftJoin ? LEFTJOIN_NAME : JOIN_NAME);
    // Old PAPI. Needs to be migrated.
    @SuppressWarnings("deprecation")
    final org.apache.kafka.streams.processor.ProcessorSupplier<K, V> processorSupplier = new KStreamKTableJoin<>(
        ((KTableImpl<K, ?, VO>) table).valueGetterSupplier(),
        joiner,
        leftJoin);

    final ProcessorParameters<K, V, ?, ?> processorParameters = new ProcessorParameters<>(processorSupplier, name);
    final StreamTableJoinNode<K, V> streamTableJoinNode = new StreamTableJoinNode<>(
        name,
        processorParameters,
        ((KTableImpl<K, ?, VO>) table).valueGetterSupplier().storeNames(),
        this.name
    );

    builder.addGraphNode(graphNode, streamTableJoinNode);

    // do not have serde for joined result
    return new KStreamImpl<>(
        name,
        joined.keySerde() != null ? joined.keySerde() : keySerde,
        null,
        allSourceNodes,
        false,
        streamTableJoinNode,
        builder);
}

@Override
public <KR, VR> KStream<KR, VR> transform(final TransformerSupplier<? super K, ? super V, KeyValue<KR, VR>> transformerSupplier,
                                          final String... stateStoreNames) {
    Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null");
    final String name = builder.newProcessorName(TRANSFORM_NAME);
    // Adapt the single-record transformer onto the flat-transform code path.
    return flatTransform(new TransformerSupplierAdapter<>(transformerSupplier), Named.as(name), stateStoreNames);
}

@Override
public <KR, VR> KStream<KR, VR> transform(final TransformerSupplier<? super K, ? super V, KeyValue<KR, VR>> transformerSupplier,
                                          final Named named,
                                          final String... stateStoreNames) {
    Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null");
    return flatTransform(new TransformerSupplierAdapter<>(transformerSupplier), named, stateStoreNames);
}

@Override
public <K1, V1> KStream<K1, V1> flatTransform(final TransformerSupplier<? super K, ? super V, Iterable<KeyValue<K1, V1>>> transformerSupplier,
                                              final String... stateStoreNames) {
    Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null");
    final String name = builder.newProcessorName(TRANSFORM_NAME);
    return flatTransform(transformerSupplier, Named.as(name), stateStoreNames);
}

@Override
public <K1, V1> KStream<K1, V1> flatTransform(final TransformerSupplier<? super K, ? super V, Iterable<KeyValue<K1, V1>>> transformerSupplier,
                                              final Named named,
                                              final String... stateStoreNames) {
    Objects.requireNonNull(transformerSupplier, "transformerSupplier can't be null");
    Objects.requireNonNull(named, "named can't be null");
    Objects.requireNonNull(stateStoreNames, "stateStoreNames can't be a null array");
    ApiUtils.checkSupplier(transformerSupplier);
    for (final String stateStoreName : stateStoreNames) {
        Objects.requireNonNull(stateStoreName, "stateStoreNames can't contain `null` as store name");
    }
    final String name = new NamedInternal(named).name();
    final StatefulProcessorNode<? super K, ? super V> transformNode = new StatefulProcessorNode<>(
        name,
        new ProcessorParameters<>(new KStreamFlatTransform<>(transformerSupplier), name),
        stateStoreNames);
    // The transformer may emit arbitrary keys, so downstream key-based ops must repartition.
    transformNode.keyChangingOperation(true);

    builder.addGraphNode(graphNode, transformNode);

    // cannot inherit key and value serde
    return new KStreamImpl<>(
        name,
        null,
        null,
        subTopologySourceNodes,
        true,
        transformNode,
        builder);
}

@Override
public <VR> KStream<K, VR> transformValues(final ValueTransformerSupplier<? super V, ? extends VR> valueTransformerSupplier,
                                           final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doTransformValues(
        toValueTransformerWithKeySupplier(valueTransformerSupplier),
        NamedInternal.empty(),
        stateStoreNames);
}

@Override
public <VR> KStream<K, VR> transformValues(final ValueTransformerSupplier<? super V, ? extends VR> valueTransformerSupplier,
                                           final Named named,
                                           final String...
stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    Objects.requireNonNull(named, "named can't be null");
    return doTransformValues(
        toValueTransformerWithKeySupplier(valueTransformerSupplier),
        new NamedInternal(named),
        stateStoreNames);
}

@Override
public <VR> KStream<K, VR> transformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> valueTransformerSupplier,
                                           final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doTransformValues(valueTransformerSupplier, NamedInternal.empty(), stateStoreNames);
}

@Override
public <VR> KStream<K, VR> transformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> valueTransformerSupplier,
                                           final Named named,
                                           final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    Objects.requireNonNull(named, "named can't be null");
    return doTransformValues(valueTransformerSupplier, new NamedInternal(named), stateStoreNames);
}

// Shared implementation of transformValues(): value-only transformation, so the key
// (and hence the partitioning / repartitionRequired flag) is carried through unchanged.
private <VR> KStream<K, VR> doTransformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, ? extends VR> valueTransformerWithKeySupplier,
                                              final NamedInternal named,
                                              final String... stateStoreNames) {
    Objects.requireNonNull(stateStoreNames, "stateStoreNames can't be a null array");
    for (final String stateStoreName : stateStoreNames) {
        Objects.requireNonNull(stateStoreName, "stateStoreNames can't contain `null` as store name");
    }
    ApiUtils.checkSupplier(valueTransformerWithKeySupplier);

    final String name = named.orElseGenerateWithPrefix(builder, TRANSFORMVALUES_NAME);
    final StatefulProcessorNode<? super K, ? super V> transformNode = new StatefulProcessorNode<>(
        name,
        new ProcessorParameters<>(new KStreamTransformValues<>(valueTransformerWithKeySupplier), name),
        stateStoreNames);
    transformNode.setValueChangingOperation(true);

    builder.addGraphNode(graphNode, transformNode);

    // cannot inherit value serde
    return new KStreamImpl<>(
        name,
        keySerde,
        null,
        subTopologySourceNodes,
        repartitionRequired,
        transformNode,
        builder);
}

@Override
public <VR> KStream<K, VR> flatTransformValues(final ValueTransformerSupplier<? super V, Iterable<VR>> valueTransformerSupplier,
                                               final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doFlatTransformValues(
        toValueTransformerWithKeySupplier(valueTransformerSupplier),
        NamedInternal.empty(),
        stateStoreNames);
}

@Override
public <VR> KStream<K, VR> flatTransformValues(final ValueTransformerSupplier<? super V, Iterable<VR>> valueTransformerSupplier,
                                               final Named named,
                                               final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doFlatTransformValues(
        toValueTransformerWithKeySupplier(valueTransformerSupplier),
        named,
        stateStoreNames);
}

@Override
public <VR> KStream<K, VR> flatTransformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, Iterable<VR>> valueTransformerSupplier,
                                               final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doFlatTransformValues(valueTransformerSupplier, NamedInternal.empty(), stateStoreNames);
}

@Override
public <VR> KStream<K, VR> flatTransformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, Iterable<VR>> valueTransformerSupplier,
                                               final Named named,
                                               final String... stateStoreNames) {
    Objects.requireNonNull(valueTransformerSupplier, "valueTransformerSupplier can't be null");
    return doFlatTransformValues(valueTransformerSupplier, named, stateStoreNames);
}

// Shared implementation of flatTransformValues(); key-preserving like doTransformValues().
private <VR> KStream<K, VR> doFlatTransformValues(final ValueTransformerWithKeySupplier<? super K, ? super V, Iterable<VR>> valueTransformerWithKeySupplier,
                                                  final Named named,
                                                  final String... stateStoreNames) {
    Objects.requireNonNull(stateStoreNames, "stateStoreNames can't be a null array");
    for (final String stateStoreName : stateStoreNames) {
        Objects.requireNonNull(stateStoreName, "stateStoreNames can't contain `null` as store name");
    }
    ApiUtils.checkSupplier(valueTransformerWithKeySupplier);

    final String name = new NamedInternal(named).orElseGenerateWithPrefix(builder, TRANSFORMVALUES_NAME);
    final StatefulProcessorNode<? super K, ? super V> transformNode = new StatefulProcessorNode<>(
        name,
        new ProcessorParameters<>(new KStreamFlatTransformValues<>(valueTransformerWithKeySupplier), name),
        stateStoreNames);
    transformNode.setValueChangingOperation(true);

    builder.addGraphNode(graphNode, transformNode);

    // cannot inherit value serde
    return new KStreamImpl<>(
        name,
        keySerde,
        null,
        subTopologySourceNodes,
        repartitionRequired,
        transformNode,
        builder);
}

@Override
@Deprecated
public void process(final org.apache.kafka.streams.processor.ProcessorSupplier<? super K, ? super V> processorSupplier,
                    final String... stateStoreNames) {
    process(processorSupplier, Named.as(builder.newProcessorName(PROCESSOR_NAME)), stateStoreNames);
}

@Override
public void process(final ProcessorSupplier<? super K, ? super V, Void, Void> processorSupplier,
                    final String... stateStoreNames) {
    process(processorSupplier, Named.as(builder.newProcessorName(PROCESSOR_NAME)), stateStoreNames);
}

@Override
@Deprecated
public void process(final org.apache.kafka.streams.processor.ProcessorSupplier<? super K, ? super V> processorSupplier,
                    final Named named,
                    final String... stateStoreNames) {
    Objects.requireNonNull(processorSupplier, "processorSupplier can't be null");
    Objects.requireNonNull(named, "named can't be null");
    Objects.requireNonNull(stateStoreNames, "stateStoreNames can't be a null array");
    ApiUtils.checkSupplier(processorSupplier);
    for (final String stateStoreName : stateStoreNames) {
        // NOTE(review): message differs from the transform variants
        // ("stateStoreNames can't contain `null` as store name") — possibly worth aligning.
        Objects.requireNonNull(stateStoreName, "stateStoreNames can't be null");
    }
    final String name = new NamedInternal(named).name();
    // process() is terminal: the node is attached to the graph but no KStream is returned.
    final StatefulProcessorNode<? super K, ? super V> processNode = new StatefulProcessorNode<>(
        name,
        new ProcessorParameters<>(processorSupplier, name),
        stateStoreNames);
    builder.addGraphNode(graphNode, processNode);
}

@Override
public void process(final ProcessorSupplier<? super K, ? super V, Void, Void> processorSupplier,
                    final Named named,
                    final String... stateStoreNames) {
    Objects.requireNonNull(processorSupplier, "processorSupplier can't be null");
    Objects.requireNonNull(named, "named can't be null");
    Objects.requireNonNull(stateStoreNames, "stateStoreNames can't be a null array");
    ApiUtils.checkSupplier(processorSupplier);
    for (final String stateStoreName : stateStoreNames) {
        Objects.requireNonNull(stateStoreName, "stateStoreNames can't be null");
    }
    final String name = new NamedInternal(named).name();
    // Terminal operation: attach the node, return nothing.
    final StatefulProcessorNode<? super K, ? super V> processNode = new StatefulProcessorNode<>(
        name,
        new ProcessorParameters<>(processorSupplier, name),
        stateStoreNames);
    builder.addGraphNode(graphNode, processNode);
}
}
package com.didichuxing.datachannel.agent.source.log.type;

import java.io.File;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;

import com.didichuxing.datachannel.agent.common.api.StandardLogType;
import com.didichuxing.datachannel.agent.source.log.config.MatchConfig;
import com.didichuxing.datachannel.agent.source.log.utils.FileUtils;

/**
 * @description: adapter for the "public" standard log format — a log key followed by
 *               "||"-separated key=value pairs that must include a parseable
 *               "timestamp" field and no duplicate keys.
 * @author: huangjw
 * @Date: 18/8/9 16:18
 */
public class PublicType extends AbstractLogType {

    /** Maximum accepted length (exclusive) of the log key. */
    static final int             LOG_KEY_LEN_LIMIT = 80;

    static String                log_pattern       = "^[a-z_]+(\\w|_)+(\\w)+$";

    /** Compiled once; Pattern.matches() would recompile the regex for every line. */
    private static final Pattern LOG_KEY_PATTERN   = Pattern.compile(log_pattern);

    // NOTE: SimpleDateFormat is not thread-safe; all parsing below synchronizes on it.
    static SimpleDateFormat      format            = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

    public PublicType() {
        super(StandardLogType.Public.getType());
    }

    /**
     * Samples up to the first 10 lines of the file; the file matches only when every
     * sampled line conforms to the public log format.
     */
    @Override
    public boolean check(File file, MatchConfig matchConfig) {
        List<String> contents = FileUtils.readFileContent(file, 10);
        if (contents == null || contents.isEmpty()) {
            // 若文件为空,直接跳过 (empty file: skip it)
            return false;
        }
        for (String content : contents) {
            if (!checkContent(content)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Validates a single line of the form "<prefix> <log_key>||k1=v1||k2=v2||...".
     * Note: only pairs terminated by "||" are inspected; a trailing segment after the
     * last "||" is ignored (matches the original behavior).
     */
    private boolean checkContent(String logLine) {
        // The format requires at least one "||" separator.
        if (!logLine.contains("||")) {
            return false;
        }
        int firstIndex = logLine.indexOf("||");
        String keyFields = logLine.substring(0, firstIndex);
        String paramFields = logLine.substring(firstIndex + 2);

        // The log key is the last whitespace-separated token before the first "||".
        String logKey = keyFields.contains(" ")
            ? keyFields.substring(keyFields.lastIndexOf(" ") + 1)
            : keyFields;

        // Key must be non-empty, shorter than the limit, and match the key pattern.
        if (logKey.length() >= LOG_KEY_LEN_LIMIT || logKey.isEmpty()) {
            return false;
        }
        if (!LOG_KEY_PATTERN.matcher(logKey).matches()) {
            return false;
        }

        Set<String> seenKeys = new HashSet<>();
        boolean hasTimestamp = false;
        while (paramFields.contains("||")) {
            int sepIndex = paramFields.indexOf("||");
            String pair = paramFields.substring(0, sepIndex);
            int eqIndex = pair.indexOf("=");
            // BUGFIX: a field without '=' used to throw StringIndexOutOfBoundsException
            // via substring(0, -1); treat such a line as non-matching instead.
            if (eqIndex < 0) {
                return false;
            }
            String key = pair.substring(0, eqIndex);
            String value = pair.substring(eqIndex + 1);

            // 时间戳校验 (timestamp validation)
            if ("timestamp".equals(key)) {
                hasTimestamp = true;
                try {
                    synchronized (format) { // SimpleDateFormat is not thread-safe
                        format.parse(value);
                    }
                } catch (ParseException e) {
                    return false;
                }
            }

            // 重复字段校验 (duplicate keys are not allowed)
            if (!seenKeys.add(key)) {
                return false;
            }
            paramFields = paramFields.substring(sepIndex + 2);
        }
        // A valid line must carry a timestamp field.
        return hasTimestamp;
    }
}
package com.admin.biz.system.impl;

import com.admin.biz.system.SystemMenuBiz;
import com.admin.core.exception.BusinessException;
import com.admin.core.constants.ResponseStatus;
import com.admin.dao.system.dto.UpdateSystemMenuSortDTO;
import com.admin.dao.system.model.SystemMenu;
import com.admin.dao.system.vo.SystemMenuListVO;
import com.admin.dao.system.vo.SystemMenuNodeVO;
import com.admin.service.system.SystemMenuService;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;

import java.util.ArrayList;
import java.util.List;

/**
 * Menu management business layer: creation, reordering, tree assembly and deletion.
 */
@Service
public class SystemMenuBizImpl implements SystemMenuBiz {

    @Autowired
    private SystemMenuService systemMenuService;

    @Override
    public Integer create(SystemMenu systemMenu) {
        // New menus are appended at the end of their parent's children, so the
        // initial sort value equals the current number of siblings.
        systemMenu.setSort(countChildren(systemMenu.getParentId()));
        return systemMenuService.create(systemMenu);
    }

    @Override
    public void updateById(SystemMenu systemMenu) {
        SystemMenu dbMenu = systemMenuService.findById(systemMenu.getId());
        // If the parent changed, append the menu at the end of its new parent's children.
        // (Equivalent to the previous boxed-Integer `!=` + null-check + !equals chain.)
        // NOTE(review): when the stored parent is null (root menu) this never fires, so
        // moving a root menu under a parent keeps its old sort value — confirm intended.
        if (dbMenu.getParentId() != null && !dbMenu.getParentId().equals(systemMenu.getParentId())) {
            systemMenu.setSort(countChildren(systemMenu.getParentId()));
        }
        systemMenuService.updateById(systemMenu);
    }

    @Override
    public void updateSort(UpdateSystemMenuSortDTO dto) {
        SystemMenu currentMenu = systemMenuService.findById(dto.getId());
        // Collect the sibling list the menu lives in (root menus have no parent).
        List<SystemMenu> menuPool;
        if (currentMenu.getParentId() == null) {
            menuPool = systemMenuService.findRootList();
        } else {
            SystemMenu queryDto = new SystemMenu();
            queryDto.setParentId(currentMenu.getParentId());
            menuPool = systemMenuService.findList(queryDto);
        }
        // Locate the menu inside its sibling list.
        int currentMenuIndex = 0;
        for (int i = 0; i < menuPool.size(); i++) {
            if (menuPool.get(i).getId().equals(dto.getId())) {
                currentMenuIndex = i;
                break;
            }
        }
        if ("top".equals(dto.getDirection())) {
            // Move up: swap with the previous sibling; no-op when already first.
            if (currentMenuIndex - 1 < 0) {
                return;
            }
            SystemMenu preMenu = menuPool.remove(currentMenuIndex - 1);
            menuPool.add(currentMenuIndex, preMenu);
        } else {
            // Move down: swap with the next sibling; no-op when already last.
            if (currentMenuIndex + 1 > menuPool.size() - 1) {
                return;
            }
            SystemMenu nextMenu = menuPool.remove(currentMenuIndex + 1);
            menuPool.add(currentMenuIndex, nextMenu);
        }
        // Re-number the whole sibling list and persist it in one batch.
        for (int i = 0; i < menuPool.size(); i++) {
            menuPool.get(i).setSort(i);
        }
        systemMenuService.updateByIdInBatch(menuPool);
    }

    @Override
    public List<SystemMenuListVO> findTree() {
        List<SystemMenuListVO> menus = systemMenuService.findList();
        List<SystemMenuListVO> rootMenus = new ArrayList<>();
        // Pick out the root menus (no parent) first.
        for (SystemMenu menu : menus) {
            if (menu.getParentId() == null) {
                SystemMenuListVO rootMenu = new SystemMenuListVO();
                BeanUtils.copyProperties(menu, rootMenu, "children");
                rootMenu.setChildren(new ArrayList<>());
                rootMenus.add(rootMenu);
            }
        }
        menus.removeIf(menu -> menu.getParentId() == null);
        // Recursively attach the remaining menus under their parents.
        for (SystemMenuListVO child : rootMenus) {
            this.fillChildren(child, menus);
        }
        return rootMenus;
    }

    @Override
    public List<SystemMenuNodeVO> findTree(Integer userId) {
        // cleanup: removed a query DTO that was built here but never used
        List<SystemMenu> menus = systemMenuService.findByUserId(userId);
        List<SystemMenuNodeVO> rootNodes = new ArrayList<>();
        // Pick out the root menus (no parent) first.
        for (SystemMenu menu : menus) {
            if (menu.getParentId() == null) {
                SystemMenuNodeVO nodeVO = new SystemMenuNodeVO();
                nodeVO.setId(menu.getId());
                nodeVO.setIndex("menu_" + menu.getId());
                nodeVO.setLabel(menu.getName());
                nodeVO.setUrl(menu.getPath());
                nodeVO.setIcon(menu.getIcon());
                nodeVO.setChildren(new ArrayList<>());
                rootNodes.add(nodeVO);
            }
        }
        menus.removeIf(menu -> menu.getParentId() == null);
        for (SystemMenuNodeVO child : rootNodes) {
            this.fillChildren(child, menus);
        }
        return rootNodes;
    }

    @Override
    public void deleteById(Integer id) {
        // Deleting a menu removes its whole subtree; any fixed menu in it blocks the delete.
        List<Integer> ids = systemMenuService.findChildren(id);
        ids.add(id);
        for (Integer menuId : ids) {
            SystemMenu menu = systemMenuService.findById(menuId);
            if (menu == null) {
                continue;
            }
            if (menu.getFixed()) {
                throw new BusinessException(ResponseStatus.NOT_ALLOWED.getCode(), "请勿删除" + menu.getName() + ", 因为这是固定菜单");
            }
        }
        systemMenuService.deleteByIdInBatch(ids);
    }

    @Override
    @Transactional
    public void deleteByIdInBatch(List<Integer> ids) {
        if (CollectionUtils.isEmpty(ids)) {
            return;
        }
        // Each id carries its own subtree expansion and fixed-menu check.
        for (Integer id : ids) {
            this.deleteById(id);
        }
    }

    /**
     * Number of non-deleted children currently under the given parent (null = root level).
     * Cleanup: replaces the former Integer.valueOf("" + count) string round-trip.
     */
    private int countChildren(Integer parentId) {
        SystemMenu countDto = new SystemMenu();
        countDto.setParentId(parentId);
        countDto.setDeleted(Boolean.FALSE);
        return (int) systemMenuService.count(countDto);
    }

    /**
     * 填充子菜单 (fill in child menus, recursively)
     * @author Eva.Caesar Liu
     * @date 2021/08/31 21:16
     */
    private void fillChildren(SystemMenuListVO parent, List<SystemMenuListVO> menus) {
        if (menus.size() == 0) {
            return;
        }
        List<Integer> handledIds = new ArrayList<>();
        for (SystemMenu menu : menus) {
            if (parent.getId().equals(menu.getParentId())) {
                SystemMenuListVO child = new SystemMenuListVO();
                BeanUtils.copyProperties(menu, child, "children");
                child.setChildren(new ArrayList<>());
                parent.getChildren().add(child);
                handledIds.add(menu.getId());
            }
        }
        // Attached menus are consumed so deeper recursion scans a shrinking list.
        menus.removeIf(menu -> handledIds.contains(menu.getId()));
        // NOTE(review): hasChildren ends up TRUE for leaf nodes and FALSE when children
        // exist — this looks inverted; behavior preserved as-is, confirm intended meaning.
        parent.setHasChildren(Boolean.TRUE);
        if (parent.getChildren().size() > 0) {
            parent.setHasChildren(Boolean.FALSE);
            for (SystemMenuListVO child : parent.getChildren()) {
                this.fillChildren(child, menus);
            }
        }
    }

    /**
     * 填充子菜单 (fill in child menu nodes, recursively)
     * @author Eva.Caesar Liu
     * @date 2021/08/31 21:16
     */
    private void fillChildren(SystemMenuNodeVO parent, List<SystemMenu> menus) {
        if (menus.size() == 0) {
            return;
        }
        List<Integer> handledIds = new ArrayList<>();
        for (SystemMenu menu : menus) {
            if (parent.getId().equals(menu.getParentId())) {
                SystemMenuNodeVO child = new SystemMenuNodeVO();
                child.setId(menu.getId());
                child.setLabel(menu.getName());
                child.setUrl(menu.getPath());
                child.setIcon(menu.getIcon());
                child.setIndex("menu_" + menu.getId());
                child.setChildren(new ArrayList<>());
                parent.getChildren().add(child);
                handledIds.add(menu.getId());
            }
        }
        // Attached menus are consumed so deeper recursion scans a shrinking list.
        menus.removeIf(menu -> handledIds.contains(menu.getId()));
        for (SystemMenuNodeVO child : parent.getChildren()) {
            this.fillChildren(child, menus);
        }
    }
}
/** * Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved. * * You may not modify, use, reproduce, or distribute this software except in * compliance with the terms of the License at: * https://github.com/javaee/tutorial-examples/LICENSE.txt */ /* * To change this template, choose Tools | Templates and open the template in * the editor. */ package javaeetutorial.dukestutoring.web.util; import java.util.HashMap; import java.util.Map; import javax.faces.context.FacesContext; /** * * @author ievans */ public class EntityConverter { private static final String key = "dukestutoring.web.util.EntityConverter"; public EntityConverter() { } protected Map<String, Object> getViewMap(FacesContext context) { Map<String, Object> viewMap = context.getViewRoot().getViewMap(); @SuppressWarnings({"unchecked", "rawtypes"}) Map<String, Object> idMap = (Map) viewMap.get(key); if (idMap == null) { idMap = new HashMap<>(); viewMap.put(key, idMap); } return idMap; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.physical.impl.scan;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import org.apache.drill.categories.RowSetTests;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.physical.impl.protocol.SchemaTracker;
import org.apache.drill.exec.physical.impl.scan.ScanTestUtils.MockScanBuilder;
import org.apache.drill.exec.physical.impl.scan.project.ReaderSchemaOrchestrator;
import org.apache.drill.exec.physical.impl.scan.project.ScanSchemaOrchestrator;
import org.apache.drill.exec.physical.impl.scan.project.ScanSchemaOrchestrator.ScanOrchestratorBuilder;
import org.apache.drill.exec.physical.resultSet.ResultSetLoader;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.record.BatchSchemaBuilder;
import org.apache.drill.exec.record.metadata.SchemaBuilder;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.test.SubOperatorTest;
import org.apache.drill.exec.physical.rowSet.RowSetTestUtils;
import org.apache.drill.exec.physical.rowSet.RowSet.SingleRowSet;
import org.apache.drill.test.rowSet.RowSetUtilities;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * Test the early-schema support of the scan orchestrator. "Early schema"
 * refers to the case in which the reader can provide a schema when the
 * reader is opened. Examples: CSV, HBase, MapR-DB binary, JDBC.
 * <p>
 * The tests here focus on the scan orchestrator itself; the tests assume
 * that tests for lower-level components have already passed.
 */
@Category(RowSetTests.class)
public class TestScanOrchestratorEarlySchema extends SubOperatorTest {

  /**
   * Test SELECT * from an early-schema table of (a, b)
   */
  @Test
  public void testEarlySchemaWildcard() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // SELECT * ...
    builder.projection(RowSetTestUtils.projectAll());
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // ... FROM table
    ReaderSchemaOrchestrator reader = scanner.startReader();

    // file schema (a, b)
    TupleMetadata tableSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .buildSchema();

    // Create the table loader
    ResultSetLoader loader = reader.makeTableLoader(tableSchema);

    // Simulate a first reader in a scan that can provide an
    // empty batch to define schema.
    {
      reader.defineSchema();
      SingleRowSet expected = fixture.rowSetBuilder(tableSchema)
          .build();
      // Schema-only batch: output exists but carries no rows.
      assertNotNull(scanner.output());
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }

    // Create a batch of data.
    reader.startBatch();
    loader.writer()
      .addRow(1, "fred")
      .addRow(2, "wilma");
    reader.endBatch();

    // Verify
    {
      SingleRowSet expected = fixture.rowSetBuilder(tableSchema)
        .addRow(1, "fred")
        .addRow(2, "wilma")
        .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }

    // Second batch.
    reader.startBatch();
    loader.writer()
      .addRow(3, "barney")
      .addRow(4, "betty");
    reader.endBatch();

    // Verify
    {
      SingleRowSet expected = fixture.rowSetBuilder(tableSchema)
        .addRow(3, "barney")
        .addRow(4, "betty")
        .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }

    // Explicit reader close. (All other tests are lazy, they
    // use an implicit close.)
    scanner.closeReader();
    scanner.close();
  }

  /**
   * Test SELECT a, b FROM table(a, b)
   */
  @Test
  public void testEarlySchemaSelectAll() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // SELECT a, b ...
    builder.projection(RowSetTestUtils.projectList("a", "b"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // ... FROM table
    ReaderSchemaOrchestrator reader = scanner.startReader();

    // file schema (a, b)
    TupleMetadata tableSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .buildSchema();

    // Create the table loader
    ResultSetLoader loader = reader.makeTableLoader(tableSchema);

    // Don't bother with an empty batch here or in other tests.
    // Simulates the second reader in a scan.

    // Create a batch of data.
    reader.startBatch();
    loader.writer()
      .addRow(1, "fred")
      .addRow(2, "wilma");
    reader.endBatch();

    // Verify
    SingleRowSet expected = fixture.rowSetBuilder(tableSchema)
      .addRow(1, "fred")
      .addRow(2, "wilma")
      .build();
    RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));

    scanner.close();
  }

  /**
   * Test SELECT b, a FROM table(a, b)
   */
  @Test
  public void testEarlySchemaSelectAllReorder() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // SELECT b, a ...
    builder.projection(RowSetTestUtils.projectList("b", "a"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // ... FROM table
    ReaderSchemaOrchestrator reader = scanner.startReader();

    // file schema (a, b)
    TupleMetadata tableSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .buildSchema();

    // Create the table loader
    ResultSetLoader loader = reader.makeTableLoader(tableSchema);

    // Output columns appear in SELECT order, not table order.
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("b", MinorType.VARCHAR)
        .add("a", MinorType.INT)
        .buildSchema();

    // Create a batch of data.
    reader.startBatch();
    loader.writer()
      .addRow(1, "fred")
      .addRow(2, "wilma");
    reader.endBatch();

    // Verify
    SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
      .addRow("fred", 1)
      .addRow("wilma", 2)
      .build();
    RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));

    scanner.close();
  }

  /**
   * Test SELECT a, b, c FROM table(a, b)
   * c will be null
   */
  @Test
  public void testEarlySchemaSelectExtra() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // SELECT a, b, c ...
    builder.projection(RowSetTestUtils.projectList("a", "b", "c"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // ... FROM table
    ReaderSchemaOrchestrator reader = scanner.startReader();

    // file schema (a, b)
    TupleMetadata tableSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .buildSchema();

    // Create the table loader
    ResultSetLoader loader = reader.makeTableLoader(tableSchema);

    // Missing column "c" materializes as the default null type
    // (nullable INT).
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .addNullable("c", MinorType.INT)
        .buildSchema();

    // Create a batch of data.
    reader.startBatch();
    loader.writer()
      .addRow(1, "fred")
      .addRow(2, "wilma");
    reader.endBatch();

    // Verify
    SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
      .addRow(1, "fred", null)
      .addRow(2, "wilma", null)
      .build();
    RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));

    scanner.close();
  }

  /**
   * Test SELECT a, b, c FROM table(a, b)
   * c will be null of type VARCHAR
   */
  @Test
  public void testEarlySchemaSelectExtraCustomType() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // Null columns of type VARCHAR
    MajorType nullType = MajorType.newBuilder()
        .setMinorType(MinorType.VARCHAR)
        .setMode(DataMode.OPTIONAL)
        .build();
    builder.nullType(nullType);

    // SELECT a, b, c ...
    builder.projection(RowSetTestUtils.projectList("a", "b", "c"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // ... FROM table ...
    ReaderSchemaOrchestrator reader = scanner.startReader();

    // file schema (a, b)
    TupleMetadata tableSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .buildSchema();

    // Create the table loader
    ResultSetLoader loader = reader.makeTableLoader(tableSchema);

    // The configured null type, not the default, fills column "c".
    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .addNullable("c", MinorType.VARCHAR)
        .buildSchema();

    // Create a batch of data.
    reader.startBatch();
    loader.writer()
      .addRow(1, "fred")
      .addRow(2, "wilma");
    reader.endBatch();

    // Verify
    SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
      .addRow(1, "fred", null)
      .addRow(2, "wilma", null)
      .build();
    RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));

    scanner.close();
  }

  /**
   * Test SELECT a FROM table(a, b)
   */
  @Test
  public void testEarlySchemaSelectSubset() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // SELECT a ...
    builder.projection(RowSetTestUtils.projectList("a"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // ... FROM table
    ReaderSchemaOrchestrator reader = scanner.startReader();

    // file schema (a, b)
    TupleMetadata tableSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .buildSchema();

    // Create the table loader
    ResultSetLoader loader = reader.makeTableLoader(tableSchema);

    // Verify that unprojected column is unprojected in the
    // table loader.
    assertFalse(loader.writer().column("b").isProjected());

    TupleMetadata expectedSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .buildSchema();

    // Create a batch of data. Values written to "b" are discarded
    // by the loader since "b" is unprojected.
    reader.startBatch();
    loader.writer()
      .addRow(1, "fred")
      .addRow(2, "wilma");
    reader.endBatch();

    // Verify
    SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
      .addRow(1)
      .addRow(2)
      .build();
    RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));

    scanner.close();
  }

  /**
   * Test SELECT - FROM table(a, b)
   */
  @Test
  public void testEarlySchemaSelectNone() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // SELECT ...
    // (Like SELECT COUNT(*) ...
    builder.projection(RowSetTestUtils.projectList());
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // ... FROM table
    ReaderSchemaOrchestrator reader = scanner.startReader();

    // file schema (a, b)
    TupleMetadata tableSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("b", MinorType.VARCHAR)
        .buildSchema();

    // Create the table loader
    ResultSetLoader loader = reader.makeTableLoader(tableSchema);

    // Verify that unprojected column is unprojected in the
    // table loader.
    assertTrue(loader.isProjectionEmpty());
    assertFalse(loader.writer().column("a").isProjected());
    assertFalse(loader.writer().column("b").isProjected());

    // Verify empty batch.
    BatchSchema expectedSchema = new BatchSchemaBuilder()
        .withSchemaBuilder(new SchemaBuilder())
        .build();

    // Create a batch of data.
    reader.startBatch();
    loader.writer()
      .addRow(1, "fred")
      .addRow(2, "wilma");
    reader.endBatch();

    // Verify
    {
      // Two rows, no data.
      SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
        .addRow()
        .addRow()
        .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }

    // Fast path to fill in empty rows
    reader.startBatch();
    loader.skipRows(10);
    reader.endBatch();

    // Verify
    {
      VectorContainer output = scanner.output();
      assertEquals(10, output.getRecordCount());
      output.zeroVectors();
    }

    scanner.close();
  }

  /**
   * Test SELECT * from an early-schema table of () (that is,
   * a schema that consists of zero columns.
   */
  @Test
  public void testEmptySchema() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // SELECT * ...
    builder.projection(RowSetTestUtils.projectAll());
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // ... FROM table
    ReaderSchemaOrchestrator reader = scanner.startReader();

    // file schema ()
    TupleMetadata tableSchema = new SchemaBuilder()
        .buildSchema();

    // Create the table loader
    reader.makeTableLoader(tableSchema);

    // Create a batch of data. Because there are no columns, it does
    // not make sense to ready any rows.
    reader.startBatch();
    reader.endBatch();

    // Verify
    {
      SingleRowSet expected = fixture.rowSetBuilder(tableSchema)
        .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }

    scanner.close();
  }

  /**
   * Test SELECT a from an early-schema table of () (that is,
   * a schema that consists of zero columns.
   */
  @Test
  public void testEmptySchemaExtra() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // SELECT * ...
    builder.projection(RowSetTestUtils.projectList("a"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // ... FROM table
    ReaderSchemaOrchestrator reader = scanner.startReader();

    // file schema ()
    TupleMetadata tableSchema = new SchemaBuilder()
        .buildSchema();

    // Create the table loader
    reader.makeTableLoader(tableSchema);

    // Projected column "a" does not exist in the table, so it
    // appears as the default nullable INT null column.
    TupleMetadata expectedSchema = new SchemaBuilder()
        .addNullable("a", MinorType.INT)
        .buildSchema();

    // Create a batch of data. Because there are no columns, it does
    // not make sense to ready any rows.
    reader.startBatch();
    reader.endBatch();

    // Verify
    SingleRowSet expected = fixture.rowSetBuilder(expectedSchema)
      .build();
    RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));

    scanner.close();
  }

  /**
   * The projection mechanism provides "type smoothing": null
   * columns prefer the type of previously-seen non-null columns.
   *
   * <code><pre>
   * SELECT a, b ...
   *
   * Table 1: (a: BIGINT, b: VARCHAR)
   * Table 2: (a: BIGINT)
   * Table 3: (b: VARCHAR)
   * </pre></code>
   * The result in all cases should be
   * <tt>(a : BIGINT, b: VARCHAR)</tt>
   */
  @Test
  public void testTypeSmoothingExplicit() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();
    TupleMetadata table1Schema = new SchemaBuilder()
        .add("A", MinorType.BIGINT)
        .addNullable("B", MinorType.VARCHAR)
        .addArray("C", MinorType.INT)
        .buildSchema();
    BatchSchema resultSchema = new BatchSchema(SelectionVectorMode.NONE, table1Schema.toFieldList());
    // The tracker's schema version changes only on real schema changes,
    // which is what these assertions check.
    SchemaTracker tracker = new SchemaTracker();

    // SELECT * ...
    builder.projection(RowSetTestUtils.projectList("a", "b", "c"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);
    int schemaVersion;
    {
      // ... FROM table1(a, b, c)
      ReaderSchemaOrchestrator reader = scanner.startReader();
      reader.makeTableLoader(table1Schema);
      reader.defineSchema();
      VectorContainer output = scanner.output();
      tracker.trackSchema(output);
      schemaVersion = tracker.schemaVersion();
      assertTrue(resultSchema.isEquivalent(output.getSchema()));
      scanner.closeReader();
    }
    {
      // ... FROM table1(a, c)
      //
      // B is dropped. But, it is nullable, so the vector cache
      // can supply the proper type to ensure continuity.
      TupleMetadata table2Schema = new SchemaBuilder()
          .add("A", MinorType.BIGINT)
          .addArray("C", MinorType.INT)
          .buildSchema();
      ReaderSchemaOrchestrator reader = scanner.startReader();
      reader.makeTableLoader(table2Schema);
      reader.defineSchema();
      VectorContainer output = scanner.output();
      tracker.trackSchema(output);
      assertEquals(schemaVersion, tracker.schemaVersion());
      assertTrue(resultSchema.isEquivalent(output.getSchema()));
      scanner.closeReader();
    }
    {
      // ... FROM table1(a, b)
      //
      // C is dropped. But, it is an array, which uses zero-elements
      // to indicate null, so the vector cache can fill in the type.
      TupleMetadata table3Schema = new SchemaBuilder()
          .add("A", MinorType.BIGINT)
          .addNullable("B", MinorType.VARCHAR)
          .buildSchema();
      ReaderSchemaOrchestrator reader = scanner.startReader();
      reader.makeTableLoader(table3Schema);
      reader.defineSchema();
      VectorContainer output = scanner.output();
      tracker.trackSchema(output);
      assertEquals(schemaVersion, tracker.schemaVersion());
      assertTrue(resultSchema.isEquivalent(output.getSchema()));
      scanner.closeReader();
    }
    {
      // ... FROM table1(b, c)
      //
      // This version carries over a non-nullable BIGINT, but that
      // can't become a null column, so nullable BIGINT is substituted,
      // resulting in a schema change.
      TupleMetadata table2Schema = new SchemaBuilder()
          .addNullable("B", MinorType.VARCHAR)
          .addArray("C", MinorType.INT)
          .buildSchema();
      ReaderSchemaOrchestrator reader = scanner.startReader();
      reader.makeTableLoader(table2Schema);
      reader.defineSchema();
      VectorContainer output = scanner.output();
      tracker.trackSchema(output);
      assertEquals(MinorType.BIGINT, output.getSchema().getColumn(0).getType().getMinorType());
      assertEquals(DataMode.OPTIONAL, output.getSchema().getColumn(0).getType().getMode());
      // Mode change REQUIRED -> OPTIONAL bumps the schema version.
      assertTrue(schemaVersion < tracker.schemaVersion());
      scanner.closeReader();
    }
    scanner.close();
  }

  /**
   * Test the ability of the scan scanner to "smooth" out schema changes
   * by reusing the type from a previous reader, if known. That is,
   * given three readers:<br>
   * (a, b)<br>
   * (b)<br>
   * (a, b)<br>
   * Then the type of column a should be preserved for the second reader that
   * does not include a. This works if a is nullable. If so, a's type will
   * be used for the empty column, rather than the usual nullable int.
   * <p>
   * Detailed testing of type matching for "missing" columns is done
   * in {@link #testNullColumnLoader()}.
   * <p>
   * As a side effect, makes sure that two identical tables (in this case,
   * separated by a different table) results in no schema change.
   */
  @Test
  public void testTypeSmoothing() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();

    // SELECT a, b ...
    builder.projection(RowSetTestUtils.projectList("a", "b"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // file schema (a, b)
    TupleMetadata twoColSchema = new SchemaBuilder()
        .add("a", MinorType.INT)
        .addNullable("b", MinorType.VARCHAR, 10)
        .buildSchema();

    SchemaTracker tracker = new SchemaTracker();
    int schemaVersion;
    {
      // ... FROM table 1
      ReaderSchemaOrchestrator reader = scanner.startReader();
      ResultSetLoader loader = reader.makeTableLoader(twoColSchema);

      // Projection of (a, b) to (a, b)
      reader.startBatch();
      loader.writer()
          .addRow(10, "fred")
          .addRow(20, "wilma");
      reader.endBatch();
      tracker.trackSchema(scanner.output());
      schemaVersion = tracker.schemaVersion();

      SingleRowSet expected = fixture.rowSetBuilder(twoColSchema)
          .addRow(10, "fred")
          .addRow(20, "wilma")
          .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }
    {
      // ... FROM table 2
      ReaderSchemaOrchestrator reader = scanner.startReader();

      // File schema (a)
      TupleMetadata oneColSchema = new SchemaBuilder()
          .add("a", MinorType.INT)
          .buildSchema();

      // Projection of (a) to (a, b), reusing b from above.
      ResultSetLoader loader = reader.makeTableLoader(oneColSchema);

      reader.startBatch();
      loader.writer()
          .addRow(30)
          .addRow(40);
      reader.endBatch();
      tracker.trackSchema(scanner.output());
      // Same version: the cached vector for b preserved its type.
      assertEquals(schemaVersion, tracker.schemaVersion());

      SingleRowSet expected = fixture.rowSetBuilder(twoColSchema)
          .addRow(30, null)
          .addRow(40, null)
          .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }
    {
      // ... FROM table 3
      ReaderSchemaOrchestrator reader = scanner.startReader();

      // Projection of (a, b), to (a, b), reusing b yet again
      ResultSetLoader loader = reader.makeTableLoader(twoColSchema);

      reader.startBatch();
      loader.writer()
          .addRow(50, "dino")
          .addRow(60, "barney");
      reader.endBatch();
      tracker.trackSchema(scanner.output());
      assertEquals(schemaVersion, tracker.schemaVersion());

      SingleRowSet expected = fixture.rowSetBuilder(twoColSchema)
          .addRow(50, "dino")
          .addRow(60, "barney")
          .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }
    scanner.close();
  }

  @Test
  public void testModeSmoothing() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();
    builder.enableSchemaSmoothing(true);
    builder.projection(RowSetTestUtils.projectList("a"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // Most general schema: nullable, with precision.
    TupleMetadata schema1 = new SchemaBuilder()
        .addNullable("a", MinorType.VARCHAR, 10)
        .buildSchema();

    SchemaTracker tracker = new SchemaTracker();
    int schemaVersion;
    {
      // Table 1: most permissive type
      ReaderSchemaOrchestrator reader = scanner.startReader();
      ResultSetLoader loader = reader.makeTableLoader(schema1);

      // Create a batch
      reader.startBatch();
      loader.writer()
          .addRow("fred")
          .addRow("wilma");
      reader.endBatch();
      tracker.trackSchema(scanner.output());
      schemaVersion = tracker.schemaVersion();

      // Verify
      SingleRowSet expected = fixture.rowSetBuilder(schema1)
          .addRow("fred")
          .addRow("wilma")
          .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
      scanner.closeReader();
    }
    {
      // Table 2: required, use nullable

      // Required version.
      TupleMetadata schema2 = new SchemaBuilder()
          .add("a", MinorType.VARCHAR, 10)
          .buildSchema();

      ReaderSchemaOrchestrator reader = scanner.startReader();
      ResultSetLoader loader = reader.makeTableLoader(schema2);

      // Create a batch
      reader.startBatch();
      loader.writer()
          .addRow("barney")
          .addRow("betty");
      reader.endBatch();

      // Verify, using persistent schema
      tracker.trackSchema(scanner.output());
      assertEquals(schemaVersion, tracker.schemaVersion());

      SingleRowSet expected = fixture.rowSetBuilder(schema1)
          .addRow("barney")
          .addRow("betty")
          .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
      scanner.closeReader();
    }
    {
      // Table 3: narrower precision, use wider

      // Required version with narrower precision.
      TupleMetadata schema3 = new SchemaBuilder()
          .add("a", MinorType.VARCHAR, 5)
          .buildSchema();

      ReaderSchemaOrchestrator reader = scanner.startReader();
      ResultSetLoader loader = reader.makeTableLoader(schema3);

      // Create a batch
      reader.startBatch();
      loader.writer()
          .addRow("bam-bam")
          .addRow("pebbles");
      reader.endBatch();

      // Verify, using persistent schema
      tracker.trackSchema(scanner.output());
      assertEquals(schemaVersion, tracker.schemaVersion());

      SingleRowSet expected = fixture.rowSetBuilder(schema1)
          .addRow("bam-bam")
          .addRow("pebbles")
          .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
      scanner.closeReader();
    }
    scanner.close();
  }

  /**
   * Verify that different table column orders are projected into the
   * SELECT order, preserving vectors, so no schema change for column
   * reordering.
   */
  @Test
  public void testColumnReordering() {
    ScanOrchestratorBuilder builder = new MockScanBuilder();
    builder.enableSchemaSmoothing(true);
    builder.projection(RowSetTestUtils.projectList("a", "b", "c"));
    ScanSchemaOrchestrator scanner = new ScanSchemaOrchestrator(fixture.allocator(), builder);

    // Same columns in three different physical orders.
    TupleMetadata schema1 = new SchemaBuilder()
        .add("a", MinorType.INT)
        .addNullable("b", MinorType.VARCHAR, 10)
        .add("c", MinorType.BIGINT)
        .buildSchema();
    TupleMetadata schema2 = new SchemaBuilder()
        .add("c", MinorType.BIGINT)
        .add("a", MinorType.INT)
        .addNullable("b", MinorType.VARCHAR, 10)
        .buildSchema();
    TupleMetadata schema3 = new SchemaBuilder()
        .add("a", MinorType.INT)
        .add("c", MinorType.BIGINT)
        .addNullable("b", MinorType.VARCHAR, 10)
        .buildSchema();

    SchemaTracker tracker = new SchemaTracker();
    int schemaVersion;
    {
      // ... FROM table 1
      ReaderSchemaOrchestrator reader = scanner.startReader();

      // Projection of (a, b, c) to (a, b, c)
      ResultSetLoader loader = reader.makeTableLoader(schema1);

      reader.startBatch();
      loader.writer()
          .addRow(10, "fred", 110L)
          .addRow(20, "wilma", 110L);
      reader.endBatch();
      tracker.trackSchema(scanner.output());
      schemaVersion = tracker.schemaVersion();

      SingleRowSet expected = fixture.rowSetBuilder(schema1)
          .addRow(10, "fred", 110L)
          .addRow(20, "wilma", 110L)
          .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
      scanner.closeReader();
    }
    {
      // ... FROM table 2
      ReaderSchemaOrchestrator reader = scanner.startReader();

      // Projection of (c, a, b) to (a, b, c)
      ResultSetLoader loader = reader.makeTableLoader(schema2);

      reader.startBatch();
      loader.writer()
          .addRow(330L, 30, "bambam")
          .addRow(440L, 40, "betty");
      reader.endBatch();
      tracker.trackSchema(scanner.output());
      assertEquals(schemaVersion, tracker.schemaVersion());

      SingleRowSet expected = fixture.rowSetBuilder(schema1)
          .addRow(30, "bambam", 330L)
          .addRow(40, "betty", 440L)
          .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }
    {
      // ... FROM table 3
      ReaderSchemaOrchestrator reader = scanner.startReader();

      // Projection of (a, c, b) to (a, b, c)
      ResultSetLoader loader = reader.makeTableLoader(schema3);

      reader.startBatch();
      loader.writer()
          .addRow(50, 550L, "dino")
          .addRow(60, 660L, "barney");
      reader.endBatch();
      tracker.trackSchema(scanner.output());
      assertEquals(schemaVersion, tracker.schemaVersion());

      SingleRowSet expected = fixture.rowSetBuilder(schema1)
          .addRow(50, "dino", 550L)
          .addRow(60, "barney", 660L)
          .build();
      RowSetUtilities.verify(expected, fixture.wrap(scanner.output()));
    }
    scanner.close();
  }

  // TODO: Start with early schema, but add columns
}
package com.github.peacetrue.log.aspect;

import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;

/**
 * Annotation-driven log aspect: advises any method annotated with
 * {@code @LogPointcut} and delegates the actual logging to the
 * {@code around} logic inherited from {@code LogAspect}.
 *
 * @author xiayx
 */
@Aspect
public class AnnotationLogAspect extends LogAspect {

    // The "logPointcut" parameter is bound by name in the pointcut
    // expression ("@annotation(logPointcut)") and is what restricts this
    // advice to @LogPointcut-annotated methods; it is intentionally unused
    // in the body since the superclass handles the logging itself.
    @Around(value = "@annotation(logPointcut)")
    public Object around(ProceedingJoinPoint joinPoint, LogPointcut logPointcut) throws Throwable {
        return super.around(joinPoint);
    }
}
package bluetooth;

import java.io.IOException;
import java.util.List;
import java.util.Scanner;

import javax.bluetooth.RemoteDevice;

/**
 * Interactive console client: discovers nearby Bluetooth devices, lets the
 * user pick a device and one of its services, connects, and sends a test
 * message over OBEX.
 *
 * Fixes over the previous version: the {@link Scanner} is managed with
 * try-with-resources (it leaked if an exception was thrown before the
 * explicit close), non-numeric menu input no longer crashes with an
 * InputMismatchException, and the duplicated friendly-name try/catch
 * blocks are factored into helpers.
 */
public class Main {

    public static void main(String[] args) throws IOException, InterruptedException {
        BluetoothConnector btConnector = new BluetoothConnector();

        // Keep scanning until at least one device shows up.
        System.out.print("Searching for devices...\n");
        int nDevices = 0;
        while (nDevices <= 0) {
            nDevices = btConnector.searchDevices();
            if (nDevices == 0) {
                System.out.printf("No devices were found! Retrying...\n");
            }
        }
        System.out.printf("%d devices were found\n", nDevices);

        RemoteDevice[] btDevices = btConnector.getDevicesArray();
        int count = 1;
        for (RemoteDevice dev : btDevices) {
            System.out.printf("%d) %s\n", count++, deviceLabel(dev));
        }

        int deviceChoice;
        int serviceChoice;
        List<String> deviceServices;
        // try-with-resources guarantees the scanner (and System.in) is
        // released even if service discovery throws.
        try (Scanner scanner = new Scanner(System.in)) {
            deviceChoice = readChoice(scanner, nDevices, "Choose device: ");

            System.out.printf("Searching what services are available for device %s...\n",
                    nameOrAddress(btDevices[deviceChoice]));

            int nServices = btConnector.searchServicesFromDevice(btDevices[deviceChoice]);
            System.out.printf("%d services found\n", nServices);

            deviceServices = btConnector.getServicesFromDevice(btDevices[deviceChoice]);
            count = 1;
            for (String service : deviceServices) {
                System.out.printf("%d) %s\n", count++, service);
            }

            serviceChoice = readChoice(scanner, deviceServices.size(), "Choose service: ");
        }

        String chosenService = deviceServices.get(serviceChoice);
        System.out.printf("Connecting to service %s\n", chosenService);
        if (btConnector.connect(chosenService)) {
            System.out.print("Connected!\n");
        } else {
            System.out.print("Failed connecting to the chosen service\n");
            System.exit(1);
        }

        System.out.print("Sending test message through OBEX...\n");
        try {
            btConnector.sendTestMessage();
            System.out.print("Test message was sent successfully\n");
        } catch (IOException failedSendingMsg) {
            System.out.print("An error has occured sending the test message\n");
        }
        System.out.print("Exiting...\n");
    }

    /**
     * Reads a 1-based menu choice until it falls within [1, optionCount];
     * returns the 0-based index. Non-numeric tokens are discarded instead
     * of letting {@code nextInt()} throw.
     */
    private static int readChoice(Scanner scanner, int optionCount, String prompt) {
        int choice = -1;
        while (choice < 0 || choice >= optionCount) {
            System.out.print(prompt);
            if (scanner.hasNextInt()) {
                choice = scanner.nextInt() - 1;
            } else {
                scanner.next(); // skip invalid token and re-prompt
            }
        }
        return choice;
    }

    /** "address (friendly name)", or just the address if the name lookup fails. */
    private static String deviceLabel(RemoteDevice dev) {
        try {
            return String.format("%s (%s)", dev.getBluetoothAddress(), dev.getFriendlyName(false));
        } catch (IOException e) {
            // unable to get friendly name, fall back to the address
            return dev.getBluetoothAddress();
        }
    }

    /** Friendly name if available, otherwise the Bluetooth address. */
    private static String nameOrAddress(RemoteDevice dev) {
        try {
            return dev.getFriendlyName(false);
        } catch (IOException e) {
            // unable to get friendly name, fall back to the address
            return dev.getBluetoothAddress();
        }
    }
}
package org.netmelody.cieye.server.observation.protocol.test; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasProperty; import org.junit.After; import org.junit.Test; import org.netmelody.cieye.server.observation.protocol.JsonRestRequester; import com.google.gson.GsonBuilder; public final class JsonRestRequesterTest { private final DummyServer server = new DummyServer(); private final JsonRestRequester requester = new JsonRestRequester(new GsonBuilder().create()); @After public void shutdownServer() { requester.shutdown(); server.close(); } @Test public void makesASuccessfulRequest() { server.respondWith("{ \"property\": \"value\" }"); final DummyJson result = requester.makeJsonRestCall("http://localhost:" + server.port() + "/", DummyJson.class); assertThat(result, hasProperty("property", equalTo("value"))); } public static final class DummyJson { private String property; public String getProperty() { return this.property; } @Override public String toString() { return "DummyJson with property:" + this.property; } } }
package org.dfm.funding.data.service.repository.config; import org.springframework.boot.autoconfigure.domain.EntityScan; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.data.jpa.repository.config.EnableJpaRepositories; import org.dfm.funding.data.service.domain.port.ObtainFundingData Service; import org.dfm.funding.data.service.repository.FundingData ServiceRepository; import org.dfm.funding.data.service.repository.dao.FundingData ServiceDao; @Configuration @EntityScan("org.dfm.funding.data.service.repository.entity") @EnableJpaRepositories("org.dfm.funding.data.service.repository.dao") public class JpaAdapterConfig { @Bean public ObtainFundingData Service getFundingData ServiceRepository(FundingData ServiceDao fundingDataServiceDao) { return new FundingData ServiceRepository(fundingDataServiceDao); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.syncope.core.persistence.jpa.entity;

import java.util.ArrayList;
import java.util.List;
import javax.persistence.Cacheable;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import org.apache.syncope.common.lib.types.IdMImplementationType;
import javax.persistence.UniqueConstraint;
import org.apache.syncope.core.persistence.api.entity.Implementation;
import org.apache.syncope.core.persistence.api.entity.OIDCProvider;
import org.apache.syncope.core.persistence.api.entity.OIDCProviderItem;
import org.apache.syncope.core.persistence.jpa.entity.resource.AbstractItem;

/**
 * JPA mapping item belonging to an OIDC provider: owned by a
 * {@link JPAOIDCProvider} and optionally decorated with item-transformer
 * implementations applied during mapping.
 */
@Entity
@Table(name = JPAOIDCProviderItem.TABLE)
@Cacheable
public class JPAOIDCProviderItem extends AbstractItem implements OIDCProviderItem {

    public static final String TABLE = "OIDCProviderItem";

    private static final long serialVersionUID = -6903418265811089724L;

    // Owning OIDC provider of this mapping item.
    @ManyToOne
    private JPAOIDCProvider op;

    @Override
    public OIDCProvider getOP() {
        return op;
    }

    @Override
    public void setOP(final OIDCProvider op) {
        // Enforce that only the JPA implementation of the interface is stored.
        checkType(op, JPAOIDCProvider.class);
        this.op = (JPAOIDCProvider) op;
    }

    // Transformers applied to this item, stored via a dedicated join table;
    // the unique constraint prevents duplicate (item, implementation) pairs
    // at the database level.
    @ManyToMany(fetch = FetchType.EAGER)
    @JoinTable(name = TABLE + "Transformer",
            joinColumns = @JoinColumn(name = "item_id"),
            inverseJoinColumns = @JoinColumn(name = "implementation_id"),
            uniqueConstraints = @UniqueConstraint(columnNames = { "item_id", "implementation_id" }))
    private List<JPAImplementation> transformers = new ArrayList<>();

    @Override
    public boolean add(final Implementation transformer) {
        checkType(transformer, JPAImplementation.class);
        checkImplementationType(transformer, IdMImplementationType.ITEM_TRANSFORMER);
        // Add only if not already present; returns true either way so callers
        // see "transformer is now associated" as the outcome.
        return transformers.contains((JPAImplementation) transformer)
                || this.transformers.add((JPAImplementation) transformer);
    }

    @Override
    public List<? extends Implementation> getTransformers() {
        return transformers;
    }
}
package com.cgfy.mybatis.generator.plugins;

import org.apache.commons.lang3.StringUtils;
import org.mybatis.generator.api.IntrospectedColumn;
import org.mybatis.generator.api.IntrospectedTable;
import org.mybatis.generator.api.dom.java.FullyQualifiedJavaType;
import org.mybatis.generator.api.dom.java.TopLevelClass;

import java.util.ArrayList;
import java.util.List;

/**
 * MyBatis Generator plugin that emits a "&lt;Type&gt;UpdateConditionInputBean" class, used as
 * the condition (WHERE) input of generated update statements. The bean declares no fields of
 * its own; it inherits everything from the configured "updateSuperClass", parameterized with
 * this table's previously generated UPDATE_INPUT_BEAN type.
 */
public class UpdateConditionInputBeanPlugin extends AbstractBeanCreatePlugin {

    /** Attribute key under which the generated bean's type is registered on the table. */
    private static final String BEAN_KEY = "UPDATE_CONDITION_INPUT_BEAN";

    /** Default class-name template; "#TYPE#" is replaced by the base record's short name. */
    private static final String CLASS_NAME = "#TYPE#UpdateConditionInputBean";

    /** No field-level annotations are added for this bean. */
    protected void addFieldAnnotation(TopLevelClass topLevelClass, IntrospectedTable introspectedTable,
            IntrospectedColumn introspectedColumn, List<String> annotationList) {
    }

    /**
     * Returns the columns to render as fields: always empty, since this bean
     * declares no fields of its own.
     */
    protected List<IntrospectedColumn> columnList(IntrospectedTable introspectedTable) {
        // FIX: diamond operator instead of a raw ArrayList
        return new ArrayList<>();
    }

    /** No @NotNull validation annotations are added for this bean. */
    protected void addValidationNotNull(TopLevelClass topLevelClass, IntrospectedTable introspectedTable,
            IntrospectedColumn introspectedColumn, List<String> annotationList) {
    }

    /**
     * Sets the superclass to the configured "updateSuperClass", parameterized with the
     * UPDATE_INPUT_BEAN type already generated for this table.
     */
    protected void addClassAnnotation(TopLevelClass topLevelClass, IntrospectedTable introspectedTable) {
        FullyQualifiedJavaType javaType =
                new FullyQualifiedJavaType(this.properties.getProperty("updateSuperClass"));
        javaType.addTypeArgument(
                new FullyQualifiedJavaType((String) introspectedTable.getAttribute("UPDATE_INPUT_BEAN")));
        topLevelClass.setSuperClass(javaType);
    }

    /** Class-level comment: the table remarks followed by the (Japanese) suffix. */
    protected String getClassComment(IntrospectedTable introspectedTable) {
        return introspectedTable.getRemarks() + "条件用更新用Bean";
    }

    /**
     * Builds the fully-qualified generated class name from the configured "basePackage" and
     * "fileName" properties, falling back to the {@link #CLASS_NAME} template when no
     * fileName is configured.
     */
    protected String createClassName(IntrospectedTable introspectedTable) {
        FullyQualifiedJavaType baseRecordType = new FullyQualifiedJavaType(introspectedTable.getBaseRecordType());
        String basePackage = this.properties.getProperty("basePackage");
        String inputClassName = this.properties.getProperty("fileName");
        if (StringUtils.isEmpty(inputClassName)) {
            // FIX: reuse the CLASS_NAME constant instead of duplicating the literal
            inputClassName = CLASS_NAME;
        }
        String className = inputClassName.replaceAll("#TYPE#", baseRecordType.getShortName());
        return StringUtils.join(new String[] { basePackage, className }, ".");
    }

    /** FIX: return the BEAN_KEY constant instead of a duplicated literal. */
    protected String getBeanKey() {
        return BEAN_KEY;
    }
}
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.api.ads.admanager.jaxws.v202011;

import java.util.List;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.ws.RequestWrapper;
import javax.xml.ws.ResponseWrapper;

/**
 * Provides methods for adding, updating and retrieving {@link Proposal} objects.
 *
 * <p>This class was generated by the JAX-WS RI (2.2.9-b130926.1035, generated source
 * version 2.1). Do not edit by hand; regenerate from the service WSDL instead.
 */
@WebService(name = "ProposalServiceInterface", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
@XmlSeeAlso({
    ObjectFactory.class
})
public interface ProposalServiceInterface {

    /**
     * Creates new {@link Proposal} objects.
     *
     * <p>For each proposal, the following fields are required:
     * <ul>
     * <li>{@link Proposal#name}</li>
     * </ul>
     *
     * @param proposals the proposals to create
     * @return the created proposals with their IDs filled in
     * @throws ApiException_Exception on any API-level failure
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
    @RequestWrapper(localName = "createProposals", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfacecreateProposals")
    @ResponseWrapper(localName = "createProposalsResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfacecreateProposalsResponse")
    public List<Proposal> createProposals(
        @WebParam(name = "proposals", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
        List<Proposal> proposals)
        throws ApiException_Exception
    ;

    /**
     * Gets a {@link MarketplaceCommentPage} of {@link MarketplaceComment} objects that satisfy the
     * given {@link Statement#query}. This method only returns comments already sent to Marketplace,
     * local draft {@link ProposalMarketplaceInfo#marketplaceComment} are not included.
     * The following fields are supported for filtering:
     *
     * <table>
     * <tr>
     * <th scope="col">PQL Property</th>
     * <th scope="col">Object Property</th>
     * </tr>
     * <tr>
     * <td>{@code proposalId}</td>
     * <td>{@link MarketplaceComment#proposalId}</td>
     * </tr>
     * </table>
     *
     * The query must specify a {@code proposalId}, and only supports a subset of PQL syntax:<br>
     * <code>[WHERE <condition> {AND <condition> ...}]</code><br>
     * <code>[ORDER BY <property> [ASC | DESC]]</code><br>
     * <code>[LIMIT {[<offset>,] <count>} | {<count> OFFSET <offset>}]</code><br>
     *
     * <p><code><condition></code><br>
     * &nbsp;&nbsp;&nbsp;&nbsp; <code>:= <property> = <value></code><br>
     * <code><condition> := <property> IN <list></code><br>
     * Only supports {@code ORDER BY} {@link MarketplaceComment#creationTime}.
     *
     * @param filterStatement a Publisher Query Language statement used to filter a set of marketplace
     *        comments
     * @return the marketplace comments that match the given filter
     * @throws ApiException_Exception on any API-level failure
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
    @RequestWrapper(localName = "getMarketplaceCommentsByStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfacegetMarketplaceCommentsByStatement")
    @ResponseWrapper(localName = "getMarketplaceCommentsByStatementResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfacegetMarketplaceCommentsByStatementResponse")
    public MarketplaceCommentPage getMarketplaceCommentsByStatement(
        @WebParam(name = "filterStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
        Statement filterStatement)
        throws ApiException_Exception
    ;

    /**
     * Gets a {@link ProposalPage} of {@link Proposal} objects that satisfy the given
     * {@link Statement#query}. The following fields are supported for filtering:
     *
     * <table>
     * <tr>
     * <th scope="col">PQL Property</th>
     * <th scope="col">Object Property</th>
     * </tr>
     * <tr>
     * <td>{@code id}</td>
     * <td>{@link Proposal#id}</td>
     * </tr>
     * <tr>
     * <td>{@code dfpOrderId}</td>
     * <td>{@link Proposal#dfpOrderId}</td>
     * </tr>
     * <tr>
     * <td>{@code name}</td>
     * <td>{@link Proposal#name}</td>
     * </tr>
     * <tr>
     * <td>{@code status}</td>
     * <td>{@link Proposal#status}</td>
     * </tr>
     * <tr>
     * <td>{@code isArchived}</td>
     * <td>{@link Proposal#isArchived}</td>
     * </tr>
     * <tr>
     * <td>
     * {@code approvalStatus}
     * <div class="constraint">Only applicable for proposals using sales management</div>
     * </td>
     * <td>{@link Proposal#approvalStatus}</td>
     * </tr>
     * <tr>
     * <td>{@code lastModifiedDateTime}</td>
     * <td>{@link Proposal#lastModifiedDateTime}</td>
     * </tr>
     * <tr>
     * <td>
     * {@code thirdPartyAdServerId}
     * <div class="constraint">
     * Only applicable for non-programmatic proposals using sales management
     * </div>
     * </td>
     * <td>{@link Proposal#thirdPartyAdServerId}</td>
     * </tr>
     * <tr>
     * <td>
     * {@code customThirdPartyAdServerName}
     * <div class="constraint">
     * Only applicable for non-programmatic proposals using sales management
     * </div>
     * </td>
     * <td>{@link Proposal#customThirdPartyAdServerName}</td>
     * </tr>
     * <tr>
     * <td>{@code hasOfflineErrors}</td>
     * <td>{@link Proposal#hasOfflineErrors}</td>
     * </tr>
     * <tr>
     * <td>{@code isProgrammatic}</td>
     * <td>{@link Proposal#isProgrammatic}</td>
     * </tr>
     * <tr>
     * <td>
     * {@code negotiationStatus}
     * <div class="constraint">Only applicable for programmatic proposals</div>
     * </td>
     * <td>{@link ProposalMarketplaceInfo#negotiationStatus}</td>
     * </tr>
     * </table>
     *
     * @param filterStatement a Publisher Query Language statement used to filter
     *        a set of proposals
     * @return the proposals that match the given filter
     * @throws ApiException_Exception on any API-level failure
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
    @RequestWrapper(localName = "getProposalsByStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfacegetProposalsByStatement")
    @ResponseWrapper(localName = "getProposalsByStatementResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfacegetProposalsByStatementResponse")
    public ProposalPage getProposalsByStatement(
        @WebParam(name = "filterStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
        Statement filterStatement)
        throws ApiException_Exception
    ;

    /**
     * Performs actions on {@link Proposal} objects that match the given {@link Statement#query}.
     *
     * <p>The following fields are also required when submitting proposals for approval:
     * <ul>
     * <li>{@link Proposal#advertiser}</li>
     * <li>{@link Proposal#primarySalesperson}</li>
     * <li>{@link Proposal#primaryTraffickerId}</li>
     * </ul>
     *
     * @param proposalAction the action to perform
     * @param filterStatement a Publisher Query Language statement used to filter a set of proposals
     * @return the result of the action performed
     * @throws ApiException_Exception on any API-level failure
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
    @RequestWrapper(localName = "performProposalAction", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfaceperformProposalAction")
    @ResponseWrapper(localName = "performProposalActionResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfaceperformProposalActionResponse")
    public UpdateResult performProposalAction(
        @WebParam(name = "proposalAction", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
        ProposalAction proposalAction,
        @WebParam(name = "filterStatement", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
        Statement filterStatement)
        throws ApiException_Exception
    ;

    /**
     * Updates the specified {@link Proposal} objects.
     *
     * @param proposals the proposals to update
     * @return the updated proposals
     * @throws ApiException_Exception on any API-level failure
     */
    @WebMethod
    @WebResult(name = "rval", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
    @RequestWrapper(localName = "updateProposals", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfaceupdateProposals")
    @ResponseWrapper(localName = "updateProposalsResponse", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011", className = "com.google.api.ads.admanager.jaxws.v202011.ProposalServiceInterfaceupdateProposalsResponse")
    public List<Proposal> updateProposals(
        @WebParam(name = "proposals", targetNamespace = "https://www.google.com/apis/ads/publisher/v202011")
        List<Proposal> proposals)
        throws ApiException_Exception
    ;

}
package com.dtstack.jlogstash.inputs; import com.fasterxml.jackson.databind.ObjectMapper; //import com.fasterxml.jackson.module.afterburner.AfterburnerModule; /** * * @author sishu.yss * */ public class JsonUtils { public final static ObjectMapper mapper = new ObjectMapper(); // public final static ObjectMapper mapper = new ObjectMapper().registerModule(new AfterburnerModule()); }
package edu.psu.sweng500.emrms.services; import edu.psu.sweng500.emrms.model.HAllergy; import edu.psu.sweng500.emrms.model.HDiagnosis; import edu.psu.sweng500.emrms.service.ManageAllergyService; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.web.WebAppConfiguration; import static org.hamcrest.CoreMatchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; @WebAppConfiguration @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = {"classpath:/spring/test-emrms-context.xml"}) public class ManageAllergyServiceTest { @Autowired @Qualifier("manageAllergyService") private ManageAllergyService service; @Before public void setUp() { verifySpringAnnotations(); } @After public void tearDown() { //ToDO //delete from h_Diagnosis where hDiagnosisID = 1; } public void verifySpringAnnotations() { assertThat(service, instanceOf(ManageAllergyService.class)); } @Test public void testAddAllergy() { HAllergy allergy = new HAllergy(); allergy.setAllergyID(1); allergy.setUserId("admin"); allergy.setAllergyCode("ALGCODE"); allergy.setAllergyName("NUT Allergy"); allergy.setAllergyType(1); allergy.setSeverity("High"); allergy.setPatientID(3); // ToDo int returnValue = service.AddAllergy(allergy); assertEquals(0,returnValue); } @Test public void testDeleteAllergy()throws Exception{ HAllergy allergy=new HAllergy(); allergy.setAllergyID(1); allergy.setPatientID(3); //ToDo int returnValue=service.DeleteAllergy(allergy); assertEquals(0,returnValue); } }
/*******************************************************************************
 * Copyright (c) 2015, 2016
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *******************************************************************************/
package jsettlers.logic.buildings.others;

import jsettlers.common.buildings.EBuildingType;
import jsettlers.common.mapobject.EMapObjectType;
import jsettlers.common.position.ShortPoint2D;
import jsettlers.logic.buildings.Building;
import jsettlers.logic.buildings.IBuildingsGrid;
import jsettlers.logic.player.Player;

/**
 * This is a default building. It can be used for every building type that has no real function.
 *
 * @author Andreas Eberle
 */
public final class DefaultBuilding extends Building {
	private static final long serialVersionUID = 1L;

	// Marks the building occupied immediately on construction: a functionless
	// building has no workers or soldiers to wait for.
	public DefaultBuilding(EBuildingType buildingType, Player player, ShortPoint2D position, IBuildingsGrid buildingsGrid) {
		super(buildingType, player, position, buildingsGrid);
		setOccupied(true);
	}

	// No periodic work to do. NOTE(review): returning -1 appears to signal
	// "no further timer events" to Building — confirm against the Building base class.
	@Override
	protected int subTimerEvent() {
		return -1;
	}

	// Nothing happens when construction finishes; same -1 "no event" convention as above.
	@Override
	protected int constructionFinishedEvent() {
		return -1;
	}

	// Flag displayed at the building's door position.
	@Override
	protected EMapObjectType getFlagType() {
		return EMapObjectType.FLAG_DOOR;
	}
}
package cc.momas.smbms.entity;

import java.util.Date;

/**
 * Role entity for the SMBMS application: an id, a business code, a display
 * name, and creation/modification audit fields.
 */
public class Role {

    private Integer roleId;
    private String roleCode;
    private String roleName;
    private Integer createdBy;
    private Date creationDate;
    private Integer modifyBy;
    private Date modifyDate;

    /** Creates an empty role; populate via setters. */
    public Role() {
        super();
    }

    /** Creates a fully-populated role. */
    public Role(Integer roleId, String roleCode, String roleName, Integer createBy, Date creationDate,
            Integer modifyBy, Date modifyDate) {
        super();
        this.roleId = roleId;
        this.roleCode = roleCode;
        this.roleName = roleName;
        this.createdBy = createBy;
        this.creationDate = creationDate;
        this.modifyBy = modifyBy;
        this.modifyDate = modifyDate;
    }

    public Integer getRoleId() {
        return roleId;
    }

    public void setRoleId(Integer roleId) {
        this.roleId = roleId;
    }

    public String getRoleCode() {
        return roleCode;
    }

    public void setRoleCode(String roleCode) {
        this.roleCode = roleCode;
    }

    public String getRoleName() {
        return roleName;
    }

    public void setRoleName(String roleName) {
        this.roleName = roleName;
    }

    public Integer getCreatedBy() {
        return createdBy;
    }

    public void setCreatedBy(Integer createdBy) {
        this.createdBy = createdBy;
    }

    public Date getCreationDate() {
        return creationDate;
    }

    public void setCreationDate(Date creationDate) {
        this.creationDate = creationDate;
    }

    public Integer getModifyBy() {
        return modifyBy;
    }

    public void setModifyBy(Integer modifyBy) {
        this.modifyBy = modifyBy;
    }

    public Date getModifyDate() {
        return modifyDate;
    }

    public void setModifyDate(Date modifyDate) {
        this.modifyDate = modifyDate;
    }

    /** Produces exactly the same string as the original concatenation-based version. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("Role [roleId=");
        sb.append(roleId)
                .append(", roleCode=").append(roleCode)
                .append(", roleName=").append(roleName)
                .append(", createBy=").append(createdBy)
                .append(", creationDate=").append(creationDate)
                .append(", modifyBy=").append(modifyBy)
                .append(", modifyDate=").append(modifyDate)
                .append("]");
        return sb.toString();
    }
}
// Copyright 2017 JanusGraph Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package org.janusgraph.blueprints.process.traversal.strategy.optimization;

import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.DefaultGraphTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.GraphStep;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.HasContainer;
import org.apache.tinkerpop.gremlin.process.traversal.strategy.optimization.FilterRankingStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.strategy.optimization.InlineFilterStrategy;
import org.apache.tinkerpop.gremlin.process.traversal.util.DefaultTraversalStrategies;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.process.traversal.P;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.janusgraph.core.JanusGraph;
import org.janusgraph.graphdb.tinkerpop.optimize.JanusGraphStep;
import org.janusgraph.graphdb.tinkerpop.optimize.JanusGraphStepStrategy;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;

import static org.apache.tinkerpop.gremlin.process.traversal.P.eq;
import static org.apache.tinkerpop.gremlin.process.traversal.P.gt;
import static org.apache.tinkerpop.gremlin.process.traversal.P.lt;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.filter;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.has;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.not;
import static org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__.properties;
import static org.junit.Assert.assertEquals;

/**
 * Parameterized test asserting that applying {@code JanusGraphStepStrategy} (plus any
 * per-case extra strategies) to a user-written traversal produces exactly the expected
 * optimized traversal, i.e. has()/hasId() conditions get folded into a single
 * {@code JanusGraphStep}.
 */
@RunWith(Parameterized.class)
public class JanusGraphStepStrategyTest {

    // Traversal as a user would write it, before optimization.
    @Parameterized.Parameter(value = 0)
    public Traversal original;

    // Expected traversal after strategy application (built via g_V below).
    @Parameterized.Parameter(value = 1)
    public Traversal optimized;

    // Extra strategies applied together with JanusGraphStepStrategy for this case.
    @Parameterized.Parameter(value = 2)
    public Collection<TraversalStrategy> otherStrategies;

    /**
     * Applies JanusGraphStepStrategy + the case's other strategies to {@code original}
     * and asserts the result equals {@code optimized}.
     */
    @Test
    public void doTest() {
        final TraversalStrategies strategies = new DefaultTraversalStrategies();
        strategies.addStrategies(JanusGraphStepStrategy.instance());
        for (final TraversalStrategy strategy : this.otherStrategies) {
            strategies.addStrategies(strategy);
        }
        this.original.asAdmin().setStrategies(strategies);
        this.original.asAdmin().applyStrategies();
        assertEquals(this.optimized, this.original);
    }

    /**
     * Builds the expected optimized traversal: a single JanusGraphStep seeded with the
     * given alternating key/predicate pairs; {@code T.id} keys become id constraints,
     * everything else becomes a HasContainer.
     */
    private static GraphTraversal.Admin<?, ?> g_V(final Object... hasKeyValues) {
        final GraphTraversal.Admin<?, ?> traversal = new DefaultGraphTraversal<>();
        final JanusGraphStep<Vertex, Vertex> graphStep = new JanusGraphStep<>(new GraphStep<>(traversal, Vertex.class, true));
        for (int i = 0; i < hasKeyValues.length; i = i + 2) {
            if (hasKeyValues[i].equals(T.id)) {
                graphStep.addIds(Arrays.asList(hasKeyValues[i + 1]));
            } else {
                graphStep.addHasContainer(new HasContainer((String) hasKeyValues[i], (P) hasKeyValues[i + 1]));
            }
        }
        return traversal.addStep(graphStep);
    }

    /**
     * Test cases: {original traversal, expected optimized traversal, extra strategies}.
     * Cases exercise plain folding, predicate conjunctions, or() barriers, step labels,
     * dedup reordering (requires FilterRankingStrategy), filter inlining, and hasId folding.
     */
    @Parameterized.Parameters(name = "{0}")
    public static Iterable<Object[]> generateTestParameters() {
        return Arrays.asList(new Object[][]{
            // No conditions: plain V() stays a bare JanusGraphStep.
            {__.V().out(), g_V().out(), Collections.emptyList()},
            // Single has() folds into the step.
            {__.V().has("name", "marko").out(), g_V("name", eq("marko")).out(), Collections.emptyList()},
            // AND-ed predicates are split into separate containers.
            {__.V().has("name", "marko").has("age", gt(31).and(lt(10))).out(),
                g_V("name", eq("marko"), "age", gt(31), "age", lt(10)).out(), Collections.emptyList()},
            // has() after or() folds only when FilterRankingStrategy reorders filters first.
            {__.V().has("name", "marko").or(has("age"), has("age", gt(32))).has("lang", "java"),
                g_V("name", eq("marko"), "lang", eq("java")).or(has("age"), has("age", gt(32))),
                Collections.singletonList(FilterRankingStrategy.instance())},
            // Without FilterRankingStrategy the label blocks folding past or().
            {__.V().has("name", "marko").as("a").or(has("age"), has("age", gt(32))).has("lang", "java"),
                g_V("name", eq("marko")).as("a").or(has("age"), has("age", gt(32))).has("lang", "java"),
                Collections.emptyList()},
            // With FilterRankingStrategy the label is carried onto the folded step.
            {__.V().has("name", "marko").as("a").or(has("age"), has("age", gt(32))).has("lang", "java"),
                g_V("name", eq("marko"), "lang", eq("java")).or(has("age"), has("age", gt(32))).as("a"),
                Collections.singletonList(FilterRankingStrategy.instance())},
            // dedup() is pushed after the folded conditions.
            {__.V().dedup().has("name", "marko").or(has("age"), has("age", gt(32))).has("lang", "java"),
                g_V("name", eq("marko"), "lang", eq("java")).or(has("age"), has("age", gt(32))).dedup(),
                Collections.singletonList(FilterRankingStrategy.instance())},
            // Label before dedup() ends up on the relocated dedup step.
            {__.V().as("a").dedup().has("name", "marko").or(has("age"), has("age", gt(32))).has("lang", "java"),
                g_V("name", eq("marko"), "lang", eq("java")).or(has("age"), has("age", gt(32))).dedup().as("a"),
                Collections.singletonList(FilterRankingStrategy.instance())},
            // Multiple labels merge onto the folded step.
            {__.V().as("a").has("name", "marko").as("b").or(has("age"), has("age", gt(32))).has("lang", "java"),
                g_V("name", eq("marko"), "lang", eq("java")).or(has("age"), has("age", gt(32))).as("b", "a"),
                Collections.singletonList(FilterRankingStrategy.instance())},
            // filter(has(...)) is first inlined, then folded.
            {__.V().as("a").dedup().has("name", "marko").or(has("age"), has("age", gt(32))).filter(has("name", "bob")).has("lang", "java"),
                g_V("name", eq("marko"), "lang", eq("java"), "name", eq("bob")).or(has("age"), has("age", gt(32))).dedup().as("a"),
                Arrays.asList(InlineFilterStrategy.instance(), FilterRankingStrategy.instance())},
            // Full default strategy set: not(has(...)) is rewritten to not(filter(properties(...))).
            {__.V().has("name", "marko").or(not(has("age")), has("age", gt(32))).has("name", "bob").has("lang", "java"),
                g_V("name", eq("marko"), "name", eq("bob"), "lang", eq("java")).or(not(filter(properties("age"))), has("age", gt(32))),
                TraversalStrategies.GlobalCache.getStrategies(JanusGraph.class).toList()},
            // Nested predicate conjunctions are flattened into separate containers.
            {__.V().has("name", eq("marko").and(eq("bob").and(eq("stephen")))).out("knows"),
                g_V("name", eq("marko"), "name", eq("bob"), "name", eq("stephen")).out("knows"),
                Collections.emptyList()},
            // hasId() folds into the step's id list.
            {__.V().hasId(1), g_V(T.id, 1), Collections.emptyList()},
            {__.V().hasId(1).hasId(2), g_V(T.id, 1, T.id, 2), Collections.emptyList()},
            {__.V().hasId(1).has("name", "marko"), g_V(T.id, 1, "name", eq("marko")), Collections.emptyList()}
        });
    }
}
/*
 * Copyright 2013 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.gradle.nativebinaries;

import org.gradle.api.Incubating;
import org.gradle.api.Named;

/**
 * Specifies a build-type for a native binary. Common build types are 'debug' and 'release', but others may be defined.
 */
@Incubating
public interface BuildType extends Named {

    /**
     * Returns a human-consumable name for this build type.
     *
     * @return the display name to present to users, e.g. in reports and logs
     */
    String getDisplayName();
}
/*
    This file is part of the iText (R) project.
    Copyright (c) 1998-2020 iText Group NV
    Authors: iText Software.

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU Affero General Public License version 3
    as published by the Free Software Foundation with the addition of the
    following permission added to Section 15 as permitted in Section 7(a):
    FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED BY
    ITEXT GROUP. ITEXT GROUP DISCLAIMS THE WARRANTY OF NON INFRINGEMENT
    OF THIRD PARTY RIGHTS

    This program is distributed in the hope that it will be useful, but
    WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
    or FITNESS FOR A PARTICULAR PURPOSE.
    See the GNU Affero General Public License for more details.
    You should have received a copy of the GNU Affero General Public License
    along with this program; if not, see http://www.gnu.org/licenses or write to
    the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
    Boston, MA, 02110-1301 USA, or download the license from the following URL:
    http://itextpdf.com/terms-of-use/

    The interactive user interfaces in modified source and object code versions
    of this program must display Appropriate Legal Notices, as required under
    Section 5 of the GNU Affero General Public License.

    In accordance with Section 7(b) of the GNU Affero General Public License,
    a covered work must retain the producer line in every PDF that is created
    or manipulated using iText.

    You can be released from the requirements of the license by purchasing
    a commercial license. Buying such a license is mandatory as soon as you
    develop commercial activities involving the iText software without
    disclosing the source code of your own applications.
    These activities include: offering paid services to customers as an ASP,
    serving PDFs on the fly in a web application, shipping iText with a closed
    source product.

    For more information, please contact iText Software Corp. at this
    address: sales@itextpdf.com
 */
package com.itextpdf.tool.xml.examples.css.text_layout;

import com.itextpdf.tool.xml.examples.SampleTest;

/**
 * Sample-based test for the "small002" CSS text-layout example, executed via the
 * {@link SampleTest} harness.
 */
public class Small002Test extends SampleTest{

    // NOTE(review): presumably SampleTest uses this name to locate the sample's
    // input/expected-output fixtures — confirm against SampleTest.
    protected String getTestName() {
        return "small002";
    }
}
package app.com.oath.micro.server.dist.lock.rest;

import org.springframework.stereotype.Component;

import com.oath.micro.server.dist.lock.DistributedLockService;

/**
 * Stub {@link DistributedLockService}: only the fixed key "dummy-key" is ever
 * considered lockable, and releasing a lock always reports failure.
 */
@Component
public class DummyLock implements DistributedLockService {

    /** The only key this stub will "lock". */
    private static final String DUMMY_KEY = "dummy-key";

    /**
     * @param key the lock key to try
     * @return {@code true} only for the dummy key; the constant-first equals also
     *         makes the check null-safe (a null key now yields {@code false}
     *         instead of throwing a NullPointerException)
     */
    @Override
    public boolean tryLock(String key) {
        return DUMMY_KEY.equals(key);
    }

    /** This stub never releases anything; always reports failure. */
    @Override
    public boolean tryReleaseLock(String key) {
        return false;
    }
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/gaming/v1beta/game_server_deployments.proto package com.google.cloud.gaming.v1beta; /** * * * <pre> * Request message for GameServerDeploymentsService.ListGameServerDeployments. * </pre> * * Protobuf type {@code google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest} */ public final class ListGameServerDeploymentsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest) ListGameServerDeploymentsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListGameServerDeploymentsRequest.newBuilder() to construct. 
private ListGameServerDeploymentsRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListGameServerDeploymentsRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; orderBy_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListGameServerDeploymentsRequest(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListGameServerDeploymentsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); parent_ = s; break; } case 16: { pageSize_ = input.readInt32(); break; } case 26: { java.lang.String s = input.readStringRequireUtf8(); pageToken_ = s; break; } case 34: { java.lang.String s = input.readStringRequireUtf8(); filter_ = s; break; } case 42: { java.lang.String s = input.readStringRequireUtf8(); orderBy_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.gaming.v1beta.GameServerDeployments 
.internal_static_google_cloud_gaming_v1beta_ListGameServerDeploymentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.gaming.v1beta.GameServerDeployments .internal_static_google_cloud_gaming_v1beta_ListGameServerDeploymentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest.class, com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; private volatile java.lang.Object parent_; /** * * * <pre> * Required. The parent resource name. Uses the form: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent resource name. Uses the form: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_; /** * * * <pre> * Optional. The maximum number of items to return. 
If unspecified, the * server will pick an appropriate default. The server may return fewer items * than requested. A caller should only rely on response's * [next_page_token][google.cloud.gaming.v1beta.ListGameServerDeploymentsResponse.next_page_token] * to determine if there are more GameServerDeployments left to be queried. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; private volatile java.lang.Object pageToken_; /** * * * <pre> * Optional. The next_page_token value returned from a previous List request, * if any. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. The next_page_token value returned from a previous List request, * if any. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; private volatile java.lang.Object filter_; /** * * * <pre> * Optional. The filter to apply to list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. 
*/ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. The filter to apply to list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ORDER_BY_FIELD_NUMBER = 5; private volatile java.lang.Object orderBy_; /** * * * <pre> * Optional. Specifies the ordering of results following syntax at * https://cloud.google.com/apis/design/design_patterns#sorting_order. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The orderBy. */ @java.lang.Override public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } } /** * * * <pre> * Optional. Specifies the ordering of results following syntax at * https://cloud.google.com/apis/design/design_patterns#sorting_order. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for orderBy. 
*/ @java.lang.Override public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest)) { return super.equals(obj); } com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest other = (com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getOrderBy().equals(other.getOrderBy())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + ORDER_BY_FIELD_NUMBER; hash = (53 * hash) + getOrderBy().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for GameServerDeploymentsService.ListGameServerDeployments. 
* </pre> * * Protobuf type {@code google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest) com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.gaming.v1beta.GameServerDeployments .internal_static_google_cloud_gaming_v1beta_ListGameServerDeploymentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.gaming.v1beta.GameServerDeployments .internal_static_google_cloud_gaming_v1beta_ListGameServerDeploymentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest.class, com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest.Builder.class); } // Construct using com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; orderBy_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.gaming.v1beta.GameServerDeployments .internal_static_google_cloud_gaming_v1beta_ListGameServerDeploymentsRequest_descriptor; } @java.lang.Override public com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest 
getDefaultInstanceForType() { return com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest build() { com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest buildPartial() { com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest result = new com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest(this); result.parent_ = parent_; result.pageSize_ = pageSize_; result.pageToken_ = pageToken_; result.filter_ = filter_; result.orderBy_ = orderBy_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest) { return mergeFrom((com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest) other); } else { 
super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest other) { if (other == com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; onChanged(); } if (!other.getOrderBy().isEmpty()) { orderBy_ = other.orderBy_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent resource name. Uses the form: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent resource name. Uses the form: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent resource name. Uses the form: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; onChanged(); return this; } /** * * * <pre> * Required. The parent resource name. Uses the form: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); onChanged(); return this; } /** * * * <pre> * Required. The parent resource name. Uses the form: * `projects/{project}/locations/{location}`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. The maximum number of items to return. If unspecified, the * server will pick an appropriate default. The server may return fewer items * than requested. A caller should only rely on response's * [next_page_token][google.cloud.gaming.v1beta.ListGameServerDeploymentsResponse.next_page_token] * to determine if there are more GameServerDeployments left to be queried. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. The maximum number of items to return. If unspecified, the * server will pick an appropriate default. The server may return fewer items * than requested. A caller should only rely on response's * [next_page_token][google.cloud.gaming.v1beta.ListGameServerDeploymentsResponse.next_page_token] * to determine if there are more GameServerDeployments left to be queried. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; onChanged(); return this; } /** * * * <pre> * Optional. The maximum number of items to return. If unspecified, the * server will pick an appropriate default. The server may return fewer items * than requested. A caller should only rely on response's * [next_page_token][google.cloud.gaming.v1beta.ListGameServerDeploymentsResponse.next_page_token] * to determine if there are more GameServerDeployments left to be queried. 
* </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The next_page_token value returned from a previous List request, * if any. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The next_page_token value returned from a previous List request, * if any. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The next_page_token value returned from a previous List request, * if any. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; onChanged(); return this; } /** * * * <pre> * Optional. The next_page_token value returned from a previous List request, * if any. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. 
*/ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); onChanged(); return this; } /** * * * <pre> * Optional. The next_page_token value returned from a previous List request, * if any. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. The filter to apply to list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The filter to apply to list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The filter to apply to list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. 
*/ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; onChanged(); return this; } /** * * * <pre> * Optional. The filter to apply to list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); onChanged(); return this; } /** * * * <pre> * Optional. The filter to apply to list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; onChanged(); return this; } private java.lang.Object orderBy_ = ""; /** * * * <pre> * Optional. Specifies the ordering of results following syntax at * https://cloud.google.com/apis/design/design_patterns#sorting_order. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The orderBy. */ public java.lang.String getOrderBy() { java.lang.Object ref = orderBy_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); orderBy_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Specifies the ordering of results following syntax at * https://cloud.google.com/apis/design/design_patterns#sorting_order. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for orderBy. 
*/ public com.google.protobuf.ByteString getOrderByBytes() { java.lang.Object ref = orderBy_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); orderBy_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Specifies the ordering of results following syntax at * https://cloud.google.com/apis/design/design_patterns#sorting_order. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The orderBy to set. * @return This builder for chaining. */ public Builder setOrderBy(java.lang.String value) { if (value == null) { throw new NullPointerException(); } orderBy_ = value; onChanged(); return this; } /** * * * <pre> * Optional. Specifies the ordering of results following syntax at * https://cloud.google.com/apis/design/design_patterns#sorting_order. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearOrderBy() { orderBy_ = getDefaultInstance().getOrderBy(); onChanged(); return this; } /** * * * <pre> * Optional. Specifies the ordering of results following syntax at * https://cloud.google.com/apis/design/design_patterns#sorting_order. * </pre> * * <code>string order_by = 5 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for orderBy to set. * @return This builder for chaining. 
*/ public Builder setOrderByBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); orderBy_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest) private static final com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest(); } public static com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListGameServerDeploymentsRequest> PARSER = new com.google.protobuf.AbstractParser<ListGameServerDeploymentsRequest>() { @java.lang.Override public ListGameServerDeploymentsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListGameServerDeploymentsRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ListGameServerDeploymentsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListGameServerDeploymentsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.gaming.v1beta.ListGameServerDeploymentsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
package com.manju.ownersapi; import java.util.ArrayList; import java.util.List; import javax.validation.constraints.NotNull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.RestController; import com.manju.entities.OwnerEntity; import com.manju.exceptions.ErrorGenerator; import com.manju.exceptions.InvalidInputException; import com.manju.exceptions.ResourceNotFoundException; import com.manju.mapper.EntityMapper; import com.manju.petowner.swagger.api.OwnersApi; import com.manju.petowner.swagger.model.Error; import com.manju.petowner.swagger.model.Owner; import com.manju.repositories.OwnerRepository; @CrossOrigin @RestController public class OwnersResource implements OwnersApi { @Autowired private OwnerRepository ownerRepository; @Override public ResponseEntity<List<Owner>> getOwners() { // TODO Auto-generated method stub List<Owner> owners = new ArrayList<Owner>(); Iterable<OwnerEntity> ownersIterator = ownerRepository.findAll(); ownersIterator.forEach((owner) -> { owners.add(EntityMapper.mapOwnerEntityToOwner(owner)); }); return new ResponseEntity<List<Owner>>(owners, HttpStatus.OK); } @Override public ResponseEntity<Owner> getOwnerbyownerId(@NotNull String ownerId) { // TODO Auto-generated method stub Long ownerIdLong = 0L; try { ownerIdLong = Long.parseLong(ownerId); } catch (Exception e) { Error error = ErrorGenerator.getError(10006, "Invalid ownerId", "PetId pass is invalid, please pass the right ownerId"); throw new InvalidInputException(error); } OwnerEntity ownerEntity = null; try { ownerEntity = ownerRepository.findById(ownerIdLong).get(); }catch(Exception e) { Error error = ErrorGenerator.getError(10007, "Owner resource not found", "Owner not found"); throw new ResourceNotFoundException(error); } Owner owner = 
EntityMapper.mapOwnerEntityToOwner(ownerEntity); return new ResponseEntity<Owner>(owner, HttpStatus.OK); } }
#set( $symbol_pound = '#' ) #set( $symbol_dollar = '$' ) #set( $symbol_escape = '\' ) package ${package}.web.controller; import java.io.IOException; import java.net.URL; import java.util.ArrayList; import java.util.List; import javax.servlet.FilterChain; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.io.Resource; import org.springframework.core.io.support.PathMatchingResourcePatternResolver; import org.springframework.security.authentication.AuthenticationTrustResolverImpl; import org.springframework.security.core.Authentication; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.core.userdetails.User; import org.springframework.security.web.FilterInvocation; import org.springframework.security.web.access.expression.WebSecurityExpressionRoot; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import com.fasterxml.jackson.databind.ObjectMapper; import ${package}.core.security.ClientSecurityContext; @Controller public class HomeController { static final Logger log = LoggerFactory.getLogger(HomeController.class); @Autowired ObjectMapper objectMapper; @RequestMapping("/") public String index(Model model, HttpServletRequest request, HttpServletResponse response) throws IOException { ServletRequest req = (ServletRequest) request; ServletResponse resp = (ServletResponse) response; FilterInvocation filterInvocation = new FilterInvocation(req, resp, new FilterChain() { public void doFilter(ServletRequest request, ServletResponse response) throws IOException, ServletException { throw new 
UnsupportedOperationException(); } }); Authentication authentication = SecurityContextHolder.getContext().getAuthentication(); if (authentication != null) { if(authentication.getPrincipal() instanceof User){ ((User)authentication.getPrincipal()).eraseCredentials(); } WebSecurityExpressionRoot sec = new WebSecurityExpressionRoot(authentication, filterInvocation); sec.setTrustResolver(new AuthenticationTrustResolverImpl()); ClientSecurityContext clientSecurityContext = new ClientSecurityContext(); clientSecurityContext.setAnonymous(sec.isAnonymous()); clientSecurityContext.setAuthenticated(sec.isAuthenticated()); clientSecurityContext.setFullyAuthenticated(sec.isFullyAuthenticated()); clientSecurityContext.setPrincipal(sec.getPrincipal()); clientSecurityContext.setRememberMe(sec.isRememberMe()); clientSecurityContext.setAuthorities(sec.getAuthentication().getAuthorities()); String securityContextString = objectMapper.writeValueAsString(clientSecurityContext); model.addAttribute("security", securityContextString); } List<String> resourceArray = new ArrayList<String>(); getResourcePaths("classpath:/public/modules/*/*.js", resourceArray); getResourcePaths("classpath:/public/modules/*/config/*.js", resourceArray); getResourcePaths("classpath:/public/modules/*/controllers/*.js", resourceArray); getResourcePaths("classpath:/public/modules/*/services/*.js", resourceArray); getResourcePaths("classpath:/public/modules/*/directives/*.js", resourceArray); model.addAttribute("moduleResources", resourceArray); List<String> cssResourceArray = new ArrayList<String>(); getResourcePaths("classpath:/public/modules/*/css/*.css", cssResourceArray); model.addAttribute("moduleCSSResources", cssResourceArray); return "../public/index"; } private List<String> getResourcePaths(String pattern, List<String> resourceArray) { log.info(String.format("Finding embedded resource paths for: %s", pattern)); PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver(); try { 
Resource[] moduleResources = resolver.getResources(pattern); for (Resource resource : moduleResources) { log.debug(String.format("Processing resource: %s", resource)); URL resourceURL = resource.getURL(); log.debug(String.format("Found resource URL: %s", resourceURL)); resourceArray.add(truncateBeginningOfPath(resourceURL.getPath(), "/public/")); } } catch (IOException e) { log.error(String.format("Error processing resources for pattern: %s", pattern)); } return resourceArray; } private String truncateBeginningOfPath(String fullPath, String stringToMatch) { if (fullPath == null || fullPath.length() == 0) { throw new RuntimeException("fullPath is null or empty."); } if (stringToMatch == null || stringToMatch.length() == 0) { throw new RuntimeException("stringToMatch is null or empty."); } int matchIndex = fullPath.indexOf(stringToMatch); int splitIndex = matchIndex + stringToMatch.length(); if (matchIndex == -1) { return fullPath; } else { return fullPath.substring(splitIndex); } } }
package io.nuls.network.protocol.message; import io.nuls.kernel.exception.NulsException; import io.nuls.kernel.utils.NulsByteBuffer; import io.nuls.network.constant.NetworkConstant; public class GetVersionMessage extends BaseNetworkMessage<NetworkMessageBody>{ /** * 初始化基础消息的消息头 */ public GetVersionMessage() { super(NetworkConstant.NETWORK_GET_VERSION); } @Override protected NetworkMessageBody parseMessageBody(NulsByteBuffer byteBuffer) throws NulsException { return byteBuffer.readNulsData(new NetworkMessageBody()); } public GetVersionMessage(NetworkMessageBody body) { this(); this.setMsgBody(body); } }