gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (C) 2017 anthzh89@gmail.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.anthzh.tool.viewhelper.wrapper;
import android.content.Intent;
import android.graphics.drawable.Drawable;
import android.text.Editable;
import android.view.View;
import android.widget.AbsListView;
import android.widget.AbsListView.MultiChoiceModeListener;
import android.widget.AbsListView.OnScrollListener;
import android.widget.AbsListView.RecyclerListener;
import android.widget.ListAdapter;
import java.util.List;
/**
 * Fluent, chainable wrapper around {@link AbsListView}. Every setter delegates to the
 * wrapped view and returns the concrete wrapper type {@code W} so calls can be chained
 * by subclasses (curiously-recurring generic pattern).
 */
public class AbsListViewWrapper<W extends AbsListViewWrapper<W>> extends AdapterViewWrapper<W> {

    /** The wrapped view, held with its concrete {@link AbsListView} type. */
    protected AbsListView mView;

    /**
     * @param view the non-null view to wrap; also passed to the superclass wrapper
     */
    public AbsListViewWrapper(AbsListView view) {
        super(view);
        mView = view;
    }

    /**
     * Centralizes the single unchecked self-cast used by every chainable setter,
     * so the suppression is scoped to one method instead of the whole class.
     */
    @SuppressWarnings("unchecked")
    private W self() {
        return (W) this;
    }

    /**
     * @see AbsListView#setOnScrollListener(OnScrollListener)
     */
    public W setOnScrollListener(OnScrollListener l) {
        mView.setOnScrollListener(l);
        return self();
    }

    /**
     * @see AbsListView#smoothScrollToPositionFromTop(int, int)
     */
    public W smoothScrollToPositionFromTop(int position, int offset) {
        mView.smoothScrollToPositionFromTop(position, offset);
        return self();
    }

    /**
     * @see AbsListView#smoothScrollToPositionFromTop(int, int, int)
     */
    public W smoothScrollToPositionFromTop(int position, int offset, int duration) {
        mView.smoothScrollToPositionFromTop(position, offset, duration);
        return self();
    }

    /**
     * @see AbsListView#setSmoothScrollbarEnabled(boolean)
     */
    public W setSmoothScrollbarEnabled(boolean enabled) {
        mView.setSmoothScrollbarEnabled(enabled);
        return self();
    }

    /**
     * @see AbsListView#setScrollIndicators(View, View)
     */
    public W setScrollIndicators(View up, View down) {
        mView.setScrollIndicators(up, down);
        return self();
    }

    /**
     * @see AbsListView#setRecyclerListener(RecyclerListener)
     */
    public W setRecyclerListener(RecyclerListener listener) {
        mView.setRecyclerListener(listener);
        return self();
    }

    /**
     * @see AbsListView#setCacheColorHint(int)
     */
    public W setCacheColorHint(int color) {
        mView.setCacheColorHint(color);
        return self();
    }

    /**
     * @see AbsListView#setFastScrollAlwaysVisible(boolean)
     */
    public W setFastScrollAlwaysVisible(boolean alwaysShow) {
        mView.setFastScrollAlwaysVisible(alwaysShow);
        return self();
    }

    /**
     * @see AbsListView#setFastScrollEnabled(boolean)
     */
    public W setFastScrollEnabled(boolean enabled) {
        mView.setFastScrollEnabled(enabled);
        return self();
    }

    /**
     * @see AbsListView#setTranscriptMode(int)
     */
    public W setTranscriptMode(int mode) {
        mView.setTranscriptMode(mode);
        return self();
    }

    /**
     * @see AbsListView#beforeTextChanged(CharSequence, int, int, int)
     */
    public W beforeTextChanged(CharSequence s, int start, int count, int after) {
        mView.beforeTextChanged(s, start, count, after);
        return self();
    }

    /**
     * @see AbsListView#setDrawSelectorOnTop(boolean)
     */
    public W setDrawSelectorOnTop(boolean onTop) {
        mView.setDrawSelectorOnTop(onTop);
        return self();
    }

    /**
     * @see AbsListView#setMultiChoiceModeListener(MultiChoiceModeListener)
     */
    public W setMultiChoiceModeListener(MultiChoiceModeListener listener) {
        mView.setMultiChoiceModeListener(listener);
        return self();
    }

    /**
     * @see AbsListView#setScrollingCacheEnabled(boolean)
     */
    public W setScrollingCacheEnabled(boolean enabled) {
        mView.setScrollingCacheEnabled(enabled);
        return self();
    }

    /**
     * @see AbsListView#setRemoteViewsAdapter(Intent)
     */
    public W setRemoteViewsAdapter(Intent intent) {
        mView.setRemoteViewsAdapter(intent);
        return self();
    }

    /**
     * @see AbsListView#setStackFromBottom(boolean)
     */
    public W setStackFromBottom(boolean stackFromBottom) {
        mView.setStackFromBottom(stackFromBottom);
        return self();
    }

    /**
     * @see AbsListView#smoothScrollToPosition(int, int)
     */
    public W smoothScrollToPosition(int position, int boundPosition) {
        mView.smoothScrollToPosition(position, boundPosition);
        return self();
    }

    /**
     * @see AbsListView#smoothScrollToPosition(int)
     */
    public W smoothScrollToPosition(int position) {
        mView.smoothScrollToPosition(position);
        return self();
    }

    /**
     * @see AbsListView#deferNotifyDataSetChanged()
     */
    public W deferNotifyDataSetChanged() {
        mView.deferNotifyDataSetChanged();
        return self();
    }

    /**
     * @see AbsListView#setTextFilterEnabled(boolean)
     */
    public W setTextFilterEnabled(boolean textFilterEnabled) {
        mView.setTextFilterEnabled(textFilterEnabled);
        return self();
    }

    /**
     * @see AbsListView#setFriction(float)
     */
    public W setFriction(float friction) {
        mView.setFriction(friction);
        return self();
    }

    /**
     * @see AbsListView#invalidateViews()
     */
    public W invalidateViews() {
        mView.invalidateViews();
        return self();
    }

    /**
     * @see AbsListView#smoothScrollBy(int, int)
     */
    public W smoothScrollBy(int distance, int duration) {
        mView.smoothScrollBy(distance, duration);
        return self();
    }

    /**
     * Parameter was previously the raw type {@code List}; the underlying framework
     * method is declared as {@code reclaimViews(List<View>)}, so the typed parameter
     * is both safer and erasure-compatible with existing callers.
     *
     * @see AbsListView#reclaimViews(List)
     */
    public W reclaimViews(List<View> views) {
        mView.reclaimViews(views);
        return self();
    }

    /**
     * @see AbsListView#clearTextFilter()
     */
    public W clearTextFilter() {
        mView.clearTextFilter();
        return self();
    }

    /**
     * @see AbsListView#afterTextChanged(Editable)
     */
    public W afterTextChanged(Editable s) {
        mView.afterTextChanged(s);
        return self();
    }

    /**
     * @see AbsListView#setSelector(Drawable)
     */
    public W setSelector(Drawable sel) {
        mView.setSelector(sel);
        return self();
    }

    /**
     * @see AbsListView#setSelector(int)
     */
    public W setSelector(int resID) {
        mView.setSelector(resID);
        return self();
    }

    /**
     * @see AbsListView#setVelocityScale(float)
     */
    public W setVelocityScale(float scale) {
        mView.setVelocityScale(scale);
        return self();
    }

    /**
     * @see AbsListView#setAdapter(ListAdapter)
     */
    public W setAdapter(ListAdapter adapter) {
        mView.setAdapter(adapter);
        return self();
    }

    /**
     * @see AbsListView#setItemChecked(int, boolean)
     */
    public W setItemChecked(int position, boolean value) {
        mView.setItemChecked(position, value);
        return self();
    }

    /**
     * @see AbsListView#clearChoices()
     */
    public W clearChoices() {
        mView.clearChoices();
        return self();
    }

    /**
     * @see AbsListView#setChoiceMode(int)
     */
    public W setChoiceMode(int choiceMode) {
        mView.setChoiceMode(choiceMode);
        return self();
    }

    /**
     * @see AbsListView#setFilterText(String)
     */
    public W setFilterText(String filterText) {
        mView.setFilterText(filterText);
        return self();
    }
}
| |
/*
* WSO2 API Manager - Publisher API
* This specifies a **RESTful API** for WSO2 **API Manager** - Publisher. Please see [full swagger definition](https://raw.githubusercontent.com/wso2/carbon-apimgt/v6.1.66/components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher/src/main/resources/publisher-api.yaml) of the API which is written using [swagger 2.0](http://swagger.io/) specification.
*
* OpenAPI spec version: 0.11.0
* Contact: architecture@wso2.com
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.api;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.ApiCallback;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.ApiClient;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.ApiException;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.ApiResponse;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.Configuration;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.Pair;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.ProgressRequestBody;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.ProgressResponseBody;
import com.google.gson.reflect.TypeToken;
import java.io.IOException;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.model.Error;
import org.wso2.carbon.apimgt.samples.utils.publisher.rest.client.model.ExtendedSubscription;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Generated REST client for the WSO2 Publisher "Subscription (Individual)" resource.
 * Provides sync, sync-with-HTTP-info, and async variants for three operations:
 * block a subscription, fetch a subscription, and unblock a subscription.
 *
 * NOTE: auto-generated by swagger-codegen (see file header) — do not hand-edit logic.
 */
public class SubscriptionIndividualApi {
    // Client used to build and execute all HTTP calls; defaults to the shared singleton.
    private ApiClient apiClient;

    public SubscriptionIndividualApi() {
        this(Configuration.getDefaultApiClient());
    }

    public SubscriptionIndividualApi(ApiClient apiClient) {
        this.apiClient = apiClient;
    }

    public ApiClient getApiClient() {
        return apiClient;
    }

    public void setApiClient(ApiClient apiClient) {
        this.apiClient = apiClient;
    }

    /* Build call for subscriptionsBlockSubscriptionPost */
    private com.squareup.okhttp.Call subscriptionsBlockSubscriptionPostCall(String subscriptionId, String blockState, String ifMatch, String ifUnmodifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // POST with no body: both operation inputs travel as query parameters.
        Object localVarPostBody = null;

        // create path and map variables
        String localVarPath = "/subscriptions/block-subscription".replaceAll("\\{format\\}","json");

        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        if (subscriptionId != null)
            localVarQueryParams.addAll(apiClient.parameterToPairs("", "subscriptionId", subscriptionId));
        if (blockState != null)
            localVarQueryParams.addAll(apiClient.parameterToPairs("", "blockState", blockState));

        // Optional conditional-request headers are only sent when supplied.
        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        if (ifMatch != null)
            localVarHeaderParams.put("If-Match", apiClient.parameterToString(ifMatch));
        if (ifUnmodifiedSince != null)
            localVarHeaderParams.put("If-Unmodified-Since", apiClient.parameterToString(ifUnmodifiedSince));

        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        if(progressListener != null) {
            // NOTE(review): this appends a new interceptor to the SHARED http client on every
            // async invocation and never removes it; interceptors accumulate over the client's
            // lifetime. Known behavior of this generator version — flagged, not changed.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                        .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                        .build();
                }
            });
        }

        // No auth schemes declared for this operation in the swagger definition.
        String[] localVarAuthNames = new String[] { };
        return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call subscriptionsBlockSubscriptionPostValidateBeforeCall(String subscriptionId, String blockState, String ifMatch, String ifUnmodifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'subscriptionId' is set
        if (subscriptionId == null) {
            throw new ApiException("Missing the required parameter 'subscriptionId' when calling subscriptionsBlockSubscriptionPost(Async)");
        }
        // verify the required parameter 'blockState' is set
        if (blockState == null) {
            throw new ApiException("Missing the required parameter 'blockState' when calling subscriptionsBlockSubscriptionPost(Async)");
        }
        com.squareup.okhttp.Call call = subscriptionsBlockSubscriptionPostCall(subscriptionId, blockState, ifMatch, ifUnmodifiedSince, progressListener, progressRequestListener);
        return call;
    }

    /**
     * Block a subscription
     * This operation can be used to block a subscription. Along with the request, `blockState` must be specified as a query parameter. 1. `BLOCKED` : Subscription is completely blocked for both Production and Sandbox environments. 2. `PROD_ONLY_BLOCKED` : Subscription is blocked for Production environment only.
     * @param subscriptionId Subscription Id (required)
     * @param blockState Subscription block state. (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public void subscriptionsBlockSubscriptionPost(String subscriptionId, String blockState, String ifMatch, String ifUnmodifiedSince) throws ApiException {
        subscriptionsBlockSubscriptionPostWithHttpInfo(subscriptionId, blockState, ifMatch, ifUnmodifiedSince);
    }

    /**
     * Block a subscription
     * This operation can be used to block a subscription. Along with the request, `blockState` must be specified as a query parameter. 1. `BLOCKED` : Subscription is completely blocked for both Production and Sandbox environments. 2. `PROD_ONLY_BLOCKED` : Subscription is blocked for Production environment only.
     * @param subscriptionId Subscription Id (required)
     * @param blockState Subscription block state. (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @return ApiResponse&lt;Void&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<Void> subscriptionsBlockSubscriptionPostWithHttpInfo(String subscriptionId, String blockState, String ifMatch, String ifUnmodifiedSince) throws ApiException {
        com.squareup.okhttp.Call call = subscriptionsBlockSubscriptionPostValidateBeforeCall(subscriptionId, blockState, ifMatch, ifUnmodifiedSince, null, null);
        return apiClient.execute(call);
    }

    /**
     * Block a subscription (asynchronously)
     * This operation can be used to block a subscription. Along with the request, `blockState` must be specified as a query parameter. 1. `BLOCKED` : Subscription is completely blocked for both Production and Sandbox environments. 2. `PROD_ONLY_BLOCKED` : Subscription is blocked for Production environment only.
     * @param subscriptionId Subscription Id (required)
     * @param blockState Subscription block state. (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call subscriptionsBlockSubscriptionPostAsync(String subscriptionId, String blockState, String ifMatch, String ifUnmodifiedSince, final ApiCallback<Void> callback) throws ApiException {

        // Bridge the generic ApiCallback onto okhttp's progress listener interfaces.
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        com.squareup.okhttp.Call call = subscriptionsBlockSubscriptionPostValidateBeforeCall(subscriptionId, blockState, ifMatch, ifUnmodifiedSince, progressListener, progressRequestListener);
        apiClient.executeAsync(call, callback);
        return call;
    }

    /* Build call for subscriptionsSubscriptionIdGet */
    private com.squareup.okhttp.Call subscriptionsSubscriptionIdGetCall(String subscriptionId, String accept, String ifNoneMatch, String ifModifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = null;

        // create path and map variables
        // NOTE(review): toString() is redundant here — subscriptionId is already a String.
        String localVarPath = "/subscriptions/{subscriptionId}".replaceAll("\\{format\\}","json")
            .replaceAll("\\{" + "subscriptionId" + "\\}", apiClient.escapeString(subscriptionId.toString()));

        List<Pair> localVarQueryParams = new ArrayList<Pair>();

        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        // Caller-supplied "Accept" below may be overwritten by the generated Accept
        // selection further down; generator quirk, left as generated.
        if (accept != null)
            localVarHeaderParams.put("Accept", apiClient.parameterToString(accept));
        if (ifNoneMatch != null)
            localVarHeaderParams.put("If-None-Match", apiClient.parameterToString(ifNoneMatch));
        if (ifModifiedSince != null)
            localVarHeaderParams.put("If-Modified-Since", apiClient.parameterToString(ifModifiedSince));

        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        if(progressListener != null) {
            // NOTE(review): interceptor accumulates on the shared client per call; see
            // subscriptionsBlockSubscriptionPostCall for details.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                        .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                        .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] { };
        return apiClient.buildCall(localVarPath, "GET", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call subscriptionsSubscriptionIdGetValidateBeforeCall(String subscriptionId, String accept, String ifNoneMatch, String ifModifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'subscriptionId' is set
        if (subscriptionId == null) {
            throw new ApiException("Missing the required parameter 'subscriptionId' when calling subscriptionsSubscriptionIdGet(Async)");
        }
        com.squareup.okhttp.Call call = subscriptionsSubscriptionIdGetCall(subscriptionId, accept, ifNoneMatch, ifModifiedSince, progressListener, progressRequestListener);
        return call;
    }

    /**
     * Get details of a subscription
     * This operation can be used to get details of a single subscription.
     * @param subscriptionId Subscription Id (required)
     * @param accept Media types acceptable for the response. Default is application/json. (optional, default to application/json)
     * @param ifNoneMatch Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @param ifModifiedSince Validator for conditional requests; based on Last Modified header of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @return ExtendedSubscription
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ExtendedSubscription subscriptionsSubscriptionIdGet(String subscriptionId, String accept, String ifNoneMatch, String ifModifiedSince) throws ApiException {
        ApiResponse<ExtendedSubscription> resp = subscriptionsSubscriptionIdGetWithHttpInfo(subscriptionId, accept, ifNoneMatch, ifModifiedSince);
        return resp.getData();
    }

    /**
     * Get details of a subscription
     * This operation can be used to get details of a single subscription.
     * @param subscriptionId Subscription Id (required)
     * @param accept Media types acceptable for the response. Default is application/json. (optional, default to application/json)
     * @param ifNoneMatch Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @param ifModifiedSince Validator for conditional requests; based on Last Modified header of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @return ApiResponse&lt;ExtendedSubscription&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<ExtendedSubscription> subscriptionsSubscriptionIdGetWithHttpInfo(String subscriptionId, String accept, String ifNoneMatch, String ifModifiedSince) throws ApiException {
        com.squareup.okhttp.Call call = subscriptionsSubscriptionIdGetValidateBeforeCall(subscriptionId, accept, ifNoneMatch, ifModifiedSince, null, null);
        Type localVarReturnType = new TypeToken<ExtendedSubscription>(){}.getType();
        return apiClient.execute(call, localVarReturnType);
    }

    /**
     * Get details of a subscription (asynchronously)
     * This operation can be used to get details of a single subscription.
     * @param subscriptionId Subscription Id (required)
     * @param accept Media types acceptable for the response. Default is application/json. (optional, default to application/json)
     * @param ifNoneMatch Validator for conditional requests; based on the ETag of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @param ifModifiedSince Validator for conditional requests; based on Last Modified header of the formerly retrieved variant of the resource (Will be supported in future). (optional)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call subscriptionsSubscriptionIdGetAsync(String subscriptionId, String accept, String ifNoneMatch, String ifModifiedSince, final ApiCallback<ExtendedSubscription> callback) throws ApiException {

        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        com.squareup.okhttp.Call call = subscriptionsSubscriptionIdGetValidateBeforeCall(subscriptionId, accept, ifNoneMatch, ifModifiedSince, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<ExtendedSubscription>(){}.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }

    /* Build call for subscriptionsUnblockSubscriptionPost */
    private com.squareup.okhttp.Call subscriptionsUnblockSubscriptionPostCall(String subscriptionId, String ifMatch, String ifUnmodifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        Object localVarPostBody = null;

        // create path and map variables
        String localVarPath = "/subscriptions/unblock-subscription".replaceAll("\\{format\\}","json");

        List<Pair> localVarQueryParams = new ArrayList<Pair>();
        if (subscriptionId != null)
            localVarQueryParams.addAll(apiClient.parameterToPairs("", "subscriptionId", subscriptionId));

        Map<String, String> localVarHeaderParams = new HashMap<String, String>();
        if (ifMatch != null)
            localVarHeaderParams.put("If-Match", apiClient.parameterToString(ifMatch));
        if (ifUnmodifiedSince != null)
            localVarHeaderParams.put("If-Unmodified-Since", apiClient.parameterToString(ifUnmodifiedSince));

        Map<String, Object> localVarFormParams = new HashMap<String, Object>();

        final String[] localVarAccepts = {
            "application/json"
        };
        final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
        if (localVarAccept != null) localVarHeaderParams.put("Accept", localVarAccept);

        final String[] localVarContentTypes = {
            "application/json"
        };
        final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
        localVarHeaderParams.put("Content-Type", localVarContentType);

        if(progressListener != null) {
            // NOTE(review): interceptor accumulates on the shared client per call; see
            // subscriptionsBlockSubscriptionPostCall for details.
            apiClient.getHttpClient().networkInterceptors().add(new com.squareup.okhttp.Interceptor() {
                @Override
                public com.squareup.okhttp.Response intercept(com.squareup.okhttp.Interceptor.Chain chain) throws IOException {
                    com.squareup.okhttp.Response originalResponse = chain.proceed(chain.request());
                    return originalResponse.newBuilder()
                        .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                        .build();
                }
            });
        }

        String[] localVarAuthNames = new String[] { };
        return apiClient.buildCall(localVarPath, "POST", localVarQueryParams, localVarPostBody, localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
    }

    @SuppressWarnings("rawtypes")
    private com.squareup.okhttp.Call subscriptionsUnblockSubscriptionPostValidateBeforeCall(String subscriptionId, String ifMatch, String ifUnmodifiedSince, final ProgressResponseBody.ProgressListener progressListener, final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
        // verify the required parameter 'subscriptionId' is set
        if (subscriptionId == null) {
            throw new ApiException("Missing the required parameter 'subscriptionId' when calling subscriptionsUnblockSubscriptionPost(Async)");
        }
        com.squareup.okhttp.Call call = subscriptionsUnblockSubscriptionPostCall(subscriptionId, ifMatch, ifUnmodifiedSince, progressListener, progressRequestListener);
        return call;
    }

    /**
     * Unblock a Subscription
     * This operation can be used to unblock a subscription specifying the subscription Id. The subscription will be fully unblocked after performing this operation.
     * @param subscriptionId Subscription Id (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public void subscriptionsUnblockSubscriptionPost(String subscriptionId, String ifMatch, String ifUnmodifiedSince) throws ApiException {
        subscriptionsUnblockSubscriptionPostWithHttpInfo(subscriptionId, ifMatch, ifUnmodifiedSince);
    }

    /**
     * Unblock a Subscription
     * This operation can be used to unblock a subscription specifying the subscription Id. The subscription will be fully unblocked after performing this operation.
     * @param subscriptionId Subscription Id (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @return ApiResponse&lt;Void&gt;
     * @throws ApiException If fail to call the API, e.g. server error or cannot deserialize the response body
     */
    public ApiResponse<Void> subscriptionsUnblockSubscriptionPostWithHttpInfo(String subscriptionId, String ifMatch, String ifUnmodifiedSince) throws ApiException {
        com.squareup.okhttp.Call call = subscriptionsUnblockSubscriptionPostValidateBeforeCall(subscriptionId, ifMatch, ifUnmodifiedSince, null, null);
        return apiClient.execute(call);
    }

    /**
     * Unblock a Subscription (asynchronously)
     * This operation can be used to unblock a subscription specifying the subscription Id. The subscription will be fully unblocked after performing this operation.
     * @param subscriptionId Subscription Id (required)
     * @param ifMatch Validator for conditional requests; based on ETag (Will be supported in future). (optional)
     * @param ifUnmodifiedSince Validator for conditional requests; based on Last Modified header (Will be supported in future). (optional)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body object
     */
    public com.squareup.okhttp.Call subscriptionsUnblockSubscriptionPostAsync(String subscriptionId, String ifMatch, String ifUnmodifiedSince, final ApiCallback<Void> callback) throws ApiException {

        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;

        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };

            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }

        com.squareup.okhttp.Call call = subscriptionsUnblockSubscriptionPostValidateBeforeCall(subscriptionId, ifMatch, ifUnmodifiedSince, progressListener, progressRequestListener);
        apiClient.executeAsync(call, callback);
        return call;
    }
}
| |
package org.aspenos.util;
import java.io.*;
import java.util.*;
import javax.servlet.http.*;
import javax.servlet.*;
//import org.aspenos.app.aoscontentserver.util.ServerInit;
public class WebFileFetch extends WebFetch {
// Local directory files are saved into; normalized to end with File.separator by setSaveDir().
private String _saveDir;
// Credentials used only when _doAuthentication is true.
private String _username;
private String _password;
// When true, the doAuth*Stream variants (inherited from WebFetch) are used for all fetches.
private boolean _doAuthentication;
/**
 * Fetches a remote resource and stores it in the configured save directory under
 * the file's original name (the last path segment of {@code url}).
 *
 * @param url    remote resource to fetch
 * @param params request parameters forwarded to the fetch helpers
 * @param method "get" or "post" (case-insensitive); {@code null} defaults to "get"
 * @return the saved file
 * @throws Exception if the fetch or the local write fails
 */
public File saveRemoteFile(String url, Map params, String method)
        throws Exception {
    String verb = (method == null) ? "get" : method;
    boolean usePost = verb.equalsIgnoreCase("post");

    InputStream is;
    if (usePost) {
        is = _doAuthentication
                ? doAuthPostStream(_username, _password, url, params)
                : doPostStream(url, params);
    } else {
        is = _doAuthentication
                ? doAuthGetStream(_username, _password, url, params)
                : doGetStream(url, params);
    }

    // Derive the local file name from the last path segment of the URL.
    int slash = url.lastIndexOf("/");
    String fileName = (slash == -1) ? url : url.substring(slash + 1);

    return readIntoFile(is, fileName);
}
/**
 * Fetches a remote resource and stores it as a temporary file.
 *
 * @param url    remote resource to fetch
 * @param params request parameters forwarded to the fetch helpers
 * @param method "get" or "post" (case-insensitive); {@code null} defaults to "get"
 * @param prefix temp-file name prefix
 * @param suffix temp-file name suffix
 * @param tmpDir directory for the temp file
 * @return the temporary file containing the fetched content
 * @throws Exception if the fetch or the local write fails
 */
public File getAsTempFile(String url, Map params, String method,
                          String prefix, String suffix, File tmpDir)
        throws Exception {
    String verb = (method == null) ? "get" : method;

    InputStream is;
    if (verb.equalsIgnoreCase("post")) {
        is = _doAuthentication
                ? doAuthPostStream(_username, _password, url, params)
                : doPostStream(url, params);
    } else {
        is = _doAuthentication
                ? doAuthGetStream(_username, _password, url, params)
                : doGetStream(url, params);
    }

    return readIntoTempFile(is, prefix, suffix, tmpDir);
}
/**
 * Fetches a remote resource and parses it as a java.util.Properties file.
 *
 * @param url    remote resource to fetch
 * @param params request parameters forwarded to the fetch helpers
 * @param method "get" or "post" (case-insensitive); {@code null} defaults to "get"
 * @return the parsed properties
 * @throws Exception if the fetch fails or the content cannot be parsed
 */
public Properties loadProperties(String url, Map params, String method)
        throws Exception {
    InputStream is;
    if (method == null)
        method = "get";
    if (method.equalsIgnoreCase("post")) {
        if (_doAuthentication)
            is = doAuthPostStream(_username, _password, url, params);
        else
            is = doPostStream(url, params);
    } else {
        if (_doAuthentication) {
            is = doAuthGetStream(_username, _password, url, params);
        } else {
            is = doGetStream(url, params);
        }
    }
    Properties props = new Properties();
    try {
        props.load(is);
    } finally {
        // Properties.load does not close its stream; the original leaked it.
        is.close();
    }
    return props;
}
/******************************************************/
/******************************************************/
/**
*
*/
public void setDoAuthentication(boolean b) {
_doAuthentication = b;
}
/**
*
*/
public boolean getDoAuthentication() {
return _doAuthentication;
}
/**
*
*/
public void setAuthInfo(String username, String password) {
_username = username;
_password = password;
}
/**
*
*/
public void setSaveDir(String saveDir) {
if (!saveDir.endsWith(File.separator))
saveDir += File.separator;
File f = new File(saveDir);
if (!f.exists())
f.mkdirs();
_saveDir = saveDir;
}
/**
*
*/
public String getSaveDir() {
return _saveDir;
}
/**
*
*/
public File getExistingFile(String filePath) {
String fileName;
int pos = filePath.lastIndexOf("/");
if (pos == -1)
fileName = filePath;
else
fileName = filePath.substring(pos+1);
File f = null;
try {
f = new File(_saveDir + fileName);
if (!f.exists())
f = null;
} catch (Exception ex) {
// ok to absorb exception
}
return f;
}
/******************************************************/
/******************************************************/
/**
* Reads a stream into the given file path. If a file
* already exists at that path, it is deleted first.
*/
private File readIntoFile(InputStream is, String fileName)
throws Exception {
File f = new File(_saveDir + fileName);
if (f.exists())
f.delete();
f.createNewFile();
writeToFile(is, f);
return f;
}
/**
*
*/
private File readIntoTempFile(InputStream is, String prefix,
String suffix, File tmpDir)
throws Exception {
if (tmpDir == null)
tmpDir = new File(_saveDir);
File tmpFile = File.createTempFile(prefix, suffix, tmpDir);
tmpFile.deleteOnExit();
writeToFile(is, tmpFile);
return tmpFile;
}
private void writeToFile(InputStream is, File f)
throws IOException {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(f);
/*
byte[] buff = new byte[64];
while ((is.read(buff)) > 0) {
fos.write(buff);
}
*/
int b;
while ((b=is.read()) != -1) {
fos.write(b);
}
fos.flush();
} finally {
if (fos != null)
fos.close();
fos = null;
is.close();
is = null;
}
}
}
| |
/**
* Copyright 2014-2017 Riccardo Massera (TheCoder4.Eu) and Stephan Rauh (http://www.beyondjava.net).
*
* This file is part of BootsFaces.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.bootsfaces.component.navBar;
import java.io.IOException;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.context.ResponseWriter;
import javax.faces.render.FacesRenderer;
import net.bootsfaces.component.navLink.AbstractNavLink;
import net.bootsfaces.render.CoreRenderer;
import net.bootsfaces.render.Tooltip;
/** This class generates the HTML code of <b:navBar />. */
@FacesRenderer(componentFamily = "net.bootsfaces.component", rendererType = "net.bootsfaces.component.navBar.NavBar")
public class NavBarRenderer extends CoreRenderer {

	/**
	 * Generates the opening HTML of the current b:navBar.
	 * <code>encodeBegin</code> generates the start of the component. After that,
	 * the JSF framework calls <code>encodeChildren()</code> to generate the
	 * HTML code between the beginning and the end of the component. After that,
	 * <code>encodeEnd()</code> is called to generate the rest of the HTML code.
	 *
	 * @param context
	 *            the FacesContext.
	 * @param component
	 *            the current b:navBar.
	 * @throws IOException
	 *             thrown if something goes wrong when writing the HTML code.
	 */
	@Override
	public void encodeBegin(FacesContext context, UIComponent component) throws IOException {
		if (!component.isRendered()) {
			return;
		}
		// NOTE: the old version repeated the isRendered() guard a second time; removed.
		NavBar navBar = (NavBar) component;
		ResponseWriter rw = context.getResponseWriter();
		String clientId = navBar.getClientId(context);

		/*
		 * The <nav> tag defines a set of navigation links. The <nav> element is
		 * intended only for major block of navigation links. The <nav> tag is
		 * supported in Internet Explorer 9, Firefox, Opera, Chrome, and Safari.
		 * See http://www.w3schools.com/tags/tag_nav.asp Note: Internet Explorer
		 * 8 and earlier versions, do not support the <nav> tag. When IE8 will
		 * be dropped there will be HTML5 <nav> tag instead of <div>
		 */
		rw.startElement("div", navBar);
		rw.writeAttribute("id", clientId, "id");
		Tooltip.generateTooltip(context, navBar, rw);
		rw.writeAttribute("class", computeStyleClasses(navBar), "class");
		super.writeAttribute(rw, "style", navBar.getStyle());
		rw.writeAttribute("role", "navigation", null);

		rw.startElement("div", navBar);
		rw.writeAttribute("class", navBar.isFluid() ? "container-fluid" : "container", "class");
		// x Layout Centrato. TODO : layout full width

		rw.startElement("div", navBar);
		rw.writeAttribute("class", "navbar-header", "class"); // navbar-header
		encodeToggleButton(context, navBar, rw, clientId);
		encodeBrand(navBar, rw);
		rw.endElement("div"); // navbar-header

		/*
		 * <!-- Collect the nav links, forms, and other content for toggling -->
		 * <div class="collapse navbar-collapse navbar-ex1-collapse">
		 */
		rw.startElement("div", navBar);
		rw.writeAttribute("id", escapeClientId(clientId) + "_inner", "id");
		rw.writeAttribute("class", "collapse navbar-collapse navbar-ex1-collapse", "class");
	}

	/**
	 * Builds the CSS class string of the outer navbar div from the component's
	 * styleClass, inverse flag and positioning attributes. The new
	 * position/sticky API takes precedence; the legacy fixed/static API is
	 * only consulted when no position is set.
	 */
	private String computeStyleClasses(NavBar navBar) {
		String ns = navBar.getStyleClass();
		if (ns == null) {
			ns = "";
		} else {
			ns += " ";
		}
		ns += navBar.isInverse() ? "navbar navbar-inverse" : "navbar navbar-default";

		String position = navBar.getPosition();
		if (position != null) {
			// new API
			if (position.equals("top")) {
				ns += navBar.isSticky() ? " navbar-fixed-top" : " navbar-static-top";
			} else if (position.equals("bottom")) {
				ns += navBar.isSticky() ? " navbar-fixed-bottom" : " navbar-fixed-bottom navbar-nonsticky";
			}
			// any other value: don't add any positioning class
		} else {
			// legacy API
			String fixed = navBar.getFixed();
			if (fixed != null) {
				if (fixed.equals("top")) {
					ns += " navbar-fixed-top";
				}
				if (fixed.equals("bottom")) {
					ns += " navbar-fixed-bottom";
				}
				if (fixed.equals("non-sticky")) {
					ns += " navbar-fixed-bottom navbar-nonsticky";
				}
			}
			if (navBar.isStatic()) {
				ns += " navbar-static-top";
			}
		}
		return ns;
	}

	/**
	 * Renders the hamburger button that toggles the collapsed menu on small
	 * screens. Uses the "kebab" facet as the icon if supplied, otherwise the
	 * three default icon bars.
	 */
	private void encodeToggleButton(FacesContext context, NavBar navBar, ResponseWriter rw, String clientId)
			throws IOException {
		rw.startElement("button", navBar);
		String tabindex = navBar.getTabindex();
		if (!"0".equals(tabindex)) {
			writeAttribute(rw, "tabindex", tabindex, null);
		}
		rw.writeAttribute("type", "button", "type");
		rw.writeAttribute("class", "navbar-toggle", "class");
		// third arg is the component property name; was "type" by copy/paste
		rw.writeAttribute("data-toggle", "collapse", "data-toggle");
		rw.writeAttribute("data-target", "#" + escapeClientId(clientId) + "_inner", "data-target");

		rw.startElement("span", navBar);
		rw.writeAttribute("class", "sr-only", "class");
		rw.writeText("Toggle navigation", null);
		rw.endElement("span");

		if (navBar.getFacet("kebab") != null) {
			navBar.getFacet("kebab").encodeAll(context);
		} else {
			for (int i = 0; i < 3; i++) {
				rw.startElement("span", navBar);
				rw.writeAttribute("class", "icon-bar", "class");
				rw.endElement("span");
			}
		}
		rw.endElement("button");
	}

	/**
	 * Renders the optional brand link (text and/or image). Nothing is
	 * rendered when neither brand nor brandImg is set.
	 */
	private void encodeBrand(NavBar navBar, ResponseWriter rw) throws IOException {
		String brand = navBar.getBrand();
		String brandImg = navBar.getBrandImg();
		if (brand == null && brandImg == null) {
			return;
		}
		rw.startElement("a", navBar);
		String onClick = navBar.getOnclick();
		if (null != onClick) {
			rw.writeAttribute("onclick", onClick, "onclick");
		}
		String styleClass = navBar.getBrandStyleClass();
		if (null == styleClass) {
			rw.writeAttribute("class", "navbar-brand", "class");
		} else {
			rw.writeAttribute("class", "navbar-brand " + styleClass, "class");
		}
		writeAttribute(rw, "style", navBar.getBrandStyle());
		String href = navBar.getBrandHref();
		if (href == null) {
			rw.writeAttribute("href", "#", "href");
		} else {
			rw.writeAttribute("href", href, "href");
			writeAttribute(rw, "target", navBar.getBrandTarget());
		}
		rw.startElement("span", navBar);
		if (brandImg != null) {
			String altText = navBar.getAlt();
			if (altText == null)
				altText = "Brand"; // default alt text for screen readers
			rw.startElement("img", navBar);
			rw.writeAttribute("alt", altText, "alt");
			rw.writeAttribute("src", brandImg, "src");
			writeAttribute(rw, "style", navBar.getBrandImgStyle());
			writeAttribute(rw, "class", navBar.getBrandImgStyleClass());
			rw.endElement("img");
		}
		if (brand != null)
			rw.writeText(brand, null);
		rw.endElement("span");
		rw.endElement("a");
	}

	/**
	 * Closes the HTML elements opened by <code>encodeBegin()</code> (collapse
	 * area, container and the outer navbar div) and activates the tooltips.
	 *
	 * @param context
	 *            the FacesContext.
	 * @param component
	 *            the current b:navBar.
	 * @throws IOException
	 *             thrown if something goes wrong when writing the HTML code.
	 */
	@Override
	public void encodeEnd(FacesContext context, UIComponent component) throws IOException {
		if (!component.isRendered()) {
			return;
		}
		ResponseWriter rw = context.getResponseWriter();
		rw.endElement("div"); // collapse
		rw.endElement("div"); // container
		rw.endElement("div"); // navbar
		Tooltip.activateTooltips(context, component);
	}
}
| |
/**
*/
package com.specmate.migration.test.objectadded.testmodel.artefact.util;
import com.specmate.migration.test.objectadded.testmodel.artefact.*;
import com.specmate.migration.test.objectadded.testmodel.base.IContainer;
import com.specmate.migration.test.objectadded.testmodel.base.IContentElement;
import com.specmate.migration.test.objectadded.testmodel.base.IID;
import com.specmate.migration.test.objectadded.testmodel.base.IModifiable;
import com.specmate.migration.test.objectadded.testmodel.base.ITestable;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.emf.common.notify.impl.AdapterFactoryImpl;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc -->
* The <b>Adapter Factory</b> for the model.
* It provides an adapter <code>createXXX</code> method for each class of the model.
* <!-- end-user-doc -->
* @see com.specmate.migration.test.objectadded.testmodel.artefact.ArtefactPackage
* @generated
*/
public class ArtefactAdapterFactory extends AdapterFactoryImpl {
	// NOTE(review): this class is EMF-generated (@generated tags below);
	// manual code changes would be lost on regeneration, so only comments
	// are touched here.
	/**
	 * The cached model package.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static ArtefactPackage modelPackage;

	/**
	 * Creates an instance of the adapter factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ArtefactAdapterFactory() {
		// Lazily cache the singleton package on first construction.
		if (modelPackage == null) {
			modelPackage = ArtefactPackage.eINSTANCE;
		}
	}

	/**
	 * Returns whether this factory is applicable for the type of the object.
	 * <!-- begin-user-doc -->
	 * This implementation returns <code>true</code> if the object is either the model's package or is an instance object of the model.
	 * <!-- end-user-doc -->
	 * @return whether this factory is applicable for the type of the object.
	 * @generated
	 */
	@Override
	public boolean isFactoryForType(Object object) {
		if (object == modelPackage) {
			return true;
		}
		// Instance objects qualify when their EClass belongs to this package.
		if (object instanceof EObject) {
			return ((EObject)object).eClass().getEPackage() == modelPackage;
		}
		return false;
	}

	/**
	 * The switch that delegates to the <code>createXXX</code> methods.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ArtefactSwitch<Adapter> modelSwitch =
		new ArtefactSwitch<Adapter>() {
			@Override
			public Adapter caseDiagram(Diagram object) {
				return createDiagramAdapter();
			}
			@Override
			public Adapter caseDocument(Document object) {
				return createDocumentAdapter();
			}
			@Override
			public Adapter caseSketch(Sketch object) {
				return createSketchAdapter();
			}
			@Override
			public Adapter caseITestable(ITestable object) {
				return createITestableAdapter();
			}
			@Override
			public Adapter caseIModifiable(IModifiable object) {
				return createIModifiableAdapter();
			}
			@Override
			public Adapter caseIID(IID object) {
				return createIIDAdapter();
			}
			@Override
			public Adapter caseIContentElement(IContentElement object) {
				return createIContentElementAdapter();
			}
			@Override
			public Adapter caseIContainer(IContainer object) {
				return createIContainerAdapter();
			}
			@Override
			public Adapter defaultCase(EObject object) {
				return createEObjectAdapter();
			}
		};

	/**
	 * Creates an adapter for the <code>target</code>.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @param target the object to adapt.
	 * @return the adapter for the <code>target</code>.
	 * @generated
	 */
	@Override
	public Adapter createAdapter(Notifier target) {
		// The cast throws ClassCastException for notifiers that are not EObjects.
		return modelSwitch.doSwitch((EObject)target);
	}

	/**
	 * Creates a new adapter for an object of class '{@link com.specmate.migration.test.objectadded.testmodel.artefact.Diagram <em>Diagram</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.specmate.migration.test.objectadded.testmodel.artefact.Diagram
	 * @generated
	 */
	public Adapter createDiagramAdapter() {
		return null;
	}

	/**
	 * Creates a new adapter for an object of class '{@link com.specmate.migration.test.objectadded.testmodel.artefact.Document <em>Document</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.specmate.migration.test.objectadded.testmodel.artefact.Document
	 * @generated
	 */
	public Adapter createDocumentAdapter() {
		return null;
	}

	/**
	 * Creates a new adapter for an object of class '{@link com.specmate.migration.test.objectadded.testmodel.artefact.Sketch <em>Sketch</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.specmate.migration.test.objectadded.testmodel.artefact.Sketch
	 * @generated
	 */
	public Adapter createSketchAdapter() {
		return null;
	}

	/**
	 * Creates a new adapter for an object of class '{@link com.specmate.migration.test.objectadded.testmodel.base.ITestable <em>ITestable</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.specmate.migration.test.objectadded.testmodel.base.ITestable
	 * @generated
	 */
	public Adapter createITestableAdapter() {
		return null;
	}

	/**
	 * Creates a new adapter for an object of class '{@link com.specmate.migration.test.objectadded.testmodel.base.IModifiable <em>IModifiable</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.specmate.migration.test.objectadded.testmodel.base.IModifiable
	 * @generated
	 */
	public Adapter createIModifiableAdapter() {
		return null;
	}

	/**
	 * Creates a new adapter for an object of class '{@link com.specmate.migration.test.objectadded.testmodel.base.IID <em>IID</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.specmate.migration.test.objectadded.testmodel.base.IID
	 * @generated
	 */
	public Adapter createIIDAdapter() {
		return null;
	}

	/**
	 * Creates a new adapter for an object of class '{@link com.specmate.migration.test.objectadded.testmodel.base.IContentElement <em>IContent Element</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.specmate.migration.test.objectadded.testmodel.base.IContentElement
	 * @generated
	 */
	public Adapter createIContentElementAdapter() {
		return null;
	}

	/**
	 * Creates a new adapter for an object of class '{@link com.specmate.migration.test.objectadded.testmodel.base.IContainer <em>IContainer</em>}'.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null so that we can easily ignore cases;
	 * it's useful to ignore a case when inheritance will catch all the cases anyway.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @see com.specmate.migration.test.objectadded.testmodel.base.IContainer
	 * @generated
	 */
	public Adapter createIContainerAdapter() {
		return null;
	}

	/**
	 * Creates a new adapter for the default case.
	 * <!-- begin-user-doc -->
	 * This default implementation returns null.
	 * <!-- end-user-doc -->
	 * @return the new adapter.
	 * @generated
	 */
	public Adapter createEObjectAdapter() {
		return null;
	}

} //ArtefactAdapterFactory
| |
//========================================================================
//Copyright 2007-2010 David Yu dyuproject@gmail.com
//------------------------------------------------------------------------
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//========================================================================
package io.protostuff.me;
import java.io.IOException;
/**
* The flexible output for outputs that use {@link WriteSession}.
*
* @author David Yu
* @created Sep 20, 2010
*/
public abstract class WriteSink
{
public static WriteSink BUFFERED = new Buffered();
public static WriteSink STREAMED = new Streamed();
    /**
     * A {@link WriteSink} that keeps all output in memory: whenever the
     * current {@link LinkedBuffer} runs out of room, the chain is grown with
     * a new buffer (or, for large byte arrays, the caller's array is wrapped
     * zero-copy instead of being copied).
     */
    public static class Buffered extends WriteSink
    {
        /** Grows the buffer chain; nothing is flushed in buffered mode. */
        public LinkedBuffer drain(final WriteSession session,
                final LinkedBuffer lb) throws IOException
        {
            // grow
            return new LinkedBuffer(session.nextBufferSize, lb);
        }

        /** Base-64 encodes the given byte range into the buffer chain. */
        public LinkedBuffer writeByteArrayB64(final byte[] value,
                final int offset, final int valueLen,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            return B64Code.encode(value, offset, valueLen, session, lb);
        }

        /**
         * Writes a raw byte range. Values that fit are copied into the
         * current buffer; values larger than the current free space plus the
         * next buffer size are wrapped zero-copy as their own chain node.
         */
        public LinkedBuffer writeByteArray(final byte[] value,
                final int offset, final int valueLen,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            if (valueLen == 0)
                return lb;
            session.size += valueLen;
            final int available = lb.buffer.length - lb.offset;
            if (valueLen > available)
            {
                if (available + session.nextBufferSize < valueLen)
                {
                    // too large ... so we wrap and insert (zero-copy)
                    if (available == 0)
                    {
                        // buffer was actually full ... return a fresh buffer
                        return new LinkedBuffer(session.nextBufferSize,
                                new LinkedBuffer(value, offset, offset + valueLen, lb));
                    }
                    // continue with the existing byte array of the previous buffer
                    return new LinkedBuffer(lb,
                            new LinkedBuffer(value, offset, offset + valueLen, lb));
                }
                // copy what can fit
                System.arraycopy(value, offset, lb.buffer, lb.offset, available);
                lb.offset += available;
                // grow
                lb = new LinkedBuffer(session.nextBufferSize, lb);
                final int leftover = valueLen - available;
                // copy what's left
                System.arraycopy(value, offset + available, lb.buffer, 0, leftover);
                lb.offset += leftover;
                return lb;
            }
            // it fits
            System.arraycopy(value, offset, lb.buffer, lb.offset, valueLen);
            lb.offset += valueLen;
            return lb;
        }

        /** Writes one byte, growing the chain when the buffer is full. */
        public LinkedBuffer writeByte(final byte value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size++;
            if (lb.offset == lb.buffer.length)
            {
                // grow
                lb = new LinkedBuffer(session.nextBufferSize, lb);
            }
            lb.buffer[lb.offset++] = value;
            return lb;
        }

        /** Writes a fixed 4-byte int via {@code IntSerializer.writeInt32}. */
        public LinkedBuffer writeInt32(final int value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size += 4;
            if (lb.offset + 4 > lb.buffer.length)
            {
                // grow
                lb = new LinkedBuffer(session.nextBufferSize, lb);
            }
            IntSerializer.writeInt32(value, lb.buffer, lb.offset);
            lb.offset += 4;
            return lb;
        }

        /** Writes a fixed 8-byte long via {@code IntSerializer.writeInt64}. */
        public LinkedBuffer writeInt64(final long value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size += 8;
            if (lb.offset + 8 > lb.buffer.length)
            {
                // grow
                lb = new LinkedBuffer(session.nextBufferSize, lb);
            }
            IntSerializer.writeInt64(value, lb.buffer, lb.offset);
            lb.offset += 8;
            return lb;
        }

        /** Writes a fixed 4-byte int in the LE variant of the serializer. */
        public LinkedBuffer writeInt32LE(final int value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size += 4;
            if (lb.offset + 4 > lb.buffer.length)
            {
                // grow
                lb = new LinkedBuffer(session.nextBufferSize, lb);
            }
            IntSerializer.writeInt32LE(value, lb.buffer, lb.offset);
            lb.offset += 4;
            return lb;
        }

        /** Writes a fixed 8-byte long in the LE variant of the serializer. */
        public LinkedBuffer writeInt64LE(final long value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size += 8;
            if (lb.offset + 8 > lb.buffer.length)
            {
                // grow
                lb = new LinkedBuffer(session.nextBufferSize, lb);
            }
            IntSerializer.writeInt64LE(value, lb.buffer, lb.offset);
            lb.offset += 8;
            return lb;
        }

        /**
         * Writes an int as a varint: 7 payload bits per byte, high bit set
         * on every byte except the last; grows the chain per byte as needed.
         */
        public LinkedBuffer writeVarInt32(int value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            while (true)
            {
                session.size++;
                if (lb.offset == lb.buffer.length)
                {
                    // grow
                    lb = new LinkedBuffer(session.nextBufferSize, lb);
                }
                if ((value & ~0x7F) == 0)
                {
                    lb.buffer[lb.offset++] = (byte) value;
                    return lb;
                }
                lb.buffer[lb.offset++] = (byte) ((value & 0x7F) | 0x80);
                value >>>= 7;
            }
        }

        /** Writes a long as a varint (see {@link #writeVarInt32}). */
        public LinkedBuffer writeVarInt64(long value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            while (true)
            {
                session.size++;
                if (lb.offset == lb.buffer.length)
                {
                    // grow
                    lb = new LinkedBuffer(session.nextBufferSize, lb);
                }
                if ((value & ~0x7FL) == 0)
                {
                    lb.buffer[lb.offset++] = (byte) value;
                    return lb;
                }
                lb.buffer[lb.offset++] = (byte) (((int) value & 0x7F) | 0x80);
                value >>>= 7;
            }
        }

        /** Writes the decimal string form of an int. */
        public LinkedBuffer writeStrFromInt(final int value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            return StringSerializer.writeInt(value, session, lb);
        }

        /** Writes the decimal string form of a long. */
        public LinkedBuffer writeStrFromLong(final long value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            return StringSerializer.writeLong(value, session, lb);
        }

        /** Writes the string form of a float. */
        public LinkedBuffer writeStrFromFloat(final float value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            return StringSerializer.writeFloat(value, session, lb);
        }

        /** Writes the string form of a double. */
        public LinkedBuffer writeStrFromDouble(final double value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            return StringSerializer.writeDouble(value, session, lb);
        }

        /** Writes an ASCII string (one byte per char). */
        public LinkedBuffer writeStrAscii(final String value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            return StringSerializer.writeAscii(value, session, lb);
        }

        /** Writes a UTF-8 encoded string. */
        public LinkedBuffer writeStrUTF8(final String value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            return StringSerializer.writeUTF8(value, session, lb);
        }

        /** Writes a UTF-8 string prefixed with its varint-delimited length. */
        public LinkedBuffer writeStrUTF8VarDelimited(final String value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            return StringSerializer.writeUTF8VarDelimited(value, session, lb);
        }

        /** Writes a UTF-8 string prefixed with a fixed-size length. */
        public LinkedBuffer writeStrUTF8FixedDelimited(final String value,
                final boolean littleEndian, final WriteSession session, LinkedBuffer lb)
                throws IOException
        {
            return StringSerializer.writeUTF8FixedDelimited(value, littleEndian, session,
                    lb);
        }
    }
    /**
     * A {@link WriteSink} that targets {@code session.out}: when the single
     * working buffer fills up, its contents are flushed to the output stream
     * and the buffer is reset (the chain never grows).
     */
    public static class Streamed extends WriteSink
    {
        /** Flushes the buffer to the stream and resets it; never grows. */
        public LinkedBuffer drain(final WriteSession session,
                final LinkedBuffer lb) throws IOException
        {
            // flush and reset
            session.out.write(lb.buffer, lb.start, lb.offset - lb.start);
            lb.offset = lb.start;
            return lb;
        }

        /** Base-64 encodes the byte range, spilling to the stream as needed. */
        public LinkedBuffer writeByteArrayB64(final byte[] value,
                final int offset, final int valueLen,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            return B64Code.encode(value, offset, valueLen, session, session.out, lb);
        }

        /**
         * Writes a raw byte range. If it does not fit, the buffer is flushed
         * first and the value is written directly to the stream (no copy).
         */
        public LinkedBuffer writeByteArray(final byte[] value,
                final int offset, final int valueLen,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            if (valueLen == 0)
                return lb;
            session.size += valueLen;
            if (lb.offset + valueLen > lb.buffer.length)
            {
                // flush and reset
                session.out.write(lb.buffer, lb.start, lb.offset - lb.start);
                lb.offset = lb.start;
                // flush
                session.out.write(value, offset, valueLen);
                return lb;
            }
            System.arraycopy(value, offset, lb.buffer, lb.offset, valueLen);
            lb.offset += valueLen;
            return lb;
        }

        /** Writes one byte, flushing the buffer to the stream when full. */
        public LinkedBuffer writeByte(final byte value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size++;
            if (lb.offset == lb.buffer.length)
            {
                // flush and reset
                session.out.write(lb.buffer, lb.start, lb.offset - lb.start);
                lb.offset = lb.start;
            }
            lb.buffer[lb.offset++] = value;
            return lb;
        }

        /**
         * Writes a fixed 4-byte int.
         * NOTE(review): after a flush/reset this writes 4 bytes at lb.start —
         * assumes the buffer has at least 4 bytes past start; TODO confirm
         * the minimum buffer size invariant.
         */
        public LinkedBuffer writeInt32(final int value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size += 4;
            if (lb.offset + 4 > lb.buffer.length)
            {
                // flush and reset
                session.out.write(lb.buffer, lb.start, lb.offset - lb.start);
                lb.offset = lb.start;
            }
            IntSerializer.writeInt32(value, lb.buffer, lb.offset);
            lb.offset += 4;
            return lb;
        }

        /** Writes a fixed 8-byte long (see the size note on writeInt32). */
        public LinkedBuffer writeInt64(final long value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size += 8;
            if (lb.offset + 8 > lb.buffer.length)
            {
                // flush and reset
                session.out.write(lb.buffer, lb.start, lb.offset - lb.start);
                lb.offset = lb.start;
            }
            IntSerializer.writeInt64(value, lb.buffer, lb.offset);
            lb.offset += 8;
            return lb;
        }

        /** Writes a fixed 4-byte int in the LE variant of the serializer. */
        public LinkedBuffer writeInt32LE(final int value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size += 4;
            if (lb.offset + 4 > lb.buffer.length)
            {
                // flush and reset
                session.out.write(lb.buffer, lb.start, lb.offset - lb.start);
                lb.offset = lb.start;
            }
            IntSerializer.writeInt32LE(value, lb.buffer, lb.offset);
            lb.offset += 4;
            return lb;
        }

        /** Writes a fixed 8-byte long in the LE variant of the serializer. */
        public LinkedBuffer writeInt64LE(final long value,
                final WriteSession session, LinkedBuffer lb) throws IOException
        {
            session.size += 8;
            if (lb.offset + 8 > lb.buffer.length)
            {
                // flush and reset
                session.out.write(lb.buffer, lb.start, lb.offset - lb.start);
                lb.offset = lb.start;
            }
            IntSerializer.writeInt64LE(value, lb.buffer, lb.offset);
            lb.offset += 8;
            return lb;
        }

        /**
         * Writes an int as a varint: 7 payload bits per byte, high bit set
         * on all but the last byte; flushes the buffer per byte as needed.
         */
        public LinkedBuffer writeVarInt32(int value,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            while (true)
            {
                session.size++;
                if (lb.offset == lb.buffer.length)
                {
                    // flush and reset
                    session.out.write(lb.buffer, lb.start, lb.offset - lb.start);
                    lb.offset = lb.start;
                }
                if ((value & ~0x7F) == 0)
                {
                    lb.buffer[lb.offset++] = (byte) value;
                    return lb;
                }
                lb.buffer[lb.offset++] = (byte) ((value & 0x7F) | 0x80);
                value >>>= 7;
            }
        }

        /** Writes a long as a varint (see {@link #writeVarInt32}). */
        public LinkedBuffer writeVarInt64(long value,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            while (true)
            {
                session.size++;
                if (lb.offset == lb.buffer.length)
                {
                    // flush and reset
                    session.out.write(lb.buffer, lb.start, lb.offset - lb.start);
                    lb.offset = lb.start;
                }
                if ((value & ~0x7FL) == 0)
                {
                    lb.buffer[lb.offset++] = (byte) value;
                    return lb;
                }
                lb.buffer[lb.offset++] = (byte) (((int) value & 0x7F) | 0x80);
                value >>>= 7;
            }
        }

        /** Writes the decimal string form of an int to the stream/buffer. */
        public LinkedBuffer writeStrFromInt(final int value,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            return StreamedStringSerializer.writeInt(value, session, session.out, lb);
        }

        /** Writes the decimal string form of a long to the stream/buffer. */
        public LinkedBuffer writeStrFromLong(final long value,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            return StreamedStringSerializer.writeLong(value, session, session.out, lb);
        }

        /** Writes the string form of a float to the stream/buffer. */
        public LinkedBuffer writeStrFromFloat(final float value,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            return StreamedStringSerializer.writeFloat(value, session, session.out, lb);
        }

        /** Writes the string form of a double to the stream/buffer. */
        public LinkedBuffer writeStrFromDouble(final double value,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            return StreamedStringSerializer.writeDouble(value, session, session.out, lb);
        }

        /** Writes an ASCII string (one byte per char). */
        public LinkedBuffer writeStrAscii(final String value,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            return StreamedStringSerializer.writeAscii(value, session, session.out, lb);
        }

        /** Writes a UTF-8 encoded string. */
        public LinkedBuffer writeStrUTF8(final String value,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            return StreamedStringSerializer.writeUTF8(value, session, session.out, lb);
        }

        /** Writes a UTF-8 string prefixed with its varint-delimited length. */
        public LinkedBuffer writeStrUTF8VarDelimited(final String value,
                final WriteSession session, final LinkedBuffer lb) throws IOException
        {
            return StreamedStringSerializer.writeUTF8VarDelimited(value, session,
                    session.out, lb);
        }

        /** Writes a UTF-8 string prefixed with a fixed-size length. */
        public LinkedBuffer writeStrUTF8FixedDelimited(final String value,
                final boolean littleEndian, final WriteSession session,
                final LinkedBuffer lb) throws IOException
        {
            return StreamedStringSerializer.writeUTF8FixedDelimited(value,
                    littleEndian, session, session.out, lb);
        }
    }
;
    /**
     * Called when {@code lb} is exhausted: either grows the chain (buffered
     * mode) or flushes it to the session's output stream (streamed mode).
     */
    public abstract LinkedBuffer drain(final WriteSession session,
            final LinkedBuffer lb) throws IOException;
public final LinkedBuffer writeByteArrayB64(final byte[] value,
final WriteSession session, final LinkedBuffer lb) throws IOException
{
return writeByteArrayB64(value, 0, value.length, session, lb);
}
    /** Writes {@code length} bytes of {@code value} from {@code offset}, base-64 encoded. */
    public abstract LinkedBuffer writeByteArrayB64(final byte[] value,
            final int offset, final int length, final WriteSession session, final LinkedBuffer lb)
            throws IOException;
public final LinkedBuffer writeByteArray(final byte[] value,
final WriteSession session, final LinkedBuffer lb) throws IOException
{
return writeByteArray(value, 0, value.length, session, lb);
}
    /** Writes {@code length} raw bytes of {@code value} starting at {@code offset}. */
    public abstract LinkedBuffer writeByteArray(final byte[] value,
            final int offset, final int length, final WriteSession session, final LinkedBuffer lb)
            throws IOException;

    /** Writes a single raw byte. */
    public abstract LinkedBuffer writeByte(final byte value,
            final WriteSession session, final LinkedBuffer lb) throws IOException;

    // public abstract LinkedBuffer writeBool(final boolean value,
    // final WriteSession session, final LinkedBuffer lb) throws IOException;

    /** Writes an int as 4 fixed bytes via {@code IntSerializer.writeInt32}. */
    public abstract LinkedBuffer writeInt32(final int value,
            final WriteSession session, final LinkedBuffer lb) throws IOException;

    /** Writes a long as 8 fixed bytes via {@code IntSerializer.writeInt64}. */
    public abstract LinkedBuffer writeInt64(final long value,
            final WriteSession session, final LinkedBuffer lb) throws IOException;
public final LinkedBuffer writeFloat(final float value,
final WriteSession session, final LinkedBuffer lb) throws IOException
{
return writeInt32(Float.floatToIntBits(value), session, lb);
}
public final LinkedBuffer writeDouble(final double value,
final WriteSession session, final LinkedBuffer lb) throws IOException
{
return writeInt64(Double.doubleToLongBits(value), session, lb);
}
/**
 * Writes a 32-bit integer in little-endian byte order.
 */
public abstract LinkedBuffer writeInt32LE(final int value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes a 64-bit integer in little-endian byte order.
 */
public abstract LinkedBuffer writeInt64LE(final long value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes {@code value}'s IEEE-754 bit pattern in little-endian byte order
 * via {@link #writeInt32LE}.
 */
public final LinkedBuffer writeFloatLE(final float value,
        final WriteSession session, final LinkedBuffer lb) throws IOException
{
    final int bits = Float.floatToIntBits(value);
    return writeInt32LE(bits, session, lb);
}
/**
 * Writes {@code value}'s IEEE-754 bit pattern in little-endian byte order
 * via {@link #writeInt64LE}.
 */
public final LinkedBuffer writeDoubleLE(final double value,
        final WriteSession session, final LinkedBuffer lb) throws IOException
{
    final long bits = Double.doubleToLongBits(value);
    return writeInt64LE(bits, session, lb);
}
/**
 * Writes a 32-bit integer using variable-length (varint) encoding.
 */
public abstract LinkedBuffer writeVarInt32(final int value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes a 64-bit integer using variable-length (varint) encoding.
 */
public abstract LinkedBuffer writeVarInt64(final long value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes the decimal string representation of {@code value}.
 */
public abstract LinkedBuffer writeStrFromInt(final int value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes the decimal string representation of {@code value}.
 */
public abstract LinkedBuffer writeStrFromLong(final long value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes the string representation of {@code value}.
 */
public abstract LinkedBuffer writeStrFromFloat(final float value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes the string representation of {@code value}.
 */
public abstract LinkedBuffer writeStrFromDouble(final double value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes {@code value} assuming it contains only ASCII characters.
 */
public abstract LinkedBuffer writeStrAscii(final String value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes {@code value} encoded as UTF-8, with no length delimiter.
 */
public abstract LinkedBuffer writeStrUTF8(final String value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes {@code value} as UTF-8, preceded by a varint length delimiter.
 */
public abstract LinkedBuffer writeStrUTF8VarDelimited(final String value,
        final WriteSession session, final LinkedBuffer lb) throws IOException;
/**
 * Writes {@code value} as UTF-8, preceded by a fixed-size length delimiter
 * whose byte order is selected by {@code littleEndian}.
 */
public abstract LinkedBuffer writeStrUTF8FixedDelimited(final String value,
        final boolean littleEndian, final WriteSession session,
        final LinkedBuffer lb) throws IOException;
}
| |
/*
* Copyright 2016 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.javascript.jscomp.Es6ToEs3Converter.CANNOT_CONVERT;
import com.google.common.collect.ImmutableList;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;
/**
 * Tests for {@link Es6ExtractClasses}, the pass that extracts class
 * expressions into named {@code const} declarations (e.g.
 * {@code testcode$classdecl$var0}) so that later passes can transpile them.
 */
public final class Es6ExtractClassesTest extends CompilerTestCase {

  @Override
  protected CompilerPass getProcessor(Compiler compiler) {
    return new Es6ExtractClasses(compiler);
  }

  @Override
  protected void setUp() {
    setAcceptedLanguage(LanguageMode.ECMASCRIPT6);
    // Type checking is deferred until after the pass runs so the extracted
    // const declarations are resolvable.
    disableTypeCheck();
    runTypeCheckAfterProcessing = true;
  }

  /** A class expression used as a call argument is hoisted into a const. */
  public void testExtractionFromCall() {
    test(
        "f(class{});",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {};",
            "f(testcode$classdecl$var0);"));
  }

  /** Inner class names referenced from the class body are rewritten. */
  public void testSelfReference1() {
    test(
        "var Outer = class Inner { constructor() { alert(Inner); } };",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {",
            "  constructor() { alert(testcode$classdecl$var0); }",
            "};",
            "var Outer=testcode$classdecl$var0"));
    test(
        "let Outer = class Inner { constructor() { alert(Inner); } };",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {",
            "  constructor() { alert(testcode$classdecl$var0); }",
            "};",
            "let Outer=testcode$classdecl$var0"));
    test(
        "const Outer = class Inner { constructor() { alert(Inner); } };",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {",
            "  constructor() { alert(testcode$classdecl$var0); }",
            "};",
            "const Outer=testcode$classdecl$var0"));
  }

  public void testSelfReference2() {
    test(
        "alert(class C { constructor() { alert(C); } });",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {",
            "  constructor() { alert(testcode$classdecl$var0); }",
            "};",
            "alert(testcode$classdecl$var0)"));
  }

  /** References to a shadowing inner class declaration must NOT be rewritten. */
  public void testSelfReference3() {
    test(
        LINE_JOINER.join(
            "alert(class C {",
            "  m1() { class C {}; alert(C); }",
            "  m2() { alert(C); }",
            "});"),
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {",
            "  m1() { class C {}; alert(C); }",
            "  m2() { alert(testcode$classdecl$var0); }",
            "};",
            "alert(testcode$classdecl$var0)"));
  }

  public void testSelfReference_googModule() {
    test(
        LINE_JOINER.join(
            "goog.module('example');",
            "exports = class Inner { constructor() { alert(Inner); } };"),
        LINE_JOINER.join(
            "goog.module('example');",
            "const testcode$classdecl$var0 = class {",
            "  constructor() {",
            "    alert(testcode$classdecl$var0);",
            "  }",
            "};",
            "exports = testcode$classdecl$var0;"));
  }

  public void testSelfReference_qualifiedName() {
    test(
        "outer.qual.Name = class Inner { constructor() { alert(Inner); } };",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {",
            "  constructor() {",
            "    alert(testcode$classdecl$var0);",
            "  }",
            "};",
            "outer.qual.Name = testcode$classdecl$var0;"));
  }

  public void testConstAssignment() {
    // Fixed: this test was a byte-for-byte duplicate of testVarAssignment
    // (it tested `var foo`); it now covers the `const` case its name promises.
    test(
        "const foo = bar(class {});",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {};",
            "const foo = bar(testcode$classdecl$var0);"));
  }

  public void testLetAssignment() {
    test(
        "let foo = bar(class {});",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {};",
            "let foo = bar(testcode$classdecl$var0);"));
  }

  public void testVarAssignment() {
    test(
        "var foo = bar(class {});",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {};",
            "var foo = bar(testcode$classdecl$var0);"));
  }

  /** JSDoc on the original declaration is carried over to both statements. */
  public void testJSDoc() {
    test(
        "/** @unrestricted */ var foo = class bar {};",
        LINE_JOINER.join(
            "/** @unrestricted */",
            "const testcode$classdecl$var0 = class {};",
            "/** @unrestricted */",
            "var foo = testcode$classdecl$var0;"));
  }

  /** '@' in a filename must be sanitized when used in the generated name. */
  public void testFilenameContainsAt() {
    test(
        ImmutableList.of(
            SourceFile.fromCode("unusual@name", "alert(class {});")),
        ImmutableList.of(
            SourceFile.fromCode(
                "unusual@name",
                LINE_JOINER.join(
                    "const unusual$name$classdecl$var0 = class{};",
                    "alert(unusual$name$classdecl$var0);"))));
  }

  /** Extraction would change evaluation order inside short-circuit operators. */
  public void testConditionalBlocksExtractionFromCall() {
    testError("maybeTrue() && f(class{});", CANNOT_CONVERT);
  }

  public void testExtractionFromArrayLiteral() {
    test(
        "var c = [class C {}];",
        LINE_JOINER.join(
            "const testcode$classdecl$var0 = class {};",
            "var c = [testcode$classdecl$var0];"));
  }

  /** Same reasoning as short-circuit operators: ternaries block extraction. */
  public void testTernaryOperatorBlocksExtraction() {
    testError("var c = maybeTrue() ? class A {} : anotherExpr", CANNOT_CONVERT);
    testError("var c = maybeTrue() ? anotherExpr : class B {}", CANNOT_CONVERT);
  }

  public void testCannotExtract() {
    testError(
        "var c = maybeTrue() && class A extends sideEffect() {}",
        CANNOT_CONVERT);
    testError(
        LINE_JOINER.join(
            "var x;",
            "function f(x, y) {}",
            "f(x = 2, class Foo { [x=3]() {} });"),
        CANNOT_CONVERT);
  }

  /** Plain declarations and simple assignments are left for Es6ToEs3Converter. */
  public void testClassesHandledByEs6ToEs3Converter() {
    testSame("class C{}");
    testSame("var c = class {};");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.webdav.search;
import org.apache.jackrabbit.webdav.DavException;
import org.apache.jackrabbit.webdav.DavServletResponse;
import org.apache.jackrabbit.webdav.DavConstants;
import org.apache.jackrabbit.webdav.xml.DomUtil;
import org.apache.jackrabbit.webdav.xml.Namespace;
import org.apache.jackrabbit.webdav.xml.XmlSerializable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Attr;
import java.util.Map;
import java.util.HashMap;
import java.util.Collections;
import java.util.Set;
import java.util.HashSet;
/**
* <code>SearchInfo</code> parses the 'searchrequest' element of a SEARCH
* request body and performs basic validation. Both query language and the
* query itself can be access from the resulting object.<br>
* NOTE: The query is expected to be represented by the text contained in the
* Xml element specifying the query language, thus the 'basicsearch' defined
* by the Webdav Search Internet Draft is not supported by this implementation.
* <p>
*
* Example of a valid 'searchrequest' body
* <pre>
* <d:searchrequest xmlns:d="DAV:" dcr:="http://www.day.com/jcr/webdav/1.0" >
* <dcr:xpath>//sv:node[@sv:name='myapp:paragraph'][1]</dcr:xpath>
* </d:searchrequest>
* </pre>
*
* Would return the following values:
* <pre>
* getLanguageName() -> xpath
* getQuery() -> //sv:node[@sv:name='myapp:paragraph'][1]
* </pre>
*
*/
public class SearchInfo implements SearchConstants, XmlSerializable {

    private static Logger log = LoggerFactory.getLogger(SearchInfo.class);

    /** Marker value: no maximal number of results was specified. */
    public static final long NRESULTS_UNDEFINED = -1;
    /** Marker value: no offset was specified. */
    public static final long OFFSET_UNDEFINED = -1;

    private static final String LIMIT = "limit";
    private static final String NRESULTS = "nresults";
    private static final String OFFSET = "offset";

    /**
     * Set of namespace uri String which are ignored in the search request.
     */
    private static final Set<String> IGNORED_NAMESPACES;
    static {
        Set<String> s = new HashSet<String>();
        s.add(Namespace.XMLNS_NAMESPACE.getURI());
        s.add(Namespace.XML_NAMESPACE.getURI());
        s.add(DavConstants.NAMESPACE.getURI());
        IGNORED_NAMESPACES = Collections.unmodifiableSet(s);
    }

    private final String language;
    private final Namespace languageNamespace;
    private final String query;
    private final Map<String, String> namespaces;

    private long nresults = NRESULTS_UNDEFINED;
    private long offset = OFFSET_UNDEFINED;

    /**
     * Create a new <code>SearchInfo</code> instance.
     *
     * @param language name of the query language
     * @param languageNamespace namespace of the query language
     * @param query the query string
     * @param namespaces the re-mapped namespaces. Key=prefix, value=uri.
     */
    public SearchInfo(String language, Namespace languageNamespace, String query,
                      Map<String, String> namespaces) {
        this.language = language;
        this.languageNamespace = languageNamespace;
        this.query = query;
        // Defensive copy; note the typed HashMap (the original used a raw
        // HashMap, producing an unchecked-conversion warning).
        this.namespaces = Collections.unmodifiableMap(new HashMap<String, String>(namespaces));
    }

    /**
     * Create a new <code>SearchInfo</code> instance without re-mapped namespaces.
     *
     * @param language name of the query language
     * @param languageNamespace namespace of the query language
     * @param query the query string
     */
    public SearchInfo(String language, Namespace languageNamespace, String query) {
        this(language, languageNamespace, query, Collections.<String, String>emptyMap());
    }

    /**
     * Returns the name of the query language to be used.
     *
     * @return name of the query language
     */
    public String getLanguageName() {
        return language;
    }

    /**
     * Returns the namespace of the language specified with the search request element.
     *
     * @return namespace of the requested language.
     */
    public Namespace getLanguageNameSpace() {
        return languageNamespace;
    }

    /**
     * Return the query string.
     *
     * @return query string
     */
    public String getQuery() {
        return query;
    }

    /**
     * Returns the namespaces that have been re-mapped by the user.
     *
     * @return map of namespace to prefix mappings. Key=prefix, value=uri.
     */
    public Map<String, String> getNamespaces() {
        return namespaces;
    }

    /**
     * Returns the maximal number of search results that should be returned.
     *
     * @return the maximal number of search results that should be returned,
     *         or {@link #NRESULTS_UNDEFINED} if none was set.
     */
    public long getNumberResults() {
        return nresults;
    }

    /**
     * Sets the maximal number of search results that should be returned.
     *
     * @param nresults The maximal number of search results
     */
    public void setNumberResults(long nresults) {
        this.nresults = nresults;
    }

    /**
     * Returns the desired offset in the total result set.
     *
     * @return the desired offset in the total result set,
     *         or {@link #OFFSET_UNDEFINED} if none was set.
     */
    public long getOffset() {
        return offset;
    }

    /**
     * Sets the desired offset in the total result set.
     *
     * @param offset The desired offset in the total result set.
     */
    public void setOffset(long offset) {
        this.offset = offset;
    }

    /**
     * Return the xml representation of this <code>SearchInfo</code> instance.
     *
     * @param document owner document for the created elements
     * @return xml representation
     */
    public Element toXml(Document document) {
        Element sRequestElem = DomUtil.createElement(document, XML_SEARCHREQUEST, NAMESPACE);
        // Declare the user re-mapped namespaces on the request element.
        for (Map.Entry<String, String> entry : namespaces.entrySet()) {
            DomUtil.setNamespaceAttribute(sRequestElem, entry.getKey(), entry.getValue());
        }
        DomUtil.addChildElement(sRequestElem, language, languageNamespace, query);
        // Only emit a DAV:limit element if at least one of the two is defined.
        if (nresults != NRESULTS_UNDEFINED || offset != OFFSET_UNDEFINED) {
            Element limitE = DomUtil.addChildElement(sRequestElem, LIMIT, NAMESPACE);
            if (nresults != NRESULTS_UNDEFINED) {
                DomUtil.addChildElement(limitE, NRESULTS, NAMESPACE, String.valueOf(nresults));
            }
            if (offset != OFFSET_UNDEFINED) {
                // TODO define reasonable namespace...
                DomUtil.addChildElement(limitE, OFFSET, Namespace.EMPTY_NAMESPACE, String.valueOf(offset));
            }
        }
        return sRequestElem;
    }

    /**
     * Create a new <code>SearchInfo</code> from the specifying document
     * retrieved from the request body.
     *
     * @param searchRequest the root element of the SEARCH request body
     * @return a new <code>SearchInfo</code> parsed from the given element
     * @throws DavException if the root element's name is other than
     * 'searchrequest' or if it does not contain a single child element specifying
     * the query language to be used.
     */
    public static SearchInfo createFromXml(Element searchRequest) throws DavException {
        if (searchRequest == null || !XML_SEARCHREQUEST.equals(searchRequest.getLocalName())) {
            log.warn("The root element must be 'searchrequest'.");
            throw new DavException(DavServletResponse.SC_BAD_REQUEST);
        }
        Element first = DomUtil.getFirstChildElement(searchRequest);
        Attr[] nsAttributes = DomUtil.getNamespaceAttributes(searchRequest);
        Map<String, String> namespaces = new HashMap<String, String>();
        for (Attr nsAttribute : nsAttributes) {
            // filter out xmlns namespace and DAV namespace
            if (!IGNORED_NAMESPACES.contains(nsAttribute.getValue())) {
                namespaces.put(nsAttribute.getLocalName(), nsAttribute.getValue());
            }
        }
        SearchInfo sInfo;
        if (first != null) {
            sInfo = new SearchInfo(first.getLocalName(), DomUtil.getNamespace(first), DomUtil.getText(first), namespaces);
        } else {
            log.warn("A single child element is expected with the 'DAV:searchrequest'.");
            throw new DavException(DavServletResponse.SC_BAD_REQUEST);
        }
        Element limit = DomUtil.getChildElement(searchRequest, LIMIT, NAMESPACE);
        if (limit != null) {
            // try to get the value DAV:nresults element
            String nresultsStr = DomUtil.getChildTextTrim(limit, NRESULTS, NAMESPACE);
            if (nresultsStr != null) {
                try {
                    // parseLong avoids the boxing of Long.valueOf
                    sInfo.setNumberResults(Long.parseLong(nresultsStr));
                } catch (NumberFormatException e) {
                    log.error("DAV:nresults cannot be parsed into a long -> ignore.");
                }
            }
            // check if an offset is defined within the DAV:limit element.
            String offsetStr = DomUtil.getChildTextTrim(limit, OFFSET, Namespace.EMPTY_NAMESPACE);
            if (offsetStr != null) {
                try {
                    sInfo.setOffset(Long.parseLong(offsetStr));
                } catch (NumberFormatException e) {
                    log.error("'offset' cannot be parsed into a long -> ignore.");
                }
            }
        }
        return sInfo;
    }
}
| |
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package powermock.classloading;
import javassist.CtClass;
import org.junit.Test;
import org.powermock.classloading.ClassloaderExecutor;
import org.powermock.core.classloader.MockClassLoader;
import org.powermock.core.transformers.MockTransformer;
import powermock.classloading.classes.*;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.LinkedList;
import java.util.concurrent.Callable;
import static org.junit.Assert.*;
/**
 * Verifies that {@link ClassloaderExecutor} can deep-clone an object graph
 * into a {@link MockClassLoader}, execute a {@link Callable}/{@link Runnable}
 * there, and clone the result back into the original classloader. Each test
 * asserts inside the task that it is running under MockClassLoader, and
 * afterwards that the calling code is NOT (i.e. the result crossed back).
 */
public class XStreamClassloaderExecutorTest {

    /** Round-trips a full object graph and verifies field values survive. */
    @Test
    public void loadsObjectGraphInSpecifiedClassloaderAndReturnsResultInOriginalClassloader() throws Exception {
        MockClassLoader classloader = createClassloader();
        final MyReturnValue expectedConstructorValue = new MyReturnValue(new MyArgument("first value"));
        final MyClass myClass = new MyClass(expectedConstructorValue);
        final MyArgument expected = new MyArgument("A value");
        MyReturnValue[] actual = new ClassloaderExecutor(classloader).execute(new Callable<MyReturnValue[]>() {
            public MyReturnValue[] call() throws Exception {
                // Executing inside the mock classloader.
                assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass().getName());
                return myClass.myMethod(expected);
            }
        });
        // Back in the original classloader; result must have been cloned out.
        assertFalse(MockClassLoader.class.getName().equals(this.getClass().getClassLoader().getClass().getName()));
        final MyReturnValue myReturnValue = actual[0];
        assertEquals(expectedConstructorValue.getMyArgument().getValue(), myReturnValue.getMyArgument().getValue());
        assertEquals(expected.getValue(), actual[1].getMyArgument().getValue());
    }

    /** Primitive (auto-boxed) values must survive the classloader round trip. */
    @Test
    public void loadsObjectGraphThatIncludesPrimitiveValuesInSpecifiedClassloaderAndReturnsResultInOriginalClassloader()
            throws Exception {
        MockClassLoader classloader = createClassloader();
        final Integer expected = 42;
        final MyIntegerHolder myClass = new MyIntegerHolder(expected);
        Integer actual = new ClassloaderExecutor(classloader).execute(new Callable<Integer>() {
            public Integer call() throws Exception {
                assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass().getName());
                final int myInteger = myClass.getMyInteger();
                assertEquals((int) expected, myInteger);
                return myInteger;
            }
        });
        assertFalse(MockClassLoader.class.getName().equals(this.getClass().getClassLoader().getClass().getName()));
        assertEquals(expected, actual);
    }

    /** Enum constants must be mapped to the equivalent constant on each side. */
    @Test
    public void loadsObjectGraphThatIncludesEnumsInSpecifiedClassloaderAndReturnsResultInOriginalClassloader()
            throws Exception {
        MockClassLoader classloader = createClassloader();
        final MyEnum expected = MyEnum.MyEnum1;
        final MyEnumHolder myClass = new MyEnumHolder(expected);
        MyEnum actual = new ClassloaderExecutor(classloader).execute(new Callable<MyEnum>() {
            public MyEnum call() throws Exception {
                assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass().getName());
                MyEnum myEnum = myClass.getMyEnum();
                assertEquals(expected, myEnum);
                return myEnum;
            }
        });
        assertFalse(MockClassLoader.class.getName().equals(this.getClass().getClassLoader().getClass().getName()));
        assertEquals(expected, actual);
    }

    /** static final Object fields must be cloned across, not skipped. */
    @Test
    public void clonesStaticFinalObjectFields() throws Exception {
        MockClassLoader classloader = createClassloader();
        final MyStaticFinalArgumentHolder expected = new MyStaticFinalArgumentHolder();
        MyStaticFinalArgumentHolder actual = new ClassloaderExecutor(classloader)
                .execute(new Callable<MyStaticFinalArgumentHolder>() {
                    public MyStaticFinalArgumentHolder call() throws Exception {
                        assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass()
                                .getName());
                        MyStaticFinalArgumentHolder actual = new MyStaticFinalArgumentHolder();
                        assertEquals(expected.getMyObject(), actual.getMyObject());
                        return actual;
                    }
                });
        assertFalse(MockClassLoader.class.getName().equals(this.getClass().getClassLoader().getClass().getName()));
        assertEquals(expected.getMyObject(), actual.getMyObject());
    }

    /** static final primitive fields must be cloned across as well. */
    @Test
    public void clonesStaticFinalPrimitiveFields() throws Exception {
        MockClassLoader classloader = createClassloader();
        final MyStaticFinalPrimitiveHolder expected = new MyStaticFinalPrimitiveHolder();
        MyStaticFinalPrimitiveHolder actual = new ClassloaderExecutor(classloader)
                .execute(new Callable<MyStaticFinalPrimitiveHolder>() {
                    public MyStaticFinalPrimitiveHolder call() throws Exception {
                        assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass()
                                .getName());
                        MyStaticFinalPrimitiveHolder actual = new MyStaticFinalPrimitiveHolder();
                        assertEquals(expected.getMyInt(), actual.getMyInt());
                        return actual;
                    }
                });
        assertFalse(MockClassLoader.class.getName().equals(this.getClass().getClassLoader().getClass().getName()));
        assertEquals(expected.getMyInt(), actual.getMyInt());
    }

    /** static final Number (boxed) fields must be cloned across as well. */
    @Test
    public void clonesStaticFinalNumberFields() throws Exception {
        MockClassLoader classloader = createClassloader();
        final MyStaticFinalNumberHolder expected = new MyStaticFinalNumberHolder();
        MyStaticFinalNumberHolder actual = new ClassloaderExecutor(classloader)
                .execute(new Callable<MyStaticFinalNumberHolder>() {
                    public MyStaticFinalNumberHolder call() throws Exception {
                        assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass()
                                .getName());
                        MyStaticFinalNumberHolder actual = new MyStaticFinalNumberHolder();
                        assertEquals(expected.getMyLong(), actual.getMyLong());
                        return actual;
                    }
                });
        assertFalse(MockClassLoader.class.getName().equals(this.getClass().getClassLoader().getClass().getName()));
        assertEquals(expected.getMyLong(), actual.getMyLong());
    }

    /** Primitive arrays must round-trip element-for-element. */
    @Test
    public void loadsObjectGraphThatIncludesPrimitiveArraysInSpecifiedClassloaderAndReturnsResultInOriginalClassloader()
            throws Exception {
        MockClassLoader classloader = createClassloader();
        final int[] expected = new int[] { 1, 2 };
        final MyPrimitiveArrayHolder myClass = new MyPrimitiveArrayHolder(expected);
        int[] actual = new ClassloaderExecutor(classloader).execute(new Callable<int[]>() {
            public int[] call() throws Exception {
                assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass().getName());
                int[] myArray = myClass.getMyArray();
                assertArrayEquals(expected, myArray);
                return myArray;
            }
        });
        assertFalse(MockClassLoader.class.getName().equals(this.getClass().getClassLoader().getClass().getName()));
        assertArrayEquals(expected, actual);
    }

    /** Collections round-trip; every element is re-homed in the mock loader. */
    @Test
    public void loadsObjectGraphThatIncludesCollectionInSpecifiedClassloaderAndReturnsResultInOriginalClassloader()
            throws Exception {
        final MockClassLoader classloader = createClassloader();
        final Collection<MyReturnValue> expected = new LinkedList<MyReturnValue>();
        expected.add(new MyReturnValue(new MyArgument("one")));
        expected.add(new MyReturnValue(new MyArgument("two")));
        final MyCollectionHolder myClass = new MyCollectionHolder(expected);
        Collection<?> actual = new ClassloaderExecutor(classloader).execute(new Callable<Collection<?>>() {
            public Collection<?> call() throws Exception {
                assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass().getName());
                Collection<?> myCollection = myClass.getMyCollection();
                for (Object object : myCollection) {
                    // Each contained element must have been loaded by the mock loader.
                    assertEquals(MockClassLoader.class.getName(), object.getClass().getClassLoader().getClass()
                            .getName());
                }
                return myCollection;
            }
        });
        assertFalse(MockClassLoader.class.getName().equals(this.getClass().getClassLoader().getClass().getName()));
        assertEquals(2, actual.size());
        for (Object object : actual) {
            final String value = ((MyReturnValue) object).getMyArgument().getValue();
            assertTrue(value.equals("one") || value.equals("two"));
        }
    }

    /** Two fields referencing one instance must still be identical after cloning. */
    @Test
    public void usesReferenceCloningWhenTwoFieldsPointToSameInstance() throws Exception {
        final MockClassLoader classloader = createClassloader();
        final MyReferenceFieldHolder tested = new MyReferenceFieldHolder();
        assertSame(tested.getMyArgument1(), tested.getMyArgument2());
        assertSame(tested.getMyArgument1(), MyReferenceFieldHolder.MY_ARGUMENT);
        new ClassloaderExecutor(classloader).execute(new Runnable() {
            public void run() {
                assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass().getName());
                assertEquals(tested.getMyArgument1(), tested.getMyArgument2());
                assertEquals(tested.getMyArgument1(), MyReferenceFieldHolder.MY_ARGUMENT);
                assertSame(tested.getMyArgument1(), tested.getMyArgument2());
                // FIXME: This assertion should work:
                // assertSame(tested.getMyArgument1(), MyReferenceFieldHolder.MY_ARGUMENT);
            }
        });
    }

    /** Reference identity through a superclass field must be preserved. */
    @Test
    public void worksWithObjectHierarchy() throws Exception {
        final MockClassLoader classloader = createClassloader();
        final MyHierarchicalFieldHolder tested = new MyHierarchicalFieldHolder();
        assertSame(tested.getMyArgument1(), tested.getMyArgument2());
        assertEquals(tested.getMyArgument3(), tested.getMyArgument2());
        new ClassloaderExecutor(classloader).execute(new Runnable() {
            public void run() {
                assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass().getName());
                assertSame(tested.getMyArgument1(), tested.getMyArgument2());
                assertEquals(tested.getMyArgument3(), tested.getMyArgument2());
            }
        });
    }

    /** Shadowed (overloaded) fields in a hierarchy must keep equality semantics. */
    @Test
    public void worksWithObjectHierarchyAndOverloadedFields() throws Exception {
        final MockClassLoader classloader = createClassloader();
        final MyHierarchicalOverloadedFieldHolder tested = new MyHierarchicalOverloadedFieldHolder();
        assertSame(tested.getMyArgument1(), tested.getMyArgument2());
        assertEquals(tested.getMyArgument1(), tested.getMyArgument3());
        assertSame(tested.getMyArgument3(), MyHierarchicalOverloadedFieldHolder.MY_ARGUMENT);
        assertNotSame(MyReferenceFieldHolder.MY_ARGUMENT, MyHierarchicalOverloadedFieldHolder.MY_ARGUMENT);
        assertEquals(MyReferenceFieldHolder.MY_ARGUMENT, MyHierarchicalOverloadedFieldHolder.MY_ARGUMENT);
        new ClassloaderExecutor(classloader).execute(new Runnable() {
            public void run() {
                assertEquals(MockClassLoader.class.getName(), this.getClass().getClassLoader().getClass().getName());
                assertSame(tested.getMyArgument1(), tested.getMyArgument2());
                assertEquals(tested.getMyArgument1(), tested.getMyArgument3());
                // Note: Cannot be same using X-Stream
                assertEquals(tested.getMyArgument3(), MyHierarchicalOverloadedFieldHolder.MY_ARGUMENT);
                assertNotSame(MyReferenceFieldHolder.MY_ARGUMENT, MyHierarchicalOverloadedFieldHolder.MY_ARGUMENT);
                assertEquals(MyReferenceFieldHolder.MY_ARGUMENT, MyHierarchicalOverloadedFieldHolder.MY_ARGUMENT);
            }
        });
    }

    /** A reflectively-obtained Method must remain invocable after the round trip. */
    @Test
    public void worksWithReflection() throws Exception {
        final MockClassLoader classloader = createClassloader();
        final MyArgument myArgument = new MyArgument("test");
        final MyReturnValue instance = new MyReturnValue(myArgument);
        Method method = instance.getClass().getMethod("getMyArgument");
        final ReflectionMethodInvoker tested = new ReflectionMethodInvoker(method, instance);
        new ClassloaderExecutor(classloader).execute(new Runnable() {
            public void run() {
                Object invoke = tested.invoke();
                assertSame(invoke, myArgument);
            }
        });
    }

    /**
     * Builds a MockClassLoader that instruments the three test classes with a
     * pass-through (no-op) transformer, so class bytes are re-loaded under the
     * mock loader but not otherwise modified.
     */
    private MockClassLoader createClassloader() {
        MockClassLoader classloader = new MockClassLoader(new String[] { MyClass.class.getName(),
                MyArgument.class.getName(), MyReturnValue.class.getName() });
        MockTransformer mainMockTransformer = new MockTransformer() {
            public CtClass transform(CtClass clazz) throws Exception {
                return clazz;
            }
        };
        LinkedList<MockTransformer> linkedList = new LinkedList<MockTransformer>();
        linkedList.add(mainMockTransformer);
        classloader.setMockTransformerChain(linkedList);
        return classloader;
    }
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.course.nodes.dialog;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.olat.core.commons.modules.bc.FileUploadController;
import org.olat.core.commons.modules.bc.FolderConfig;
import org.olat.core.commons.modules.bc.FolderEvent;
import org.olat.core.commons.modules.bc.vfs.OlatRootFolderImpl;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.Component;
import org.olat.core.gui.components.link.Link;
import org.olat.core.gui.components.link.LinkFactory;
import org.olat.core.gui.components.tabbedpane.TabbedPane;
import org.olat.core.gui.components.table.TableController;
import org.olat.core.gui.components.table.TableGuiConfiguration;
import org.olat.core.gui.components.velocity.VelocityContainer;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.ControllerEventListener;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.generic.tabbable.ActivateableTabbableDefaultController;
import org.olat.core.gui.translator.PackageTranslator;
import org.olat.core.logging.activity.CourseLoggingAction;
import org.olat.core.logging.activity.ThreadLocalUserActivityLogger;
import org.olat.core.util.Util;
import org.olat.core.util.vfs.Quota;
import org.olat.course.ICourse;
import org.olat.course.assessment.AssessmentHelper;
import org.olat.course.condition.Condition;
import org.olat.course.condition.ConditionEditController;
import org.olat.course.editor.NodeEditController;
import org.olat.course.groupsandrights.CourseGroupManager;
import org.olat.course.nodes.BCCourseNode;
import org.olat.course.nodes.DialogCourseNode;
import org.olat.course.run.userview.UserCourseEnvironment;
import org.olat.course.tree.CourseEditorTreeModel;
import org.olat.modules.dialog.DialogElement;
import org.olat.modules.dialog.DialogElementsController;
import org.olat.modules.dialog.DialogElementsPropertyManager;
import org.olat.modules.dialog.DialogElementsTableModel;
import org.olat.modules.dialog.DialogPropertyElements;
import org.olat.modules.fo.Forum;
import org.olat.modules.fo.ForumManager;
import org.olat.util.logging.activity.LoggingResourceable;
/**
* Description:<br>
* controller for the tabbed pane inside the course editor for the course node 'dialog elements'
* <P>
* Initial Date: 02.11.2005 <br>
*
* @author guido
*/
public class DialogCourseNodeEditController extends ActivateableTabbableDefaultController implements ControllerEventListener {
// i18n keys of the two editor tabs.
private static final String PANE_TAB_DIALOGCONFIG = "pane.tab.dialogconfig";
private static final String PANE_TAB_ACCESSIBILITY = "pane.tab.accessibility";
// Order of the tab keys as added to the tabbed pane.
private static final String[] paneKeys = { PANE_TAB_DIALOGCONFIG, PANE_TAB_ACCESSIBILITY };
// "edit" and "edit_access" velocity containers backing the two tabs.
private final VelocityContainer content, accessContent;
private final DialogCourseNode courseNode;
// Access-rule editors for the reader / poster / moderator preconditions.
private final ConditionEditController readerCondContr, posterCondContr, moderatorCondContr;
private TabbedPane myTabbedPane;
// Folder node used for file handling; its short title is set in the constructor.
private final BCCourseNode bcNode = new BCCourseNode();
// o_clusterOk by guido: save to hold reference to course inside editor (see constructor).
private final ICourse course;
private DialogConfigForm configForumLaunch;
// Overview table of uploaded dialog elements.
private TableController tableCtr;
private final PackageTranslator resourceTrans;
private FileUploadController fileUplCtr;
// Most recently uploaded element -- NOTE(review): presumably tracked for
// post-upload handling; confirm against the event handlers below.
private DialogElement recentElement;
private final TableGuiConfiguration tableConf;
private final Link uploadButton;
public DialogCourseNodeEditController(final UserRequest ureq, final WindowControl wControl, final DialogCourseNode node, final ICourse course,
final UserCourseEnvironment userCourseEnv) {
super(ureq, wControl);
// o_clusterOk by guido: save to hold reference to course inside editor
this.course = course;
this.courseNode = node;
this.resourceTrans = new PackageTranslator(Util.getPackageName(DialogElementsTableModel.class), ureq.getLocale(), getTranslator());
// set name of the folder we use
bcNode.setShortTitle(translate("dialog.folder.name"));
// dialog specific config tab
content = this.createVelocityContainer("edit");
uploadButton = LinkFactory.createButton("dialog.upload.file", content, this);
// configure table
tableConf = new TableGuiConfiguration();
tableConf.setResultsPerPage(10);
showOverviewTable(ureq);
initConfigForm(ureq);
// accessability config tab
accessContent = this.createVelocityContainer("edit_access");
final CourseGroupManager groupMgr = course.getCourseEnvironment().getCourseGroupManager();
final CourseEditorTreeModel editorModel = course.getEditorTreeModel();
// Reader precondition
final Condition readerCondition = courseNode.getPreConditionReader();
// TODO:gs:a getAssessableNodes ist der dialog node assessable oder nicht?
readerCondContr = new ConditionEditController(ureq, getWindowControl(), groupMgr, readerCondition, "readerConditionForm", AssessmentHelper.getAssessableNodes(
editorModel, courseNode), userCourseEnv);
this.listenTo(readerCondContr);
accessContent.put("readerCondition", readerCondContr.getInitialComponent());
// Poster precondition
final Condition posterCondition = courseNode.getPreConditionPoster();
posterCondContr = new ConditionEditController(ureq, getWindowControl(), groupMgr, posterCondition, "posterConditionForm", AssessmentHelper.getAssessableNodes(
editorModel, courseNode), userCourseEnv);
this.listenTo(posterCondContr);
accessContent.put("posterCondition", posterCondContr.getInitialComponent());
// Moderator precondition
final Condition moderatorCondition = courseNode.getPreConditionModerator();
moderatorCondContr = new ConditionEditController(ureq, getWindowControl(), groupMgr, moderatorCondition, "moderatorConditionForm",
AssessmentHelper.getAssessableNodes(editorModel, courseNode), userCourseEnv);
// FIXME:gs: why is firing needed here?
fireEvent(ureq, NodeEditController.NODECONFIG_CHANGED_EVENT);
this.listenTo(moderatorCondContr);
accessContent.put("moderatorCondition", moderatorCondContr.getInitialComponent());
}
private void initConfigForm(final UserRequest ureq) {
removeAsListenerAndDispose(configForumLaunch);
configForumLaunch = new DialogConfigForm(ureq, getWindowControl(), courseNode.getModuleConfiguration());
listenTo(configForumLaunch);
content.put("showForumAsPopupConfigForm", configForumLaunch.getInitialComponent());
}
/**
* @see org.olat.core.gui.control.generic.tabbable.ActivateableTabbableDefaultController#getPaneKeys()
*/
@Override
public String[] getPaneKeys() {
return paneKeys;
}
/**
* @see org.olat.core.gui.control.generic.tabbable.ActivateableTabbableDefaultController#getTabbedPane()
*/
@Override
public TabbedPane getTabbedPane() {
return myTabbedPane;
}
/**
* @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest, org.olat.core.gui.components.Component, org.olat.core.gui.control.Event)
*/
@Override
public void event(final UserRequest ureq, final Component source, final Event event) {
if (source == uploadButton) {
final Forum forum = ForumManager.getInstance().addAForum();
final OlatRootFolderImpl forumContainer = DialogElementsController.getForumContainer(forum.getKey());
removeAsListenerAndDispose(fileUplCtr);
fileUplCtr = new FileUploadController(getWindowControl(), forumContainer, ureq, (int) FolderConfig.getLimitULKB(), Quota.UNLIMITED, null, false);
listenTo(fileUplCtr);
recentElement = new DialogElement();
recentElement.setForumKey(forum.getKey());
recentElement.setAuthor(ureq.getIdentity().getName());
content.contextPut("overview", Boolean.FALSE);
content.put("upload", fileUplCtr.getInitialComponent());
}
}
/**
* @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest, org.olat.core.gui.components.Component, org.olat.core.gui.control.Event)
*/
@Override
public void event(final UserRequest ureq, final Controller source, final Event event) {
if (source == configForumLaunch) {
if (event == Event.CHANGED_EVENT) {
fireEvent(ureq, NodeEditController.NODECONFIG_CHANGED_EVENT);
}
} else if (source == readerCondContr) {
if (event == Event.CHANGED_EVENT) {
final Condition cond = readerCondContr.getCondition();
courseNode.setPreConditionReader(cond);
fireEvent(ureq, NodeEditController.NODECONFIG_CHANGED_EVENT);
}
} else if (source == posterCondContr) {
if (event == Event.CHANGED_EVENT) {
final Condition cond = posterCondContr.getCondition();
courseNode.setPreConditionPoster(cond);
fireEvent(ureq, NodeEditController.NODECONFIG_CHANGED_EVENT);
}
} else if (source == moderatorCondContr) {
if (event == Event.CHANGED_EVENT) {
final Condition cond = moderatorCondContr.getCondition();
courseNode.setPreConditionModerator(cond);
fireEvent(ureq, NodeEditController.NODECONFIG_CHANGED_EVENT);
}
} else if (source == tableCtr) {
// process table events
} else if (source == fileUplCtr) {
// event.
if (event == Event.DONE_EVENT || event == Event.CANCELLED_EVENT) {
// reset recent element
recentElement = null;
showOverviewTable(ureq);
} else if (event.getCommand().equals(FolderEvent.UPLOAD_EVENT)) {
// new dialog element
final DialogElement element = new DialogElement();
element.setAuthor(recentElement.getAuthor());
element.setDate(new Date());
final String filename = ((FolderEvent) event).getFilename();
element.setFilename(filename);
element.setForumKey(recentElement.getForumKey());
element.setFileSize(DialogElementsController.getFileSize(recentElement.getForumKey()));
// save property
DialogElementsPropertyManager.getInstance().addDialogElement(course.getCourseEnvironment().getCoursePropertyManager(), courseNode, element);
// do logging
ThreadLocalUserActivityLogger.log(CourseLoggingAction.DIALOG_ELEMENT_FILE_UPLOADED, getClass(), LoggingResourceable.wrapUploadFile(filename));
}
}
}
/**
* update table with latest elements
*
* @param ureq
*/
private void showOverviewTable(final UserRequest ureq) {
removeAsListenerAndDispose(tableCtr);
tableCtr = new TableController(tableConf, ureq, getWindowControl(), resourceTrans);
listenTo(tableCtr);
final DialogPropertyElements elements = DialogElementsPropertyManager.getInstance().findDialogElements(
this.course.getCourseEnvironment().getCoursePropertyManager(), courseNode);
List list = new ArrayList();
final DialogElementsTableModel tableModel = new DialogElementsTableModel(getTranslator(), null, null);
if (elements != null) {
list = elements.getDialogPropertyElements();
}
tableModel.setEntries(list);
tableModel.addColumnDescriptors(tableCtr);
tableCtr.setTableDataModel(tableModel);
tableCtr.modelChanged();
tableCtr.setSortColumn(1, true);
content.contextPut("overview", Boolean.TRUE);
content.put("dialogElementsTable", tableCtr.getInitialComponent());
}
/**
* @see org.olat.core.gui.control.DefaultController#doDispose(boolean)
*/
@Override
protected void doDispose() {
// child controllers registered with listenTo() get disposed in BasicController
}
/**
* @see org.olat.core.gui.control.generic.tabbable.TabbableController#addTabs(org.olat.core.gui.components.tabbedpane.TabbedPane)
*/
@Override
public void addTabs(final TabbedPane tabbedPane) {
tabbedPane.addTab(translate(PANE_TAB_ACCESSIBILITY), accessContent);
tabbedPane.addTab(translate(PANE_TAB_DIALOGCONFIG), content);
}
}
| |
// Copyright (C) 2011 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.rules;
import static com.googlecode.prolog_cafe.lang.PrologMachineCopy.save;
import com.google.common.base.Joiner;
import com.google.common.cache.Cache;
import com.google.common.collect.ImmutableList;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.cache.CacheModule;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.plugincontext.PluginSetContext;
import com.google.gerrit.server.project.ProjectCacheImpl;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.google.inject.name.Named;
import com.googlecode.prolog_cafe.exceptions.CompileException;
import com.googlecode.prolog_cafe.exceptions.SyntaxException;
import com.googlecode.prolog_cafe.exceptions.TermException;
import com.googlecode.prolog_cafe.lang.BufferingPrologControl;
import com.googlecode.prolog_cafe.lang.JavaObjectTerm;
import com.googlecode.prolog_cafe.lang.ListTerm;
import com.googlecode.prolog_cafe.lang.Prolog;
import com.googlecode.prolog_cafe.lang.PrologClassLoader;
import com.googlecode.prolog_cafe.lang.PrologMachineCopy;
import com.googlecode.prolog_cafe.lang.StructureTerm;
import com.googlecode.prolog_cafe.lang.SymbolTerm;
import com.googlecode.prolog_cafe.lang.Term;
import java.io.IOException;
import java.io.PushbackReader;
import java.io.Reader;
import java.io.StringReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.ExecutionException;
import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.util.RawParseUtils;
/**
 * Manages a cache of compiled Prolog rules.
 *
 * <p>Rules are loaded from the {@code site_path/cache/rules/rules-SHA1.jar}, where {@code SHA1} is
 * the SHA1 of the Prolog {@code rules.pl} in a project's {@link RefNames#REFS_CONFIG} branch.
 */
@Singleton
public class RulesCache {
  /** Guice module registering the {@link #CACHE_NAME} ObjectId → PrologMachineCopy cache. */
  public static class Module extends CacheModule {
    @Override
    protected void configure() {
      cache(RulesCache.CACHE_NAME, ObjectId.class, PrologMachineCopy.class)
          // This cache is auxiliary to the project cache, so size it the same.
          .configKey(ProjectCacheImpl.CACHE_PROJECTS_BYNAME);
    }
  }

  // Prolog packages bootstrapped into every newly created machine.
  private static final ImmutableList<String> PACKAGE_LIST =
      ImmutableList.of(Prolog.BUILTIN, "gerrit");

  static final String CACHE_NAME = "prolog_rules";

  private final boolean enableProjectRules;
  // Limit on the Prolog database size (config: rules.maxPrologDatabaseSize, default 256).
  private final int maxDbSize;
  // Limit on the rules.pl source size in bytes (config: rules.maxSourceBytes, default 128 KiB).
  private final int maxSrcBytes;
  private final Path cacheDir;
  // <cacheDir>/rules, where precompiled rules-<SHA1>.jar files live; null if no cache dir.
  private final Path rulesDir;
  private final GitRepositoryManager gitMgr;
  private final PluginSetContext<PredicateProvider> predicateProviders;
  private final ClassLoader systemLoader;
  // Machine with no project rules consulted; returned when project rules are disabled.
  private final PrologMachineCopy defaultMachine;
  private final Cache<ObjectId, PrologMachineCopy> machineCache;

  @Inject
  protected RulesCache(
      @GerritServerConfig Config config,
      SitePaths site,
      GitRepositoryManager gm,
      PluginSetContext<PredicateProvider> predicateProviders,
      @Named(CACHE_NAME) Cache<ObjectId, PrologMachineCopy> machineCache) {
    maxDbSize = config.getInt("rules", null, "maxPrologDatabaseSize", 256);
    maxSrcBytes = config.getInt("rules", null, "maxSourceBytes", 128 << 10);
    // A non-positive source-size limit also disables project rules.
    enableProjectRules = config.getBoolean("rules", null, "enable", true) && maxSrcBytes > 0;
    cacheDir = site.resolve(config.getString("cache", null, "directory"));
    rulesDir = cacheDir != null ? cacheDir.resolve("rules") : null;
    gitMgr = gm;
    this.predicateProviders = predicateProviders;
    this.machineCache = machineCache;
    systemLoader = getClass().getClassLoader();
    defaultMachine = save(newEmptyMachine(systemLoader));
  }

  /** @return true if per-project Prolog rules are enabled by configuration. */
  public boolean isProjectRulesEnabled() {
    return enableProjectRules;
  }

  /**
   * Locate a cached Prolog machine state, or create one if not available.
   *
   * @param project the project whose rules should be consulted.
   * @param rulesId SHA-1 of the {@code rules.pl} blob to load.
   * @return a Prolog machine, after loading the specified rules.
   * @throws CompileException the machine cannot be created.
   */
  public synchronized PrologMachineCopy loadMachine(Project.NameKey project, ObjectId rulesId)
      throws CompileException {
    if (!enableProjectRules || project == null || rulesId == null) {
      return defaultMachine;
    }
    try {
      return machineCache.get(rulesId, () -> createMachine(project, rulesId));
    } catch (ExecutionException e) {
      // Unwrap compile failures so callers see the original compile message.
      if (e.getCause() instanceof CompileException) {
        throw new CompileException(e.getCause().getMessage(), e);
      }
      throw new CompileException("Error while consulting rules from " + project, e);
    }
  }

  /**
   * Consult rules from an arbitrary reader, bypassing the cache.
   *
   * @param name display name used in error messages (e.g. a file name).
   * @param in the rules source.
   * @return a Prolog machine with the rules consulted.
   * @throws CompileException the rules cannot be consulted.
   */
  public PrologMachineCopy loadMachine(String name, Reader in) throws CompileException {
    PrologMachineCopy pmc = consultRules(name, in);
    if (pmc == null) {
      throw new CompileException("Cannot consult rules from the stream " + name);
    }
    return pmc;
  }

  /** Creates a machine for the given rules blob, preferring a precompiled JAR on disk. */
  private PrologMachineCopy createMachine(Project.NameKey project, ObjectId rulesId)
      throws CompileException {
    // If the rules are available as a compiled JAR on local disk, prefer
    // that over dynamic consult as the bytecode will be faster.
    //
    if (rulesDir != null) {
      Path jarPath = rulesDir.resolve("rules-" + rulesId.getName() + ".jar");
      if (Files.isRegularFile(jarPath)) {
        URL[] cp = new URL[] {toURL(jarPath)};
        return save(newEmptyMachine(URLClassLoader.newInstance(cp, systemLoader)));
      }
    }

    // Dynamically consult the rules into the machine's internal database.
    //
    String rules = read(project, rulesId);
    PrologMachineCopy pmc = consultRules("rules.pl", new StringReader(rules));
    if (pmc == null) {
      throw new CompileException("Cannot consult rules of " + project);
    }
    return pmc;
  }

  /**
   * Runs {@code consult_stream} on a fresh machine, translating Prolog errors into
   * {@link CompileException}s; returns null when consult fails without an exception.
   */
  private PrologMachineCopy consultRules(String name, Reader rules) throws CompileException {
    BufferingPrologControl ctl = newEmptyMachine(systemLoader);
    PushbackReader in = new PushbackReader(rules, Prolog.PUSHBACK_SIZE);
    try {
      if (!ctl.execute(
          Prolog.BUILTIN, "consult_stream", SymbolTerm.intern(name), new JavaObjectTerm(in))) {
        return null;
      }
    } catch (SyntaxException e) {
      throw new CompileException(e.toString(), e);
    } catch (TermException e) {
      Term m = e.getMessageTerm();
      // Render syntax_error(Message[, at(Position)]) terms as readable messages.
      if (m instanceof StructureTerm && "syntax_error".equals(m.name()) && m.arity() >= 1) {
        StringBuilder msg = new StringBuilder();
        if (m.arg(0) instanceof ListTerm) {
          msg.append(Joiner.on(' ').join(((ListTerm) m.arg(0)).toJava()));
        } else {
          msg.append(m.arg(0).toString());
        }
        if (m.arity() == 2 && m.arg(1) instanceof StructureTerm && "at".equals(m.arg(1).name())) {
          Term at = m.arg(1).arg(0).dereference();
          if (at instanceof ListTerm) {
            msg.append(" at: ");
            msg.append(prettyProlog(at));
          }
        }
        throw new CompileException(msg.toString(), e);
      }
      throw new CompileException("Error while consulting rules from " + name, e);
    } catch (RuntimeException e) {
      throw new CompileException("Error while consulting rules from " + name, e);
    }
    return save(ctl);
  }

  /** Flattens a Prolog list of atom/var terms into a space-separated string for error output. */
  private static String prettyProlog(Term at) {
    StringBuilder b = new StringBuilder();
    for (Object o : ((ListTerm) at).toJava()) {
      if (o instanceof Term) {
        Term t = (Term) o;
        if (!(t instanceof StructureTerm)) {
          b.append(t.toString()).append(' ');
          continue;
        }
        switch (t.name()) {
          case "atom":
            SymbolTerm atom = (SymbolTerm) t.arg(0);
            b.append(atom.toString());
            break;
          case "var":
            b.append(t.arg(0).toString());
            break;
        }
      } else {
        b.append(o);
      }
    }
    return b.toString().trim();
  }

  /** Reads the rules.pl blob from the project repository, enforcing the source-size limit. */
  private String read(Project.NameKey project, ObjectId rulesId) throws CompileException {
    try (Repository git = gitMgr.openRepository(project)) {
      try {
        ObjectLoader ldr = git.open(rulesId, Constants.OBJ_BLOB);
        // getCachedBytes throws LargeObjectException when the blob exceeds maxSrcBytes.
        byte[] raw = ldr.getCachedBytes(maxSrcBytes);
        return RawParseUtils.decode(raw);
      } catch (LargeObjectException e) {
        throw new CompileException("rules of " + project + " are too large", e);
      } catch (RuntimeException | IOException e) {
        throw new CompileException("Cannot load rules of " + project, e);
      }
    } catch (IOException e) {
      throw new CompileException("Cannot open repository " + project, e);
    }
  }

  /** Creates a fresh Prolog machine with all standard packages loaded via the given class loader. */
  private BufferingPrologControl newEmptyMachine(ClassLoader cl) {
    BufferingPrologControl ctl = new BufferingPrologControl();
    ctl.setMaxDatabaseSize(maxDbSize);
    ctl.setPrologClassLoader(
        new PrologClassLoader(new PredicateClassLoader(predicateProviders, cl)));
    ctl.setEnabled(EnumSet.allOf(Prolog.Feature.class), false);
    List<String> packages = new ArrayList<>();
    packages.addAll(PACKAGE_LIST);
    // Plugins may contribute additional predicate packages.
    predicateProviders.runEach(
        predicateProvider -> packages.addAll(predicateProvider.getPackages()));

    // Bootstrap the interpreter and ensure there is clean state.
    ctl.initialize(packages.toArray(new String[packages.size()]));
    return ctl;
  }

  /** Converts a JAR path to a URL, wrapping malformed-URL failures as CompileException. */
  private static URL toURL(Path jarPath) throws CompileException {
    try {
      return jarPath.toUri().toURL();
    } catch (MalformedURLException e) {
      throw new CompileException("Cannot create URL for " + jarPath, e);
    }
  }
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.graphics.glutils;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Mesh;
import com.badlogic.gdx.graphics.VertexAttribute;
import com.badlogic.gdx.graphics.VertexAttributes.Usage;
import com.badlogic.gdx.math.Matrix4;
import com.badlogic.gdx.utils.Array;
/** Immediate mode rendering class for GLES 2.0. The renderer allows you to specify vertices on the fly and provides a default
 * shader for (unlit) rendering.
 *
 * @author mzechner */
public class ImmediateModeRenderer20 implements ImmediateModeRenderer {
	private int primitiveType;
	/** Write offset (in floats) of the next vertex within {@link #vertices}. */
	private int vertexIdx;
	/** Number of texcoord floats written for the current vertex; reset by {@link #vertex(float, float, float)}. */
	private int numSetTexCoords;
	private final int maxVertices;
	private int numVertices;
	private final Mesh mesh;
	private ShaderProgram shader;
	/** True when this renderer created its shader and must dispose it. */
	private boolean ownsShader;
	private final int numTexCoords;
	/** Size of one vertex in floats. */
	private final int vertexSize;
	/** Offsets (in floats) of the optional attributes inside a vertex; 0 when the attribute is absent. */
	private final int normalOffset;
	private final int colorOffset;
	private final int texCoordOffset;
	private final Matrix4 projModelView = new Matrix4();
	private final float[] vertices;

	/** Creates a renderer with a capacity of 5000 vertices and a default shader owned by this renderer. */
	public ImmediateModeRenderer20 (boolean hasNormals, boolean hasColors, int numTexCoords) {
		this(5000, hasNormals, hasColors, numTexCoords, createDefaultShader(hasNormals, hasColors, numTexCoords));
		ownsShader = true;
	}

	/** Creates a renderer with the given capacity and a default shader owned by this renderer. */
	public ImmediateModeRenderer20 (int maxVertices, boolean hasNormals, boolean hasColors, int numTexCoords) {
		this(maxVertices, hasNormals, hasColors, numTexCoords, createDefaultShader(hasNormals, hasColors, numTexCoords));
		ownsShader = true;
	}

	/** Creates a renderer using the supplied shader; the caller keeps ownership of the shader. */
	public ImmediateModeRenderer20 (int maxVertices, boolean hasNormals, boolean hasColors, int numTexCoords, ShaderProgram shader) {
		this.maxVertices = maxVertices;
		this.numTexCoords = numTexCoords;
		this.shader = shader;

		VertexAttribute[] attribs = buildVertexAttributes(hasNormals, hasColors, numTexCoords);
		mesh = new Mesh(false, maxVertices, 0, attribs);

		// Mesh reports sizes and offsets in bytes; divide by 4 to get float units.
		vertices = new float[maxVertices * (mesh.getVertexAttributes().vertexSize / 4)];
		vertexSize = mesh.getVertexAttributes().vertexSize / 4;
		normalOffset = mesh.getVertexAttribute(Usage.Normal) != null ? mesh.getVertexAttribute(Usage.Normal).offset / 4 : 0;
		colorOffset = mesh.getVertexAttribute(Usage.ColorPacked) != null ? mesh.getVertexAttribute(Usage.ColorPacked).offset / 4
			: 0;
		texCoordOffset = mesh.getVertexAttribute(Usage.TextureCoordinates) != null ? mesh
			.getVertexAttribute(Usage.TextureCoordinates).offset / 4 : 0;
	}

	/** Builds the attribute list matching the requested vertex layout: position, optional normal,
	 * optional packed color, then {@code numTexCoords} texture coordinate pairs. */
	private VertexAttribute[] buildVertexAttributes (boolean hasNormals, boolean hasColor, int numTexCoords) {
		Array<VertexAttribute> attribs = new Array<VertexAttribute>();
		attribs.add(new VertexAttribute(Usage.Position, 3, ShaderProgram.POSITION_ATTRIBUTE));
		if (hasNormals) attribs.add(new VertexAttribute(Usage.Normal, 3, ShaderProgram.NORMAL_ATTRIBUTE));
		if (hasColor) attribs.add(new VertexAttribute(Usage.ColorPacked, 4, ShaderProgram.COLOR_ATTRIBUTE));
		for (int i = 0; i < numTexCoords; i++) {
			attribs.add(new VertexAttribute(Usage.TextureCoordinates, 2, ShaderProgram.TEXCOORD_ATTRIBUTE + i));
		}
		VertexAttribute[] array = new VertexAttribute[attribs.size];
		for (int i = 0; i < attribs.size; i++)
			array[i] = attribs.get(i);
		return array;
	}

	/** Replaces the shader. Disposes the current shader if this renderer owns it; the new shader is not owned. */
	public void setShader (ShaderProgram shader) {
		if (ownsShader) this.shader.dispose();
		this.shader = shader;
		ownsShader = false;
	}

	/** Starts a new batch of vertices rendered with the given combined projection/model-view matrix. */
	public void begin (Matrix4 projModelView, int primitiveType) {
		this.projModelView.set(projModelView);
		this.primitiveType = primitiveType;
	}

	/** Sets the packed color of the current vertex. */
	public void color (float r, float g, float b, float a) {
		vertices[vertexIdx + colorOffset] = Color.toFloatBits(r, g, b, a);
	}

	/** Appends one texture coordinate pair to the current vertex (call once per texcoord unit). */
	public void texCoord (float u, float v) {
		final int idx = vertexIdx + texCoordOffset;
		vertices[idx + numSetTexCoords] = u;
		vertices[idx + numSetTexCoords + 1] = v;
		numSetTexCoords += 2;
	}

	/** Sets the normal of the current vertex. */
	public void normal (float x, float y, float z) {
		final int idx = vertexIdx + normalOffset;
		vertices[idx] = x;
		vertices[idx + 1] = y;
		vertices[idx + 2] = z;
	}

	/** Finishes the current vertex at the given position and advances to the next one.
	 * Call color/normal/texCoord before this, as it resets the per-vertex state. */
	public void vertex (float x, float y, float z) {
		final int idx = vertexIdx;
		vertices[idx] = x;
		vertices[idx + 1] = y;
		vertices[idx + 2] = z;

		numSetTexCoords = 0;
		vertexIdx += vertexSize;
		numVertices++;
	}

	/** Flushes the accumulated vertices to the mesh and renders them; no-op when no vertices were specified. */
	public void end () {
		if (numVertices == 0) return;
		shader.begin();
		shader.setUniformMatrix("u_projModelView", projModelView);
		for (int i = 0; i < numTexCoords; i++)
			shader.setUniformi("u_sampler" + i, i);
		mesh.setVertices(vertices, 0, vertexIdx);
		mesh.render(shader, primitiveType);
		shader.end();

		numSetTexCoords = 0;
		vertexIdx = 0;
		numVertices = 0;
	}

	/** @return the number of vertices specified since the last {@link #end()}. */
	public int getNumVertices () {
		return numVertices;
	}

	@Override
	public int getMaxVertices () {
		return maxVertices;
	}

	/** Disposes the mesh and, if owned, the shader. */
	public void dispose () {
		if (ownsShader && shader != null) shader.dispose();
		mesh.dispose();
	}

	/** Generates the GLSL vertex shader source for the given vertex layout.
	 * Uses a StringBuilder instead of repeated String concatenation in loops. */
	static private String createVertexShader (boolean hasNormals, boolean hasColors, int numTexCoords) {
		StringBuilder shader = new StringBuilder();
		shader.append("attribute vec4 ").append(ShaderProgram.POSITION_ATTRIBUTE).append(";\n");
		if (hasNormals) shader.append("attribute vec3 ").append(ShaderProgram.NORMAL_ATTRIBUTE).append(";\n");
		if (hasColors) shader.append("attribute vec4 ").append(ShaderProgram.COLOR_ATTRIBUTE).append(";\n");
		for (int i = 0; i < numTexCoords; i++) {
			shader.append("attribute vec2 ").append(ShaderProgram.TEXCOORD_ATTRIBUTE).append(i).append(";\n");
		}
		shader.append("uniform mat4 u_projModelView;\n");
		if (hasColors) shader.append("varying vec4 v_col;\n");
		for (int i = 0; i < numTexCoords; i++) {
			shader.append("varying vec2 v_tex").append(i).append(";\n");
		}
		shader.append("void main() {\n");
		shader.append(" gl_Position = u_projModelView * ").append(ShaderProgram.POSITION_ATTRIBUTE).append(";\n");
		if (hasColors) shader.append(" v_col = ").append(ShaderProgram.COLOR_ATTRIBUTE).append(";\n");
		for (int i = 0; i < numTexCoords; i++) {
			shader.append(" v_tex").append(i).append(" = ").append(ShaderProgram.TEXCOORD_ATTRIBUTE).append(i).append(";\n");
		}
		shader.append(" gl_PointSize = 1.0;\n");
		shader.append("}\n");
		return shader.toString();
	}

	/** Generates the GLSL fragment shader source. The color (or white) is multiplied by all bound samplers.
	 * Note: hasNormals is accepted for signature symmetry but does not affect the fragment shader. */
	static private String createFragmentShader (boolean hasNormals, boolean hasColors, int numTexCoords) {
		StringBuilder shader = new StringBuilder();
		shader.append("#ifdef GL_ES\n").append("precision mediump float;\n").append("#endif\n");
		if (hasColors) shader.append("varying vec4 v_col;\n");
		for (int i = 0; i < numTexCoords; i++) {
			shader.append("varying vec2 v_tex").append(i).append(";\n");
			shader.append("uniform sampler2D u_sampler").append(i).append(";\n");
		}
		shader.append("void main() {\n").append(" gl_FragColor = ").append(hasColors ? "v_col" : "vec4(1, 1, 1, 1)");
		if (numTexCoords > 0) shader.append(" * ");
		for (int i = 0; i < numTexCoords; i++) {
			if (i == numTexCoords - 1) {
				shader.append(" texture2D(u_sampler").append(i).append(", v_tex").append(i).append(")");
			} else {
				shader.append(" texture2D(u_sampler").append(i).append(", v_tex").append(i).append(") *");
			}
		}
		shader.append(";\n}");
		return shader.toString();
	}

	/** Returns a new instance of the default shader used by SpriteBatch for GL2 when no shader is specified. */
	static public ShaderProgram createDefaultShader (boolean hasNormals, boolean hasColors, int numTexCoords) {
		String vertexShader = createVertexShader(hasNormals, hasColors, numTexCoords);
		String fragmentShader = createFragmentShader(hasNormals, hasColors, numTexCoords);
		ShaderProgram program = new ShaderProgram(vertexShader, fragmentShader);
		return program;
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.client.api.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.AbstractMap.SimpleEntry;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerResourceChangeRequest;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.NMToken;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.client.ClientRMProxy;
import org.apache.hadoop.yarn.client.api.AMRMClient;
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
import org.apache.hadoop.yarn.client.api.InvalidContainerRequestException;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ApplicationMasterNotRegisteredException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.util.RackResolver;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import org.apache.hadoop.yarn.util.resource.Resources;
@Private
@Unstable
public class AMRMClientImpl<T extends ContainerRequest> extends AMRMClient<T> {
  private static final Log LOG = LogFactory.getLog(AMRMClientImpl.class);
  // Resource-name list representing a request for "any" location.
  private static final List<String> ANY_LIST =
      Collections.singletonList(ResourceRequest.ANY);

  // Id of the last allocate response seen from the RM; reset to 0 on
  // (re)registration — presumably echoed back in allocate requests, confirm in allocate().
  private int lastResponseId = 0;

  // AM endpoint details captured by registerApplicationMaster(...) and reused on re-register.
  protected String appHostName;
  protected int appHostPort;
  protected String appTrackingUrl;

  // RPC proxy to the ResourceManager, created in serviceStart().
  protected ApplicationMasterProtocol rmClient;
  protected Resource clusterAvailableResources;
  protected int clusterNodeCount;

  // blacklistedNodes is required for keeping history of blacklisted nodes that
  // are sent to RM. On RESYNC command from RM, blacklistedNodes are used to get
  // current blacklisted nodes and send back to RM.
  protected final Set<String> blacklistedNodes = new HashSet<String>();
  // Blacklist deltas accumulated between allocate() calls.
  protected final Set<String> blacklistAdditions = new HashSet<String>();
  protected final Set<String> blacklistRemovals = new HashSet<String>();
class ResourceRequestInfo {
ResourceRequest remoteRequest;
LinkedHashSet<T> containerRequests;
ResourceRequestInfo(Priority priority, String resourceName,
Resource capability, boolean relaxLocality) {
remoteRequest = ResourceRequest.newInstance(priority, resourceName,
capability, 0);
remoteRequest.setRelaxLocality(relaxLocality);
containerRequests = new LinkedHashSet<T>();
}
}
/**
* Class compares Resource by memory then cpu in reverse order
*/
class ResourceReverseMemoryThenCpuComparator implements Comparator<Resource> {
@Override
public int compare(Resource arg0, Resource arg1) {
int mem0 = arg0.getMemory();
int mem1 = arg1.getMemory();
int cpu0 = arg0.getVirtualCores();
int cpu1 = arg1.getVirtualCores();
if(mem0 == mem1) {
if(cpu0 == cpu1) {
return 0;
}
if(cpu0 < cpu1) {
return 1;
}
return -1;
}
if(mem0 < mem1) {
return 1;
}
return -1;
}
}
static boolean canFit(Resource arg0, Resource arg1) {
int mem0 = arg0.getMemory();
int mem1 = arg1.getMemory();
int cpu0 = arg0.getVirtualCores();
int cpu1 = arg1.getVirtualCores();
return (mem0 <= mem1 && cpu0 <= cpu1);
}
  // Nested lookup table for outstanding container requests:
  //Key -> Priority
  //Value -> Map
  //Key->ResourceName (e.g., nodename, rackname, *)
  //Value->Map
  //Key->Resource Capability
  //Value->ResourceRequest
  protected final
  Map<Priority, Map<String, TreeMap<Resource, ResourceRequestInfo>>>
    remoteRequestsTable =
    new TreeMap<Priority, Map<String, TreeMap<Resource, ResourceRequestInfo>>>();

  // ResourceRequests queued for the next allocate() call, kept in the
  // canonical ResourceRequestComparator order.
  protected final Set<ResourceRequest> ask = new TreeSet<ResourceRequest>(
      new org.apache.hadoop.yarn.api.records.ResourceRequest.ResourceRequestComparator());
  // Containers queued for release with the next allocate() call.
  protected final Set<ContainerId> release = new TreeSet<ContainerId>();
  // pendingRelease holds history of release requests.
  // request is removed only if RM sends completedContainer.
  // How it different from release? --> release is for per allocate() request.
  protected Set<ContainerId> pendingRelease = new TreeSet<ContainerId>();
  // change map holds container resource change requests between two allocate()
  // calls, and are cleared after each successful allocate() call.
  protected final Map<ContainerId, SimpleEntry<Container, Resource>> change =
      new HashMap<>();
  // pendingChange map holds history of container resource change requests in
  // case AM needs to reregister with the ResourceManager.
  // Change requests are removed from this map if RM confirms the change
  // through allocate response, or if RM confirms that the container has been
  // completed.
  protected final Map<ContainerId, SimpleEntry<Container, Resource>>
      pendingChange = new HashMap<>();
  /** Creates the client service; the RM proxy is created later, in serviceStart(). */
  public AMRMClientImpl() {
    super(AMRMClientImpl.class.getName());
  }
  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    // Initialize the static rack-topology resolver with this configuration.
    RackResolver.init(conf);
    super.serviceInit(conf);
  }
  /** Creates the RPC proxy to the ResourceManager's ApplicationMasterProtocol. */
  @Override
  protected void serviceStart() throws Exception {
    final YarnConfiguration conf = new YarnConfiguration(getConfig());
    try {
      rmClient =
          ClientRMProxy.createRMProxy(conf, ApplicationMasterProtocol.class);
    } catch (IOException e) {
      // Service lifecycle methods cannot throw IOException; wrap as runtime.
      throw new YarnRuntimeException(e);
    }
    super.serviceStart();
  }
@Override
protected void serviceStop() throws Exception {
  // Release the RPC proxy (if one was created) before stopping the service.
  if (this.rmClient != null) {
    RPC.stopProxy(this.rmClient);
  }
  super.serviceStop();
}
/**
 * Registers this ApplicationMaster with the ResourceManager.
 *
 * Arguments are validated BEFORE any state is mutated, so an invalid call
 * does not leave this client with partially-updated registration info
 * (the original code assigned the fields first and validated afterwards).
 */
@Override
public RegisterApplicationMasterResponse registerApplicationMaster(
    String appHostName, int appHostPort, String appTrackingUrl)
    throws YarnException, IOException {
  Preconditions.checkArgument(appHostName != null,
      "The host name should not be null");
  Preconditions.checkArgument(appHostPort >= -1, "Port number of the host"
      + " should be any integers larger than or equal to -1");
  this.appHostName = appHostName;
  this.appHostPort = appHostPort;
  this.appTrackingUrl = appTrackingUrl;
  return registerApplicationMaster();
}
/**
 * Performs the actual registration RPC using the previously stored
 * host/port/tracking-URL, then resets the response id and caches any NM
 * tokens carried over from a previous application attempt.
 */
private RegisterApplicationMasterResponse registerApplicationMaster()
    throws YarnException, IOException {
  RegisterApplicationMasterRequest request =
      RegisterApplicationMasterRequest.newInstance(this.appHostName,
          this.appHostPort, this.appTrackingUrl);
  RegisterApplicationMasterResponse response =
      rmClient.registerApplicationMaster(request);
  synchronized (this) {
    // A (re-)registration restarts the allocate() response-id sequence.
    lastResponseId = 0;
    if (!response.getNMTokensFromPreviousAttempts().isEmpty()) {
      populateNMTokens(response.getNMTokensFromPreviousAttempts());
    }
  }
  return response;
}
/**
 * Heartbeats the ResourceManager: sends pending asks, releases, blacklist
 * updates and container resource-change requests, and processes the
 * response. On RPC failure the optimistically-cleared request state is
 * restored in the finally block so the next call retries it. On an RM
 * resync (ApplicationMasterNotRegisteredException) the full pending state
 * is rebuilt, the AM re-registers, and allocate() is retried recursively.
 */
@Override
public AllocateResponse allocate(float progressIndicator)
    throws YarnException, IOException {
  Preconditions.checkArgument(progressIndicator >= 0,
      "Progress indicator should not be negative");
  AllocateResponse allocateResponse = null;
  List<ResourceRequest> askList = null;
  List<ContainerId> releaseList = null;
  AllocateRequest allocateRequest = null;
  List<String> blacklistToAdd = new ArrayList<String>();
  List<String> blacklistToRemove = new ArrayList<String>();
  // Snapshot of 'change' taken before it is cleared, used for recovery if
  // the RPC fails.
  Map<ContainerId, SimpleEntry<Container, Resource>> oldChange =
      new HashMap<>();
  try {
    synchronized (this) {
      askList = new ArrayList<ResourceRequest>(ask.size());
      for(ResourceRequest r : ask) {
        // create a copy of ResourceRequest as we might change it while the
        // RPC layer is using it to send info across
        askList.add(ResourceRequest.newInstance(r.getPriority(),
            r.getResourceName(), r.getCapability(), r.getNumContainers(),
            r.getRelaxLocality(), r.getNodeLabelExpression()));
      }
      List<ContainerResourceChangeRequest> increaseList = new ArrayList<>();
      List<ContainerResourceChangeRequest> decreaseList = new ArrayList<>();
      // Save the current change for recovery
      oldChange.putAll(change);
      // Split change requests into increases vs decreases: a target that
      // fits inside the current allocation is a decrease.
      for (Map.Entry<ContainerId, SimpleEntry<Container, Resource>> entry :
          change.entrySet()) {
        Container container = entry.getValue().getKey();
        Resource original = container.getResource();
        Resource target = entry.getValue().getValue();
        if (Resources.fitsIn(target, original)) {
          // This is a decrease request
          decreaseList.add(ContainerResourceChangeRequest.newInstance(
              container.getId(), target));
        } else {
          // This is an increase request
          increaseList.add(ContainerResourceChangeRequest.newInstance(
              container.getId(), target));
        }
      }
      releaseList = new ArrayList<ContainerId>(release);
      // optimistically clear this collection assuming no RPC failure
      ask.clear();
      release.clear();
      change.clear();
      blacklistToAdd.addAll(blacklistAdditions);
      blacklistToRemove.addAll(blacklistRemovals);
      ResourceBlacklistRequest blacklistRequest =
          ResourceBlacklistRequest.newInstance(blacklistToAdd,
              blacklistToRemove);
      allocateRequest =
          AllocateRequest.newInstance(lastResponseId, progressIndicator,
            askList, releaseList, blacklistRequest,
            increaseList, decreaseList);
      // clear blacklistAdditions and blacklistRemovals before
      // unsynchronized part
      blacklistAdditions.clear();
      blacklistRemovals.clear();
    }
    try {
      // RPC performed outside the lock so other methods are not blocked.
      allocateResponse = rmClient.allocate(allocateRequest);
    } catch (ApplicationMasterNotRegisteredException e) {
      LOG.warn("ApplicationMaster is out of sync with ResourceManager,"
          + " hence resyncing.");
      synchronized (this) {
        // Rebuild the full pending state (releases, blacklist, every
        // remote request, change history) before re-registering.
        release.addAll(this.pendingRelease);
        blacklistAdditions.addAll(this.blacklistedNodes);
        for (Map<String, TreeMap<Resource, ResourceRequestInfo>> rr : remoteRequestsTable
            .values()) {
          for (Map<Resource, ResourceRequestInfo> capabalities : rr.values()) {
            for (ResourceRequestInfo request : capabalities.values()) {
              addResourceRequestToAsk(request.remoteRequest);
            }
          }
        }
        change.putAll(this.pendingChange);
      }
      // re register with RM
      registerApplicationMaster();
      allocateResponse = allocate(progressIndicator);
      return allocateResponse;
    }
    synchronized (this) {
      // update these on successful RPC
      clusterNodeCount = allocateResponse.getNumClusterNodes();
      lastResponseId = allocateResponse.getResponseId();
      clusterAvailableResources = allocateResponse.getAvailableResources();
      if (!allocateResponse.getNMTokens().isEmpty()) {
        populateNMTokens(allocateResponse.getNMTokens());
      }
      if (allocateResponse.getAMRMToken() != null) {
        updateAMRMToken(allocateResponse.getAMRMToken());
      }
      if (!pendingRelease.isEmpty()
          && !allocateResponse.getCompletedContainersStatuses().isEmpty()) {
        removePendingReleaseRequests(allocateResponse
            .getCompletedContainersStatuses());
      }
      if (!pendingChange.isEmpty()) {
        List<ContainerStatus> completed =
            allocateResponse.getCompletedContainersStatuses();
        List<Container> changed = new ArrayList<>();
        changed.addAll(allocateResponse.getIncreasedContainers());
        changed.addAll(allocateResponse.getDecreasedContainers());
        // remove all pending change requests that belong to the completed
        // containers
        for (ContainerStatus status : completed) {
          ContainerId containerId = status.getContainerId();
          pendingChange.remove(containerId);
        }
        // remove all pending change requests that have been satisfied
        if (!changed.isEmpty()) {
          removePendingChangeRequests(changed);
        }
      }
    }
  } finally {
    // TODO how to differentiate remote yarn exception vs error in rpc
    if(allocateResponse == null) {
      // we hit an exception in allocate()
      // preserve ask and release for next call to allocate()
      synchronized (this) {
        release.addAll(releaseList);
        // requests could have been added or deleted during call to allocate
        // If requests were added/removed then there is nothing to do since
        // the ResourceRequest object in ask would have the actual new value.
        // If ask does not have this ResourceRequest then it was unchanged and
        // so we can add the value back safely.
        // This assumes that there will no concurrent calls to allocate() and
        // so we dont have to worry about ask being changed in the
        // synchronized block at the beginning of this method.
        for(ResourceRequest oldAsk : askList) {
          if(!ask.contains(oldAsk)) {
            ask.add(oldAsk);
          }
        }
        // change requests could have been added during the allocate call.
        // Those are the newest requests which take precedence
        // over requests cached in the oldChange map.
        //
        // Only insert entries from the cached oldChange map
        // that do not exist in the current change map:
        for (Map.Entry<ContainerId, SimpleEntry<Container, Resource>> entry :
            oldChange.entrySet()) {
          ContainerId oldContainerId = entry.getKey();
          Container oldContainer = entry.getValue().getKey();
          Resource oldResource = entry.getValue().getValue();
          if (change.get(oldContainerId) == null) {
            change.put(
                oldContainerId, new SimpleEntry<>(oldContainer, oldResource));
          }
        }
        blacklistAdditions.addAll(blacklistToAdd);
        blacklistRemovals.addAll(blacklistToRemove);
      }
    }
  }
  return allocateResponse;
}
/**
 * Drops release-tracking entries for every container the RM has reported
 * as completed; those releases no longer need to be replayed on resync.
 */
protected void removePendingReleaseRequests(
    List<ContainerStatus> completedContainersStatuses) {
  for (ContainerStatus completedStatus : completedContainersStatuses) {
    ContainerId completedId = completedStatus.getContainerId();
    pendingRelease.remove(completedId);
  }
}
/**
 * Drops pending resource-change entries for every container whose change
 * the RM has confirmed (via the increased/decreased container lists).
 */
protected void removePendingChangeRequests(
    List<Container> changedContainers) {
  for (Container changedContainer : changedContainers) {
    ContainerId containerId = changedContainer.getId();
    SimpleEntry<Container, Resource> pending = pendingChange.get(containerId);
    // Skip containers we were not tracking a change for.
    if (pending == null) {
      continue;
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("RM has confirmed changed resource allocation for "
          + "container " + containerId + ". Current resource allocation:"
          + changedContainer.getResource()
          + ". Remove pending change request:"
          + pending.getValue());
    }
    pendingChange.remove(containerId);
  }
}
/**
 * Caches the NodeManager tokens delivered by the RM, keyed by node id,
 * so subsequent container launches on those nodes can authenticate.
 */
@Private
@VisibleForTesting
protected void populateNMTokens(List<NMToken> nmTokens) {
  for (NMToken nmToken : nmTokens) {
    String node = nmToken.getNodeId().toString();
    // Log whether this is a refresh of an existing token or a new one.
    boolean replacing = getNMTokenCache().containsToken(node);
    if (replacing) {
      LOG.info("Replacing token for : " + node);
    } else {
      LOG.info("Received new token for : " + node);
    }
    getNMTokenCache().setToken(node, nmToken.getToken());
  }
}
/**
 * Unregisters this ApplicationMaster from the ResourceManager, polling
 * until the RM confirms the unregistration has been recorded. If the RM
 * reports the AM as unregistered (e.g. after RM failover), re-registers
 * and retries.
 */
@Override
public void unregisterApplicationMaster(FinalApplicationStatus appStatus,
    String appMessage, String appTrackingUrl) throws YarnException,
    IOException {
  Preconditions.checkArgument(appStatus != null,
      "AppStatus should not be null.");
  FinishApplicationMasterRequest request =
      FinishApplicationMasterRequest.newInstance(appStatus, appMessage,
        appTrackingUrl);
  try {
    while (true) {
      FinishApplicationMasterResponse response =
          rmClient.finishApplicationMaster(request);
      if (response.getIsUnregistered()) {
        break;
      }
      LOG.info("Waiting for application to be successfully unregistered.");
      Thread.sleep(100);
    }
  } catch (InterruptedException e) {
    LOG.info("Interrupted while waiting for application"
        + " to be removed from RMStateStore");
    // Restore the interrupt status so callers up the stack can observe
    // the interruption; the original code silently swallowed it.
    Thread.currentThread().interrupt();
  } catch (ApplicationMasterNotRegisteredException e) {
    LOG.warn("ApplicationMaster is out of sync with ResourceManager,"
        + " hence resyncing.");
    // re register with RM
    registerApplicationMaster();
    unregisterApplicationMaster(appStatus, appMessage, appTrackingUrl);
  }
}
/**
 * Registers a new container request: validates locality-relaxation and
 * node-label constraints, then adds resource requests at node, rack and
 * ANY (off-switch) levels so the scheduler can match at any of them.
 */
@Override
public synchronized void addContainerRequest(T req) {
  Preconditions.checkArgument(req != null,
      "Resource request can not be null.");
  // De-duplicate explicitly requested racks (warn on duplicates).
  Set<String> dedupedRacks = new HashSet<String>();
  if (req.getRacks() != null) {
    dedupedRacks.addAll(req.getRacks());
    if(req.getRacks().size() != dedupedRacks.size()) {
      Joiner joiner = Joiner.on(',');
      LOG.warn("ContainerRequest has duplicate racks: "
          + joiner.join(req.getRacks()));
    }
  }
  // Racks implied by the requested nodes but not explicitly listed.
  Set<String> inferredRacks = resolveRacks(req.getNodes());
  inferredRacks.removeAll(dedupedRacks);
  // check that specific and non-specific requests cannot be mixed within a
  // priority
  checkLocalityRelaxationConflict(req.getPriority(), ANY_LIST,
      req.getRelaxLocality());
  // check that specific rack cannot be mixed with specific node within a
  // priority. If node and its rack are both specified then they must be
  // in the same request.
  // For explicitly requested racks, we set locality relaxation to true
  checkLocalityRelaxationConflict(req.getPriority(), dedupedRacks, true);
  checkLocalityRelaxationConflict(req.getPriority(), inferredRacks,
      req.getRelaxLocality());
  // check if the node label expression specified is valid
  checkNodeLabelExpression(req);
  if (req.getNodes() != null) {
    HashSet<String> dedupedNodes = new HashSet<String>(req.getNodes());
    if(dedupedNodes.size() != req.getNodes().size()) {
      Joiner joiner = Joiner.on(',');
      LOG.warn("ContainerRequest has duplicate nodes: "
          + joiner.join(req.getNodes()));
    }
    for (String node : dedupedNodes) {
      addResourceRequest(req.getPriority(), node, req.getCapability(), req,
          true, req.getNodeLabelExpression());
    }
  }
  for (String rack : dedupedRacks) {
    addResourceRequest(req.getPriority(), rack, req.getCapability(), req,
        true, req.getNodeLabelExpression());
  }
  // Ensure node requests are accompanied by requests for
  // corresponding rack
  for (String rack : inferredRacks) {
    addResourceRequest(req.getPriority(), rack, req.getCapability(), req,
        req.getRelaxLocality(), req.getNodeLabelExpression());
  }
  // Off-switch
  addResourceRequest(req.getPriority(), ResourceRequest.ANY,
      req.getCapability(), req, req.getRelaxLocality(), req.getNodeLabelExpression());
}
/**
 * Withdraws a previously added container request, decrementing the
 * corresponding node-, rack- and ANY-level resource requests.
 */
@Override
public synchronized void removeContainerRequest(T req) {
  Preconditions.checkArgument(req != null,
      "Resource request can not be null.");
  // Every rack touched by this request: explicit racks plus racks
  // inferred from the requested nodes.
  Set<String> allRacks = new HashSet<String>();
  List<String> explicitRacks = req.getRacks();
  if (explicitRacks != null) {
    allRacks.addAll(explicitRacks);
  }
  allRacks.addAll(resolveRacks(req.getNodes()));
  // Update resource requests: nodes (deduped) first, then racks, then ANY.
  List<String> nodes = req.getNodes();
  if (nodes != null) {
    for (String node : new HashSet<String>(nodes)) {
      decResourceRequest(req.getPriority(), node, req.getCapability(), req);
    }
  }
  for (String rack : allRacks) {
    decResourceRequest(req.getPriority(), rack, req.getCapability(), req);
  }
  decResourceRequest(req.getPriority(), ResourceRequest.ANY,
      req.getCapability(), req);
}
/**
 * Records a request to change a container's resource allocation. The
 * request is tracked in 'change' (sent on the next allocate() call) and
 * in 'pendingChange' (kept until the RM confirms, for resync recovery).
 */
@Override
public synchronized void requestContainerResourceChange(
    Container container, Resource capability) {
  validateContainerResourceChangeRequest(
      container.getId(), container.getResource(), capability);
  ContainerId containerId = container.getId();
  SimpleEntry<Container, Resource> existing = change.get(containerId);
  if (existing != null) {
    // A newer request for the same container replaces the old target.
    existing.setValue(capability);
  } else {
    change.put(containerId, new SimpleEntry<>(container, capability));
  }
  SimpleEntry<Container, Resource> pending = pendingChange.get(containerId);
  if (pending != null) {
    pending.setValue(capability);
  } else {
    pendingChange.put(containerId, new SimpleEntry<>(container, capability));
  }
}
/**
 * Asks the RM to release an assigned container. The release is queued for
 * the next allocate() call and also tracked in pendingRelease until the
 * RM reports the container completed; any pending resource-change request
 * for the container becomes moot and is dropped.
 */
@Override
public synchronized void releaseAssignedContainer(ContainerId containerId) {
  Preconditions.checkArgument(containerId != null,
      "ContainerId can not be null.");
  pendingRelease.add(containerId);
  release.add(containerId);
  pendingChange.remove(containerId);
}
/** Returns the headroom reported by the RM in the last allocate() response. */
@Override
public synchronized Resource getAvailableResources() {
  return clusterAvailableResources;
}
/** Returns the cluster node count reported by the RM in the last allocate() response. */
@Override
public synchronized int getClusterNodeCount() {
  return clusterNodeCount;
}
/**
 * Returns the collections of outstanding container requests at the given
 * priority/location that the given capability can satisfy. An exact
 * capability match is returned alone; otherwise every smaller request
 * that fits inside the capability is returned (the per-location map is
 * reverse-sorted by capability, so tailMap yields resources <= capability).
 */
@Override
public synchronized List<? extends Collection<T>> getMatchingRequests(
                                        Priority priority,
                                        String resourceName,
                                        Resource capability) {
  Preconditions.checkArgument(capability != null,
      "The Resource to be requested should not be null ");
  Preconditions.checkArgument(priority != null,
      "The priority at which to request containers should not be null ");
  List<LinkedHashSet<T>> list = new LinkedList<LinkedHashSet<T>>();
  Map<String, TreeMap<Resource, ResourceRequestInfo>> remoteRequests =
      this.remoteRequestsTable.get(priority);
  if (remoteRequests == null) {
    return list;
  }
  TreeMap<Resource, ResourceRequestInfo> reqMap = remoteRequests
      .get(resourceName);
  if (reqMap == null) {
    return list;
  }
  ResourceRequestInfo resourceRequestInfo = reqMap.get(capability);
  if (resourceRequestInfo != null &&
      !resourceRequestInfo.containerRequests.isEmpty()) {
    list.add(resourceRequestInfo.containerRequests);
    return list;
  }
  // no exact match. Container may be larger than what was requested.
  // get all resources <= capability. map is reverse sorted.
  SortedMap<Resource, ResourceRequestInfo> tailMap =
      reqMap.tailMap(capability);
  for(Map.Entry<Resource, ResourceRequestInfo> entry : tailMap.entrySet()) {
    if (canFit(entry.getKey(), capability) &&
        !entry.getValue().containerRequests.isEmpty()) {
      // match found that fits in the larger resource
      list.add(entry.getValue().containerRequests);
    }
  }
  // no match found
  return list;
}
/**
 * Resolves each requested node to its rack via the cluster topology, so
 * node-level requests can be accompanied by rack-level requests.
 * Unresolvable nodes are logged and skipped; a null node list yields an
 * empty set.
 */
private Set<String> resolveRacks(List<String> nodes) {
  Set<String> racks = new HashSet<String>();
  if (nodes == null) {
    return racks;
  }
  for (String node : nodes) {
    String rack = RackResolver.resolve(node).getNetworkLocation();
    if (rack != null) {
      racks.add(rack);
    } else {
      LOG.warn("Failed to resolve rack for node " + node + ".");
    }
  }
  return racks;
}
/**
 * ContainerRequests with locality relaxation cannot be made at the same
 * priority as ContainerRequests without locality relaxation.
 *
 * Throws InvalidContainerRequestException if any of the given locations
 * already has outstanding requests at this priority whose relaxLocality
 * setting differs from {@code relaxLocality}.
 */
private void checkLocalityRelaxationConflict(Priority priority,
    Collection<String> locations, boolean relaxLocality) {
  Map<String, TreeMap<Resource, ResourceRequestInfo>> remoteRequests =
      this.remoteRequestsTable.get(priority);
  if (remoteRequests == null) {
    return;
  }
  // Locality relaxation will be set to relaxLocality for all implicitly
  // requested racks. Make sure that existing rack requests match this.
  for (String location : locations) {
    TreeMap<Resource, ResourceRequestInfo> reqs =
        remoteRequests.get(location);
    if (reqs != null && !reqs.isEmpty()) {
      // All requests at one location share the same relaxLocality, so
      // checking any one of them is sufficient.
      boolean existingRelaxLocality =
          reqs.values().iterator().next().remoteRequest.getRelaxLocality();
      if (relaxLocality != existingRelaxLocality) {
        throw new InvalidContainerRequestException("Cannot submit a "
            + "ContainerRequest asking for location " + location
            + " with locality relaxation " + relaxLocality + " when it has "
            + "already been requested with locality relaxation " + existingRelaxLocality);
      }
    }
  }
}
/**
 * Validates the node label expression set on a container request.
 *
 * Only a single node label may be specified ("&&" / "||" combinators are
 * rejected), and a label may not be combined with explicit rack or node
 * requests.
 *
 * @param containerRequest the request to validate
 * @throws InvalidContainerRequestException if the expression is invalid
 */
private void checkNodeLabelExpression(T containerRequest) {
  String exp = containerRequest.getNodeLabelExpression();
  if (null == exp || exp.isEmpty()) {
    return;
  }
  // Don't support specifying >= 2 node labels in a node label expression now
  // (a "&&"/"||" combinator implies at least two labels, i.e. more than
  // one; the original message incorrectly said "more than two").
  if (exp.contains("&&") || exp.contains("||")) {
    throw new InvalidContainerRequestException(
        "Cannot specify more than one node label"
            + " in a single node label expression");
  }
  // Don't allow specify node label against ANY request
  if ((containerRequest.getRacks() != null &&
      (!containerRequest.getRacks().isEmpty()))
      ||
      (containerRequest.getNodes() != null &&
      (!containerRequest.getNodes().isEmpty()))) {
    throw new InvalidContainerRequestException(
        "Cannot specify node label with rack and node");
  }
}
/**
 * Validates a container resource-change request: container id and both
 * capabilities must be non-null, and each capability must be strictly
 * positive. "fitsIn(none, x) && !equals(none, x)" means x >= 0 in every
 * dimension and not all-zero, i.e. x > 0.
 */
private void validateContainerResourceChangeRequest(
    ContainerId containerId, Resource original, Resource target) {
  Preconditions.checkArgument(containerId != null,
      "ContainerId cannot be null");
  Preconditions.checkArgument(original != null,
      "Original resource capability cannot be null");
  Preconditions.checkArgument(!Resources.equals(Resources.none(), original)
          && Resources.fitsIn(Resources.none(), original),
      "Original resource capability must be greater than 0");
  Preconditions.checkArgument(target != null,
      "Target resource capability cannot be null");
  Preconditions.checkArgument(!Resources.equals(Resources.none(), target)
          && Resources.fitsIn(Resources.none(), target),
      "Target resource capability must be greater than 0");
}
/**
 * Adds (or refreshes) a ResourceRequest in the 'ask' set.
 *
 * This code looks weird but is needed because of the following scenario.
 * A ResourceRequest is removed from the remoteRequestTable. A 0 container
 * request is added to 'ask' to notify the RM about not needing it any more.
 * Before the call to allocate, the user now requests more containers. If
 * the locations of the 0 size request and the new request are the same
 * (with the difference being only container count), then the set comparator
 * will consider both to be the same and not add the new request to ask. So
 * we need to remove the "same" request and then add it back. The
 * comparator is container count agnostic.
 * This should happen only rarely but we do need to guard against it.
 */
private void addResourceRequestToAsk(ResourceRequest remoteRequest) {
  // Set.remove is a no-op when the element is absent, so the original
  // contains() pre-check was redundant and has been dropped.
  ask.remove(remoteRequest);
  ask.add(remoteRequest);
}
/**
 * Increments (creating table entries on demand) the outstanding request
 * count for (priority, resourceName, capability) and queues the updated
 * ResourceRequest for the next allocate() call.
 *
 * @param relaxLocality whether the request may be satisfied elsewhere;
 *        only relaxed requests are tracked in containerRequests for
 *        getMatchingRequests()
 * @param labelExpression applied only to the ANY-level request
 */
private void
    addResourceRequest(Priority priority, String resourceName,
        Resource capability, T req, boolean relaxLocality,
        String labelExpression) {
  Map<String, TreeMap<Resource, ResourceRequestInfo>> remoteRequests =
    this.remoteRequestsTable.get(priority);
  if (remoteRequests == null) {
    remoteRequests =
        new HashMap<String, TreeMap<Resource, ResourceRequestInfo>>();
    this.remoteRequestsTable.put(priority, remoteRequests);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Added priority=" + priority);
    }
  }
  TreeMap<Resource, ResourceRequestInfo> reqMap =
      remoteRequests.get(resourceName);
  if (reqMap == null) {
    // capabilities are stored in reverse sorted order. smallest last.
    reqMap = new TreeMap<Resource, ResourceRequestInfo>(
        new ResourceReverseMemoryThenCpuComparator());
    remoteRequests.put(resourceName, reqMap);
  }
  ResourceRequestInfo resourceRequestInfo = reqMap.get(capability);
  if (resourceRequestInfo == null) {
    resourceRequestInfo =
        new ResourceRequestInfo(priority, resourceName, capability,
            relaxLocality);
    reqMap.put(capability, resourceRequestInfo);
  }
  resourceRequestInfo.remoteRequest.setNumContainers(
      resourceRequestInfo.remoteRequest.getNumContainers() + 1);
  if (relaxLocality) {
    resourceRequestInfo.containerRequests.add(req);
  }
  if (ResourceRequest.ANY.equals(resourceName)) {
    resourceRequestInfo.remoteRequest.setNodeLabelExpression(labelExpression);
  }
  // Note this down for next interaction with ResourceManager
  addResourceRequestToAsk(resourceRequestInfo.remoteRequest);
  if (LOG.isDebugEnabled()) {
    LOG.debug("addResourceRequest:" + " applicationId="
        + " priority=" + priority.getPriority()
        + " resourceName=" + resourceName + " numContainers="
        + resourceRequestInfo.remoteRequest.getNumContainers()
        + " #asks=" + ask.size());
  }
}
/**
 * Decrements the outstanding request count for
 * (priority, resourceName, capability), queues the updated (possibly
 * zero-count) ResourceRequest so the RM sees the decrease, and prunes
 * empty table entries.
 */
private void decResourceRequest(Priority priority,
                                String resourceName,
                                Resource capability,
                                T req) {
  Map<String, TreeMap<Resource, ResourceRequestInfo>> remoteRequests =
    this.remoteRequestsTable.get(priority);
  if(remoteRequests == null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Not decrementing resource as priority " + priority
          + " is not present in request table");
    }
    return;
  }
  Map<Resource, ResourceRequestInfo> reqMap = remoteRequests.get(resourceName);
  if (reqMap == null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Not decrementing resource as " + resourceName
          + " is not present in request table");
    }
    return;
  }
  ResourceRequestInfo resourceRequestInfo = reqMap.get(capability);
  // Guard against a capability that was never requested (or already fully
  // decremented) at this priority/location; the original code would have
  // thrown a NullPointerException on the dereferences below.
  if (resourceRequestInfo == null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Not decrementing resource as capability " + capability
          + " is not present in request table");
    }
    return;
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("BEFORE decResourceRequest:" + " applicationId="
        + " priority=" + priority.getPriority()
        + " resourceName=" + resourceName + " numContainers="
        + resourceRequestInfo.remoteRequest.getNumContainers()
        + " #asks=" + ask.size());
  }
  resourceRequestInfo.remoteRequest.setNumContainers(
      resourceRequestInfo.remoteRequest.getNumContainers() - 1);
  resourceRequestInfo.containerRequests.remove(req);
  if(resourceRequestInfo.remoteRequest.getNumContainers() < 0) {
    // guard against spurious removals
    resourceRequestInfo.remoteRequest.setNumContainers(0);
  }
  // send the ResourceRequest to RM even if is 0 because it needs to override
  // a previously sent value. If ResourceRequest was not sent previously then
  // sending 0 aught to be a no-op on RM
  addResourceRequestToAsk(resourceRequestInfo.remoteRequest);
  // delete entries from map if no longer needed
  if (resourceRequestInfo.remoteRequest.getNumContainers() == 0) {
    reqMap.remove(capability);
    if (reqMap.size() == 0) {
      remoteRequests.remove(resourceName);
    }
    if (remoteRequests.size() == 0) {
      remoteRequestsTable.remove(priority);
    }
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("AFTER decResourceRequest:" + " applicationId="
        + " priority=" + priority.getPriority()
        + " resourceName=" + resourceName + " numContainers="
        + resourceRequestInfo.remoteRequest.getNumContainers()
        + " #asks=" + ask.size());
  }
}
/**
 * Updates the node blacklist to be sent on the next allocate() call. A
 * node appearing in both lists in the same invocation is allowed but
 * logged; removals win (they are applied after additions are cleared).
 *
 * Unlike the original implementation, this does NOT mutate the
 * caller-supplied lists (the old code called removeAll on the
 * blacklistAdditions argument just to detect overlap).
 */
@Override
public synchronized void updateBlacklist(List<String> blacklistAdditions,
    List<String> blacklistRemovals) {
  if (blacklistAdditions != null) {
    this.blacklistAdditions.addAll(blacklistAdditions);
    this.blacklistedNodes.addAll(blacklistAdditions);
    // if some resources are also in blacklistRemovals updated before, we
    // should remove them here.
    this.blacklistRemovals.removeAll(blacklistAdditions);
  }
  if (blacklistRemovals != null) {
    this.blacklistRemovals.addAll(blacklistRemovals);
    this.blacklistedNodes.removeAll(blacklistRemovals);
    // if some resources are in blacklistAdditions before, we should remove
    // them here.
    this.blacklistAdditions.removeAll(blacklistRemovals);
  }
  if (blacklistAdditions != null && blacklistRemovals != null) {
    // Detect overlap on a copy so the caller's list is left untouched.
    Set<String> common = new HashSet<String>(blacklistAdditions);
    common.retainAll(blacklistRemovals);
    if (!common.isEmpty()) {
      // we allow resources to appear in addition list and removal list in
      // the same invocation of updateBlacklist(), but should get a warn here.
      LOG.warn("The same resources appear in both blacklistAdditions and " +
          "blacklistRemovals in updateBlacklist.");
    }
  }
}
/**
 * Installs a refreshed AMRM token from the RM into the current UGI's
 * credentials so subsequent RPCs authenticate with it.
 */
private void updateAMRMToken(Token token) throws IOException {
  org.apache.hadoop.security.token.Token<AMRMTokenIdentifier> amrmToken =
      new org.apache.hadoop.security.token.Token<AMRMTokenIdentifier>(token
        .getIdentifier().array(), token.getPassword().array(), new Text(
        token.getKind()), new Text(token.getService()));
  // Preserve the token service sent by the RM when adding the token
  // to ensure we replace the previous token setup by the RM.
  // Afterwards we can update the service address for the RPC layer.
  UserGroupInformation currentUGI = UserGroupInformation.getCurrentUser();
  currentUGI.addToken(amrmToken);
  amrmToken.setService(ClientRMProxy.getAMRMTokenService(getConfig()));
}
}
| |
/*
* Copyright (C) 2012 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.CollectPreconditions.checkRemove;
import static com.google.common.collect.Hashing.smearedHash;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.AbstractSet;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* CompactHashSet is an implementation of a Set. All optional operations (adding and removing) are
* supported. The elements can be any objects.
*
* <p>{@code contains(x)}, {@code add(x)} and {@code remove(x)}, are all (expected and amortized)
* constant time operations. Expected in the hashtable sense (depends on the hash function doing a
* good job of distributing the elements to the buckets to a distribution not far from uniform), and
* amortized since some operations can trigger a hash table resize.
*
* <p>Unlike {@code java.util.HashSet}, iteration is only proportional to the actual {@code size()},
* which is optimal, and <i>not</i> the size of the internal hashtable, which could be much larger
* than {@code size()}. Furthermore, this structure only depends on a fixed number of arrays; {@code
* add(x)} operations <i>do not</i> create objects for the garbage collector to deal with, and for
* every element added, the garbage collector will have to traverse {@code 1.5} references on
* average, in the marking phase, not {@code 5.0} as in {@code java.util.HashSet}.
*
* <p>If there are no removals, then {@link #iterator iteration} order is the same as insertion
* order. Any removal invalidates any ordering guarantees.
*
* @author Dimitris Andreou
*/
@GwtIncompatible // not worth using in GWT for now
class CompactHashSet<E> extends AbstractSet<E> implements Serializable {
// TODO(user): cache all field accesses in local vars
/** Creates an empty {@code CompactHashSet} instance with the default capacity. */
public static <E> CompactHashSet<E> create() {
  return new CompactHashSet<E>();
}
/**
 * Creates a <i>mutable</i> {@code CompactHashSet} instance containing the elements of the given
 * collection in unspecified order.
 *
 * @param collection the elements that the set should contain
 * @return a new {@code CompactHashSet} containing those elements (minus duplicates)
 */
public static <E> CompactHashSet<E> create(Collection<? extends E> collection) {
  // Presize to the collection's size so addAll avoids intermediate resizes.
  CompactHashSet<E> set = createWithExpectedSize(collection.size());
  set.addAll(collection);
  return set;
}
/**
 * Creates a <i>mutable</i> {@code CompactHashSet} instance containing the given elements in
 * unspecified order.
 *
 * @param elements the elements that the set should contain
 * @return a new {@code CompactHashSet} containing those elements (minus duplicates)
 */
@SafeVarargs // the varargs array is only read (by Collections.addAll), never written or exposed
public static <E> CompactHashSet<E> create(E... elements) {
  CompactHashSet<E> set = createWithExpectedSize(elements.length);
  Collections.addAll(set, elements);
  return set;
}
/**
 * Creates a {@code CompactHashSet} instance, with a high enough "initial capacity" that it
 * <i>should</i> hold {@code expectedSize} elements without growth.
 *
 * @param expectedSize the number of elements you expect to add to the returned set
 * @return a new, empty {@code CompactHashSet} with enough capacity to hold {@code expectedSize}
 *     elements without resizing
 * @throws IllegalArgumentException if {@code expectedSize} is negative
 */
public static <E> CompactHashSet<E> createWithExpectedSize(int expectedSize) {
  return new CompactHashSet<E>(expectedSize);
}
// Largest power-of-two table size supported.
private static final int MAXIMUM_CAPACITY = 1 << 30;
// TODO(user): decide, and inline, load factor. 0.75?
private static final float DEFAULT_LOAD_FACTOR = 1.0f;
/** Bitmask that selects the low 32 bits (the "next" pointer of an entry). */
private static final long NEXT_MASK = (1L << 32) - 1;
/** Bitmask that selects the high 32 bits (the smeared hash of an entry). */
private static final long HASH_MASK = ~NEXT_MASK;
// TODO(user): decide default size
private static final int DEFAULT_SIZE = 3;
// Sentinel index meaning "no entry" / "null pointer".
static final int UNSET = -1;
/**
 * The hashtable. Its values are indexes to both the elements and entries arrays.
 *
 * <p>Currently, the UNSET value means "null pointer", and any non negative value x is the actual
 * index.
 *
 * <p>Its size must be a power of two.
 */
private transient int[] table;
/**
 * Contains the logical entries, in the range of [0, size()). The high 32 bits of each long is the
 * smeared hash of the element, whereas the low 32 bits is the "next" pointer (pointing to the
 * next entry in the bucket chain). The pointers in [size(), entries.length) are all "null"
 * (UNSET).
 */
private transient long[] entries;
/** The elements contained in the set, in the range of [0, size()). */
transient Object[] elements;
/** The load factor. */
transient float loadFactor;
/**
 * Keeps track of modifications of this set, to make it possible to throw
 * ConcurrentModificationException in the iterator. Note that we choose not to make this volatile,
 * so we do less of a "best effort" to track such errors, for better performance.
 */
transient int modCount;
/** When we have this many elements, resize the hashtable. */
private transient int threshold;
/** The number of elements contained in the set. */
private transient int size;
/** Constructs a new empty instance of {@code CompactHashSet} with default size and load factor. */
CompactHashSet() {
  init(DEFAULT_SIZE, DEFAULT_LOAD_FACTOR);
}
/**
 * Constructs a new instance of {@code CompactHashSet} with the specified capacity.
 *
 * @param expectedSize the initial capacity of this {@code CompactHashSet}.
 */
CompactHashSet(int expectedSize) {
  init(expectedSize, DEFAULT_LOAD_FACTOR);
}
/**
 * Pseudoconstructor for serialization support: allocates the table,
 * entries and elements arrays and computes the resize threshold.
 *
 * @throws IllegalArgumentException if expectedSize is negative or
 *     loadFactor is not positive
 */
void init(int expectedSize, float loadFactor) {
  Preconditions.checkArgument(expectedSize >= 0, "Initial capacity must be non-negative");
  Preconditions.checkArgument(loadFactor > 0, "Illegal load factor");
  // Bucket count is a power of two sized for expectedSize at this load factor.
  int buckets = Hashing.closedTableSize(expectedSize, loadFactor);
  this.table = newTable(buckets);
  this.loadFactor = loadFactor;
  this.elements = new Object[expectedSize];
  this.entries = newEntries(expectedSize);
  // At least 1 so the table grows on the first add even when buckets*loadFactor < 1.
  this.threshold = Math.max(1, (int) (buckets * loadFactor));
}
/** Allocates a bucket array of the given size with every slot marked empty (UNSET). */
private static int[] newTable(int size) {
  int[] buckets = new int[size];
  Arrays.fill(buckets, UNSET);
  return buckets;
}
private static long[] newEntries(int size) {
long[] array = new long[size];
Arrays.fill(array, UNSET);
return array;
}
/** Extracts the smeared hash stored in the high 32 bits of a packed entry. */
private static int getHash(long entry) {
  return (int) (entry >>> 32);
}
/** Returns the next-entry index from the low 32 bits, or UNSET if the pointer is "null". */
private static int getNext(long entry) {
  return (int) entry;
}
/** Returns a new entry value by changing the "next" index of an existing entry (hash bits kept). */
private static long swapNext(long entry, int newNext) {
  return (HASH_MASK & entry) | (NEXT_MASK & newNext);
}
/** Mask for reducing a hash to a bucket index; valid because table.length is a power of two. */
private int hashTableMask() {
  return table.length - 1;
}
/**
 * Adds the object if not already present: walks the bucket chain looking
 * for an equal element (comparing cached hashes first to avoid equals()
 * calls), and if none is found appends a new entry at index size(),
 * linking it at the end of the chain.
 *
 * @return true if the set changed, false if the element was already present
 */
@CanIgnoreReturnValue
@Override
public boolean add(@NullableDecl E object) {
  long[] entries = this.entries;
  Object[] elements = this.elements;
  int hash = smearedHash(object);
  int tableIndex = hash & hashTableMask();
  int newEntryIndex = this.size; // current size, and pointer to the entry to be appended
  int next = table[tableIndex];
  if (next == UNSET) { // uninitialized bucket
    table[tableIndex] = newEntryIndex;
  } else {
    // Walk the chain; bail out with false if an equal element exists.
    int last;
    long entry;
    do {
      last = next;
      entry = entries[next];
      if (getHash(entry) == hash && Objects.equal(object, elements[next])) {
        return false;
      }
      next = getNext(entry);
    } while (next != UNSET);
    // Link the new entry at the end of this bucket's chain.
    entries[last] = swapNext(entry, newEntryIndex);
  }
  if (newEntryIndex == Integer.MAX_VALUE) {
    throw new IllegalStateException("Cannot contain more than Integer.MAX_VALUE elements!");
  }
  int newSize = newEntryIndex + 1;
  resizeMeMaybe(newSize);
  insertEntry(newEntryIndex, object, hash);
  this.size = newSize;
  // Double the bucket table once the threshold is reached.
  if (newEntryIndex >= threshold) {
    resizeTable(2 * table.length);
  }
  modCount++;
  return true;
}
/**
 * Creates a fresh entry with the specified object at the specified position in the entry arrays.
 */
void insertEntry(int entryIndex, E object, int hash) {
  // Pack the hash into the high 32 bits; the new entry has no "next" link yet (UNSET).
  this.entries[entryIndex] = ((long) hash << 32) | (NEXT_MASK & UNSET);
  this.elements[entryIndex] = object;
}
/** Grows the entries/elements storage if {@code newSize} exceeds its current capacity. */
private void resizeMeMaybe(int newSize) {
  int entriesSize = entries.length;
  if (newSize > entriesSize) {
    // Grow by ~50%, saturating at Integer.MAX_VALUE on int overflow.
    int newCapacity = entriesSize + Math.max(1, entriesSize >>> 1);
    if (newCapacity < 0) {
      newCapacity = Integer.MAX_VALUE;
    }
    if (newCapacity != entriesSize) {
      resizeEntries(newCapacity);
    }
  }
}
/**
 * Reallocates the parallel element/entry arrays to {@code newCapacity}, which may be
 * larger or smaller than the current capacity. Newly added entry slots are marked UNSET.
 */
void resizeEntries(int newCapacity) {
  this.elements = Arrays.copyOf(elements, newCapacity);
  int previousCapacity = this.entries.length;
  long[] resized = Arrays.copyOf(this.entries, newCapacity);
  if (newCapacity > previousCapacity) {
    Arrays.fill(resized, previousCapacity, newCapacity, UNSET);
  }
  this.entries = resized;
}
/**
 * Rebuilds the hash table with {@code newCapacity} buckets, rehashing every entry into
 * its new bucket and rewriting the per-entry chain links in place.
 */
private void resizeTable(int newCapacity) { // newCapacity always a power of two
  int[] oldTable = table;
  int oldCapacity = oldTable.length;
  if (oldCapacity >= MAXIMUM_CAPACITY) {
    // Cannot grow any further; disable future resize attempts.
    threshold = Integer.MAX_VALUE;
    return;
  }
  int newThreshold = 1 + (int) (newCapacity * loadFactor);
  int[] newTable = newTable(newCapacity);
  long[] entries = this.entries;
  int mask = newTable.length - 1;
  for (int i = 0; i < size; i++) {
    long oldEntry = entries[i];
    int hash = getHash(oldEntry);
    int tableIndex = hash & mask;
    int next = newTable[tableIndex]; // old head of this bucket's chain (or UNSET)
    newTable[tableIndex] = i; // push entry i onto the front of the chain
    entries[i] = ((long) hash << 32) | (NEXT_MASK & next);
  }
  this.threshold = newThreshold;
  this.table = newTable;
}
/** Returns whether an element equal to {@code object} is present in the set. */
@Override
public boolean contains(@NullableDecl Object object) {
  int hash = smearedHash(object);
  // Walk the bucket chain for this hash until the end marker (UNSET) is reached.
  for (int index = table[hash & hashTableMask()]; index != UNSET; ) {
    long candidate = entries[index];
    if (getHash(candidate) == hash && Objects.equal(object, elements[index])) {
      return true;
    }
    index = getNext(candidate);
  }
  return false;
}
/** Removes {@code object} if present. Returns true if the set was modified. */
@CanIgnoreReturnValue
@Override
public boolean remove(@NullableDecl Object object) {
  return remove(object, smearedHash(object));
}
/**
 * Removes the element with the given precomputed smeared hash, unlinking it from its
 * bucket chain and compacting the entry storage via {@link #moveEntry}.
 */
@CanIgnoreReturnValue
private boolean remove(Object object, int hash) {
  int tableIndex = hash & hashTableMask();
  int next = table[tableIndex];
  if (next == UNSET) { // empty bucket: nothing to remove
    return false;
  }
  int last = UNSET; // predecessor of "next" in the chain, or UNSET at the chain head
  do {
    if (getHash(entries[next]) == hash && Objects.equal(object, elements[next])) {
      if (last == UNSET) {
        // we need to update the root link from table[]
        table[tableIndex] = getNext(entries[next]);
      } else {
        // we need to update the link from the chain
        entries[last] = swapNext(entries[last], getNext(entries[next]));
      }
      // Fill the vacated slot with the last entry so storage stays dense.
      moveEntry(next);
      size--;
      modCount++;
      return true;
    }
    last = next;
    next = getNext(entries[next]);
  } while (next != UNSET);
  return false;
}
/**
 * Moves the last entry in the entry array into {@code dstIndex}, and nulls out its old position.
 */
void moveEntry(int dstIndex) {
  int srcIndex = size() - 1;
  if (dstIndex < srcIndex) {
    // move last entry to deleted spot
    elements[dstIndex] = elements[srcIndex];
    elements[srcIndex] = null;
    // move the last entry to the removed spot, just like we moved the element
    long lastEntry = entries[srcIndex];
    entries[dstIndex] = lastEntry;
    entries[srcIndex] = UNSET;
    // also need to update whoever's "next" pointer was pointing to the last entry place
    // reusing "tableIndex" and "next"; these variables were no longer needed
    int tableIndex = getHash(lastEntry) & hashTableMask();
    int lastNext = table[tableIndex];
    if (lastNext == srcIndex) {
      // we need to update the root pointer
      table[tableIndex] = dstIndex;
    } else {
      // we need to update a pointer in an entry
      int previous;
      long entry;
      do {
        previous = lastNext;
        lastNext = getNext(entry = entries[lastNext]);
      } while (lastNext != srcIndex);
      // here, entries[previous] points to the old entry location; update it
      entries[previous] = swapNext(entry, dstIndex);
    }
  } else {
    // The removed entry was the last one: just clear its slot.
    elements[dstIndex] = null;
    entries[dstIndex] = UNSET;
  }
}
/** Returns the index of the first entry for iteration, or -1 if the set is empty. */
int firstEntryIndex() {
  return isEmpty() ? -1 : 0;
}

/** Returns the index of the entry following {@code entryIndex}, or -1 at the end. */
int getSuccessor(int entryIndex) {
  return (entryIndex + 1 < size) ? entryIndex + 1 : -1;
}

/**
 * Updates the index an iterator is pointing to after a call to remove: returns the index of the
 * entry that should be looked at after a removal on indexRemoved, with indexBeforeRemove as the
 * index that *was* the next entry that would be looked at.
 */
int adjustAfterRemove(int indexBeforeRemove, @SuppressWarnings("unused") int indexRemoved) {
  // moveEntry() shifted the last entry into the removed slot, so step back one index.
  return indexBeforeRemove - 1;
}
/**
 * Returns an iterator over the elements in entry-storage order. The iterator is
 * fail-fast on a best-effort basis (via {@code modCount}) and supports removal.
 */
@Override
public Iterator<E> iterator() {
  return new Iterator<E>() {
    int expectedModCount = modCount; // snapshot for concurrent-modification detection
    int index = firstEntryIndex();
    int indexToRemove = -1; // index of the element last returned by next(), or -1
    @Override
    public boolean hasNext() {
      return index >= 0;
    }
    @Override
    @SuppressWarnings("unchecked")
    public E next() {
      checkForConcurrentModification();
      if (!hasNext()) {
        throw new NoSuchElementException();
      }
      indexToRemove = index;
      E result = (E) elements[index];
      index = getSuccessor(index);
      return result;
    }
    @Override
    public void remove() {
      checkForConcurrentModification();
      checkRemove(indexToRemove >= 0);
      // Our own removal bumps modCount; account for it so it is not flagged as concurrent.
      expectedModCount++;
      CompactHashSet.this.remove(elements[indexToRemove], getHash(entries[indexToRemove]));
      index = adjustAfterRemove(index, indexToRemove);
      indexToRemove = -1;
    }
    private void checkForConcurrentModification() {
      if (modCount != expectedModCount) {
        throw new ConcurrentModificationException();
      }
    }
  };
}
@Override
public int size() {
  return size;
}

@Override
public boolean isEmpty() {
  return size == 0;
}

/** Returns a fresh array holding the first {@code size} slots of the element storage. */
@Override
public Object[] toArray() {
  return Arrays.copyOf(elements, size);
}

@CanIgnoreReturnValue
@Override
public <T> T[] toArray(T[] a) {
  return ObjectArrays.toArrayImpl(elements, 0, size, a);
}
/**
 * Ensures that this {@code CompactHashSet} has the smallest representation in memory, given its
 * current size.
 */
public void trimToSize() {
  int size = this.size;
  // Shrink the parallel entry/element arrays to exactly the element count.
  if (size < entries.length) {
    resizeEntries(size);
  }
  // size / loadFactor gives the table size of the appropriate load factor,
  // but that may not be a power of two. We floor it to a power of two by
  // keeping its highest bit. But the smaller table may have a load factor
  // larger than what we want; then we want to go to the next power of 2 if we can
  int minimumTableSize = Math.max(1, Integer.highestOneBit((int) (size / loadFactor)));
  if (minimumTableSize < MAXIMUM_CAPACITY) {
    double load = (double) size / minimumTableSize;
    if (load > loadFactor) {
      minimumTableSize <<= 1; // increase to next power if possible
    }
  }
  // Only rebuild the table if it actually shrinks.
  if (minimumTableSize < table.length) {
    resizeTable(minimumTableSize);
  }
}
/** Removes every element from the set; allocated capacity is retained. */
@Override
public void clear() {
  modCount++;
  // Reset every bucket and entry slot to the empty marker.
  Arrays.fill(table, UNSET);
  Arrays.fill(entries, UNSET);
  // Null out element references (up to the old size) so they can be garbage collected.
  int oldSize = size;
  size = 0;
  Arrays.fill(elements, 0, oldSize, null);
}
/**
 * The serial form currently mimics Android's java.util.HashSet version, e.g. see
 * http://omapzoom.org/?p=platform/libcore.git;a=blob;f=luni/src/main/java/java/util/HashSet.java
 */
private void writeObject(ObjectOutputStream stream) throws IOException {
  stream.defaultWriteObject();
  // Serial form: element count followed by each element in iteration order.
  stream.writeInt(size);
  for (E e : this) {
    stream.writeObject(e);
  }
}

@SuppressWarnings("unchecked")
private void readObject(ObjectInputStream stream) throws IOException, ClassNotFoundException {
  stream.defaultReadObject();
  // Transient storage must be rebuilt from scratch, then repopulated via add().
  init(DEFAULT_SIZE, DEFAULT_LOAD_FACTOR);
  int elementCount = stream.readInt();
  for (int i = elementCount; --i >= 0; ) {
    E element = (E) stream.readObject();
    add(element);
  }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.broker.admin.v3;
import com.google.common.collect.Sets;
import org.apache.bookkeeper.mledger.impl.PositionImpl;
import org.apache.pulsar.broker.auth.MockedPulsarServiceBaseTest;
import org.apache.pulsar.broker.transaction.pendingack.impl.MLPendingAckStore;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.Schema;
import org.apache.pulsar.client.api.transaction.Transaction;
import org.apache.pulsar.client.api.transaction.TxnID;
import org.apache.pulsar.client.impl.BatchMessageIdImpl;
import org.apache.pulsar.client.impl.MessageIdImpl;
import org.apache.pulsar.client.impl.transaction.TransactionImpl;
import org.apache.pulsar.common.naming.NamespaceName;
import org.apache.pulsar.common.naming.TopicDomain;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.policies.data.ClusterData;
import org.apache.pulsar.common.policies.data.ManagedLedgerInternalStats;
import org.apache.pulsar.common.policies.data.TenantInfoImpl;
import org.apache.pulsar.common.policies.data.TransactionBufferStats;
import org.apache.pulsar.common.policies.data.TransactionCoordinatorInternalStats;
import org.apache.pulsar.common.policies.data.TransactionCoordinatorStats;
import org.apache.pulsar.common.policies.data.TransactionPendingAckInternalStats;
import org.apache.pulsar.common.policies.data.TransactionPendingAckStats;
import org.apache.pulsar.common.policies.data.TransactionInBufferStats;
import org.apache.pulsar.common.policies.data.TransactionInPendingAckStats;
import org.apache.pulsar.common.policies.data.TransactionMetadata;
import org.apache.pulsar.packages.management.core.MockedPackagesStorageProvider;
import org.apache.pulsar.transaction.coordinator.impl.MLTransactionLogImpl;
import org.awaitility.Awaitility;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
/**
 * Integration tests for the transaction admin API ({@code admin.transactions()}) against a
 * mocked Pulsar broker with the transaction coordinator enabled: coordinator stats,
 * per-transaction buffer / pending-ack stats, transaction metadata, slow-transaction listing,
 * and coordinator / pending-ack internal (managed-ledger) stats.
 */
public class AdminApiTransactionTest extends MockedPulsarServiceBaseTest {
    /** Starts a broker with transactions enabled and creates the tenants/namespaces used below. */
    @BeforeMethod
    @Override
    protected void setup() throws Exception {
        conf.setEnablePackagesManagement(true);
        conf.setPackagesManagementStorageProvider(MockedPackagesStorageProvider.class.getName());
        conf.setTransactionCoordinatorEnabled(true);
        conf.setSystemTopicEnabled(true);
        conf.setTransactionBufferSnapshotMaxTransactionCount(1);
        super.internalSetup();
        admin.clusters().createCluster("test", ClusterData.builder().serviceUrl(pulsar.getWebServiceAddress()).build());
        TenantInfoImpl tenantInfo = new TenantInfoImpl(Sets.newHashSet("role1", "role2"), Sets.newHashSet("test"));
        admin.tenants().createTenant("pulsar", tenantInfo);
        admin.namespaces().createNamespace("pulsar/system", Sets.newHashSet("test"));
        admin.tenants().createTenant("public", tenantInfo);
        admin.namespaces().createNamespace("public/default", Sets.newHashSet("test"));
    }

    @AfterMethod(alwaysRun = true)
    @Override
    protected void cleanup() throws Exception {
        super.internalCleanup();
    }

    /** Both coordinator stats endpoints (by id and the full map) report "Ready" after commit/abort. */
    @Test(timeOut = 20000)
    public void testGetTransactionCoordinatorStats() throws Exception {
        initTransaction(2);
        getTransaction().commit().get();
        getTransaction().abort().get();
        TransactionCoordinatorStats transactionCoordinatorstats =
                admin.transactions().getCoordinatorStatsByIdAsync(1).get();
        verifyCoordinatorStats(transactionCoordinatorstats.state,
                transactionCoordinatorstats.leastSigBits, transactionCoordinatorstats.lowWaterMark);
        transactionCoordinatorstats = admin.transactions().getCoordinatorStatsByIdAsync(0).get();
        verifyCoordinatorStats(transactionCoordinatorstats.state,
                transactionCoordinatorstats.leastSigBits, transactionCoordinatorstats.lowWaterMark);
        Map<Integer, TransactionCoordinatorStats> stats = admin.transactions().getCoordinatorStatsAsync().get();
        assertEquals(stats.size(), 2);
        transactionCoordinatorstats = stats.get(0);
        verifyCoordinatorStats(transactionCoordinatorstats.state,
                transactionCoordinatorstats.leastSigBits, transactionCoordinatorstats.lowWaterMark);
        transactionCoordinatorstats = stats.get(1);
        verifyCoordinatorStats(transactionCoordinatorstats.state,
                transactionCoordinatorstats.leastSigBits, transactionCoordinatorstats.lowWaterMark);
    }

    /** In-buffer stats expose the first message position while open, and aborted=true after abort. */
    @Test(timeOut = 20000)
    public void testGetTransactionInBufferStats() throws Exception {
        initTransaction(2);
        TransactionImpl transaction = (TransactionImpl) getTransaction();
        final String topic = "persistent://public/default/testGetTransactionInBufferStats";
        admin.topics().createNonPartitionedTopic(topic);
        Producer<byte[]> producer = pulsarClient.newProducer(Schema.BYTES).topic(topic).sendTimeout(0, TimeUnit.SECONDS).create();
        MessageId messageId = producer.newMessage(transaction).value("Hello pulsar!".getBytes()).send();
        TransactionInBufferStats transactionInBufferStats = admin.transactions()
                .getTransactionInBufferStatsAsync(new TxnID(transaction.getTxnIdMostBits(),
                        transaction.getTxnIdLeastBits()), topic).get();
        PositionImpl position =
                PositionImpl.get(((MessageIdImpl) messageId).getLedgerId(), ((MessageIdImpl) messageId).getEntryId());
        assertEquals(transactionInBufferStats.startPosition, position.toString());
        assertFalse(transactionInBufferStats.aborted);
        transaction.abort().get();
        transactionInBufferStats = admin.transactions()
                .getTransactionInBufferStatsAsync(new TxnID(transaction.getTxnIdMostBits(),
                        transaction.getTxnIdLeastBits()), topic).get();
        // After abort the start position is cleared and the aborted flag is set.
        assertNull(transactionInBufferStats.startPosition);
        assertTrue(transactionInBufferStats.aborted);
    }

    /** Pending-ack stats report the cumulative ack position only after a transactional cumulative ack. */
    @Test(timeOut = 20000)
    public void testGetTransactionPendingAckStats() throws Exception {
        initTransaction(2);
        final String topic = "persistent://public/default/testGetTransactionInBufferStats";
        final String subName = "test";
        admin.topics().createNonPartitionedTopic(topic);
        Producer<byte[]> producer = pulsarClient.newProducer(Schema.BYTES).topic(topic).create();
        Consumer<byte[]> consumer = pulsarClient.newConsumer(Schema.BYTES).topic(topic)
                .subscriptionName(subName).subscribe();
        producer.sendAsync("Hello pulsar!".getBytes());
        producer.sendAsync("Hello pulsar!".getBytes());
        producer.sendAsync("Hello pulsar!".getBytes());
        producer.sendAsync("Hello pulsar!".getBytes());
        TransactionImpl transaction = (TransactionImpl) getTransaction();
        TransactionInPendingAckStats transactionInPendingAckStats = admin.transactions()
                .getTransactionInPendingAckStatsAsync(new TxnID(transaction.getTxnIdMostBits(),
                        transaction.getTxnIdLeastBits()), topic, subName).get();
        // No ack has been performed in this transaction yet.
        assertNull(transactionInPendingAckStats.cumulativeAckPosition);
        consumer.receive();
        consumer.receive();
        Message<byte[]> message = consumer.receive();
        BatchMessageIdImpl batchMessageId = (BatchMessageIdImpl) message.getMessageId();
        consumer.acknowledgeCumulativeAsync(batchMessageId, transaction).get();
        transactionInPendingAckStats = admin.transactions()
                .getTransactionInPendingAckStatsAsync(new TxnID(transaction.getTxnIdMostBits(),
                        transaction.getTxnIdLeastBits()), topic, subName).get();
        // Position is rendered as "ledgerId:entryId:batchIndex".
        assertEquals(transactionInPendingAckStats.cumulativeAckPosition,
                String.valueOf(batchMessageId.getLedgerId()) +
                        ':' +
                        batchMessageId.getEntryId() +
                        ':' +
                        batchMessageId.getBatchIndex());
    }

    /** Transaction metadata aggregates produced partitions and acked subscriptions across topics. */
    @Test(timeOut = 20000)
    public void testGetTransactionMetadata() throws Exception {
        initTransaction(2);
        long currentTime = System.currentTimeMillis();
        final String topic1 = "persistent://public/default/testGetTransactionMetadata-1";
        final String subName1 = "test1";
        final String topic2 = "persistent://public/default/testGetTransactionMetadata-2";
        final String subName2 = "test2";
        final String subName3 = "test3";
        admin.topics().createNonPartitionedTopic(topic1);
        admin.topics().createNonPartitionedTopic(topic2);
        TransactionImpl transaction = (TransactionImpl) getTransaction();
        Producer<byte[]> producer1 = pulsarClient.newProducer(Schema.BYTES)
                .sendTimeout(0, TimeUnit.SECONDS).topic(topic1).create();
        Producer<byte[]> producer2 = pulsarClient.newProducer(Schema.BYTES)
                .sendTimeout(0, TimeUnit.SECONDS).topic(topic2).create();
        Consumer<byte[]> consumer1 = pulsarClient.newConsumer(Schema.BYTES).topic(topic1)
                .subscriptionName(subName1).subscribe();
        Consumer<byte[]> consumer2 = pulsarClient.newConsumer(Schema.BYTES).topic(topic2)
                .subscriptionName(subName2).subscribe();
        Consumer<byte[]> consumer3 = pulsarClient.newConsumer(Schema.BYTES).topic(topic2)
                .subscriptionName(subName3).subscribe();
        MessageId messageId1 = producer1.send("Hello pulsar!".getBytes());
        MessageId messageId2 = producer2.send("Hello pulsar!".getBytes());
        MessageId messageId3 = producer1.newMessage(transaction).value("Hello pulsar!".getBytes()).send();
        MessageId messageId4 = producer2.newMessage(transaction).value("Hello pulsar!".getBytes()).send();
        consumer1.acknowledgeCumulativeAsync(messageId1, transaction).get();
        consumer2.acknowledgeCumulativeAsync(messageId2, transaction).get();
        consumer3.acknowledgeCumulativeAsync(messageId2, transaction).get();
        TxnID txnID = new TxnID(transaction.getTxnIdMostBits(), transaction.getTxnIdLeastBits());
        TransactionMetadata transactionMetadata = admin.transactions()
                .getTransactionMetadataAsync(new TxnID(transaction.getTxnIdMostBits(),
                        transaction.getTxnIdLeastBits())).get();
        assertEquals(transactionMetadata.txnId, txnID.toString());
        assertTrue(transactionMetadata.openTimestamp > currentTime);
        // 5000ms matches the timeout configured in getTransaction().
        assertEquals(transactionMetadata.timeoutAt, 5000L);
        assertEquals(transactionMetadata.status, "OPEN");
        Map<String, TransactionInBufferStats> producedPartitions = transactionMetadata.producedPartitions;
        Map<String, Map<String, TransactionInPendingAckStats>> ackedPartitions = transactionMetadata.ackedPartitions;
        PositionImpl position1 = getPositionByMessageId(messageId1);
        PositionImpl position2 = getPositionByMessageId(messageId2);
        PositionImpl position3 = getPositionByMessageId(messageId3);
        PositionImpl position4 = getPositionByMessageId(messageId4);
        assertFalse(producedPartitions.get(topic1).aborted);
        assertFalse(producedPartitions.get(topic2).aborted);
        assertEquals(producedPartitions.get(topic1).startPosition, position3.toString());
        assertEquals(producedPartitions.get(topic2).startPosition, position4.toString());
        // topic1 has one acked subscription, topic2 has two.
        assertEquals(ackedPartitions.get(topic1).size(), 1);
        assertEquals(ackedPartitions.get(topic2).size(), 2);
        assertEquals(ackedPartitions.get(topic1).get(subName1).cumulativeAckPosition, position1.toString());
        assertEquals(ackedPartitions.get(topic2).get(subName2).cumulativeAckPosition, position2.toString());
        assertEquals(ackedPartitions.get(topic2).get(subName3).cumulativeAckPosition, position2.toString());
    }

    /** Transaction buffer stats report state, max-read position past the committed message, and snapshot time. */
    @Test(timeOut = 20000)
    public void testGetTransactionBufferStats() throws Exception {
        initTransaction(2);
        TransactionImpl transaction = (TransactionImpl) getTransaction();
        final String topic = "persistent://public/default/testGetTransactionBufferStats";
        final String subName1 = "test1";
        final String subName2 = "test2";
        admin.topics().createNonPartitionedTopic(topic);
        Producer<byte[]> producer = pulsarClient.newProducer(Schema.BYTES)
                .sendTimeout(0, TimeUnit.SECONDS).topic(topic).create();
        Consumer<byte[]> consumer1 = pulsarClient.newConsumer(Schema.BYTES).topic(topic)
                .subscriptionName(subName1).subscribe();
        Consumer<byte[]> consumer2 = pulsarClient.newConsumer(Schema.BYTES).topic(topic)
                .subscriptionName(subName2).subscribe();
        long currentTime = System.currentTimeMillis();
        MessageId messageId = producer.newMessage(transaction).value("Hello pulsar!".getBytes()).send();
        transaction.commit().get();
        transaction = (TransactionImpl) getTransaction();
        consumer1.acknowledgeAsync(messageId, transaction).get();
        consumer2.acknowledgeAsync(messageId, transaction).get();
        TransactionBufferStats transactionBufferStats = admin.transactions().
                getTransactionBufferStatsAsync(topic).get();
        assertEquals(transactionBufferStats.state, "Ready");
        // Max read position is one entry past the committed transactional message.
        assertEquals(transactionBufferStats.maxReadPosition,
                PositionImpl.get(((MessageIdImpl) messageId).getLedgerId(),
                        ((MessageIdImpl) messageId).getEntryId() + 1).toString());
        assertTrue(transactionBufferStats.lastSnapshotTimestamps > currentTime);
    }

    @DataProvider(name = "ackType")
    public static Object[] ackType() {
        return new Object[] { "cumulative", "individual"};
    }

    /** Pending-ack store moves from "None" to "Ready" after the first transactional ack (either ack type). */
    @Test(timeOut = 20000, dataProvider = "ackType")
    public void testGetPendingAckStats(String ackType) throws Exception {
        initTransaction(2);
        final String topic = "persistent://public/default/testGetPendingAckStats";
        final String subName = "test1";
        admin.topics().createNonPartitionedTopic(topic);
        Producer<byte[]> producer = pulsarClient.newProducer(Schema.BYTES)
                .sendTimeout(0, TimeUnit.SECONDS).topic(topic).create();
        Consumer<byte[]> consumer = pulsarClient.newConsumer(Schema.BYTES).topic(topic)
                .subscriptionName(subName).subscribe();
        TransactionPendingAckStats transactionPendingAckStats = admin.transactions().
                getPendingAckStatsAsync(topic, subName).get();
        assertEquals(transactionPendingAckStats.state, "None");
        producer.newMessage().value("Hello pulsar!".getBytes()).send();
        TransactionImpl transaction = (TransactionImpl) getTransaction();
        if (ackType.equals("individual")) {
            consumer.acknowledgeAsync(consumer.receive().getMessageId(), transaction);
        } else {
            consumer.acknowledgeCumulativeAsync(consumer.receive().getMessageId(), transaction);
        }
        transaction.commit().get();
        transactionPendingAckStats = admin.transactions().
                getPendingAckStatsAsync(topic, subName).get();
        assertEquals(transactionPendingAckStats.state, "Ready");
    }

    /** Slow-transaction listing returns only transactions whose timeout exceeds the threshold. */
    @Test(timeOut = 20000)
    public void testGetSlowTransactions() throws Exception {
        initTransaction(2);
        TransactionImpl transaction1 = (TransactionImpl) pulsarClient.newTransaction()
                .withTransactionTimeout(60, TimeUnit.SECONDS).build().get();
        TransactionImpl transaction2 = (TransactionImpl) pulsarClient.newTransaction()
                .withTransactionTimeout(60, TimeUnit.SECONDS).build().get();
        // These two 20s transactions fall under the 30s threshold and must not be listed.
        pulsarClient.newTransaction().withTransactionTimeout(20, TimeUnit.SECONDS).build();
        pulsarClient.newTransaction().withTransactionTimeout(20, TimeUnit.SECONDS).build();
        Map<String, TransactionMetadata> transactionMetadataMap = admin.transactions()
                .getSlowTransactionsAsync(30, TimeUnit.SECONDS).get();
        assertEquals(transactionMetadataMap.size(), 2);
        TxnID txnID1 = new TxnID(transaction1.getTxnIdMostBits(), transaction1.getTxnIdLeastBits());
        TxnID txnID2 = new TxnID(transaction2.getTxnIdMostBits(), transaction2.getTxnIdLeastBits());
        TransactionMetadata transactionMetadata = transactionMetadataMap.get(txnID1.toString());
        assertNotNull(transactionMetadata);
        assertEquals(transactionMetadata.timeoutAt, 60000);
        transactionMetadata = transactionMetadataMap.get(txnID2.toString());
        assertNotNull(transactionMetadata);
        assertEquals(transactionMetadata.timeoutAt, 60000);
    }

    /** Converts a (non-batch) message id into the equivalent managed-ledger position. */
    private static PositionImpl getPositionByMessageId(MessageId messageId) {
        return PositionImpl.get(((MessageIdImpl) messageId).getLedgerId(), ((MessageIdImpl) messageId).getEntryId());
    }

    /** Coordinator internal stats expose the transaction-log managed ledger, with/without metadata. */
    @Test(timeOut = 20000)
    public void testGetCoordinatorInternalStats() throws Exception {
        initTransaction(1);
        Transaction transaction = pulsarClient.newTransaction()
                .withTransactionTimeout(60, TimeUnit.SECONDS).build().get();
        TransactionCoordinatorInternalStats stats = admin.transactions()
                .getCoordinatorInternalStatsAsync(0, true).get();
        verifyManagedLegerInternalStats(stats.transactionLogStats.managedLedgerInternalStats, 26);
        assertEquals(TopicName.get(TopicDomain.persistent.toString(), NamespaceName.SYSTEM_NAMESPACE,
                MLTransactionLogImpl.TRANSACTION_LOG_PREFIX + "0").getPersistenceNamingEncoding(),
                stats.transactionLogStats.managedLedgerName);
        transaction.commit().get();
        stats = admin.transactions()
                .getCoordinatorInternalStatsAsync(0, false).get();
        // metadata=false must omit per-ledger metadata from the response.
        assertNull(stats.transactionLogStats.managedLedgerInternalStats.ledgers.get(0).metadata);
        assertEquals(TopicName.get(TopicDomain.persistent.toString(), NamespaceName.SYSTEM_NAMESPACE,
                MLTransactionLogImpl.TRANSACTION_LOG_PREFIX + "0").getPersistenceNamingEncoding(),
                stats.transactionLogStats.managedLedgerName);
    }

    /** Pending-ack internal stats expose the pending-ack store ledger and its cursor. */
    @Test(timeOut = 20000)
    public void testGetPendingAckInternalStats() throws Exception {
        initTransaction(1);
        TransactionImpl transaction = (TransactionImpl) getTransaction();
        final String topic = "persistent://public/default/testGetPendingAckInternalStats";
        final String subName = "test";
        admin.topics().createNonPartitionedTopic(topic);
        Producer<byte[]> producer = pulsarClient.newProducer(Schema.BYTES).topic(topic).create();
        Consumer<byte[]> consumer = pulsarClient.newConsumer(Schema.BYTES).topic(topic)
                .subscriptionName(subName).subscribe();
        MessageId messageId = producer.send("Hello pulsar!".getBytes());
        consumer.acknowledgeAsync(messageId, transaction).get();
        TransactionPendingAckInternalStats stats = admin.transactions()
                .getPendingAckInternalStatsAsync(topic, subName, true).get();
        ManagedLedgerInternalStats managedLedgerInternalStats = stats.pendingAckLogStats.managedLedgerInternalStats;
        assertEquals(TopicName.get(TopicDomain.persistent.toString(), "public", "default",
                "testGetPendingAckInternalStats" + "-"
                        + subName + MLPendingAckStore.PENDING_ACK_STORE_SUFFIX).getPersistenceNamingEncoding(),
                stats.pendingAckLogStats.managedLedgerName);
        verifyManagedLegerInternalStats(managedLedgerInternalStats, 16);
        ManagedLedgerInternalStats finalManagedLedgerInternalStats = managedLedgerInternalStats;
        managedLedgerInternalStats.cursors.forEach((s, cursorStats) -> {
            assertEquals(s, MLPendingAckStore.PENDING_ACK_STORE_CURSOR_NAME);
            assertEquals(cursorStats.readPosition, finalManagedLedgerInternalStats.lastConfirmedEntry);
        });
        stats = admin.transactions()
                .getPendingAckInternalStatsAsync(topic, subName, false).get();
        managedLedgerInternalStats = stats.pendingAckLogStats.managedLedgerInternalStats;
        assertEquals(TopicName.get(TopicDomain.persistent.toString(), "public", "default",
                "testGetPendingAckInternalStats" + "-"
                        + subName + MLPendingAckStore.PENDING_ACK_STORE_SUFFIX).getPersistenceNamingEncoding(),
                stats.pendingAckLogStats.managedLedgerName);
        // metadata=false must omit per-ledger metadata from the response.
        assertNull(managedLedgerInternalStats.ledgers.get(0).metadata);
    }

    /** Asserts a freshly started coordinator: Ready, no transactions issued, low-water mark 0. */
    private static void verifyCoordinatorStats(String state,
                                               long sequenceId, long lowWaterMark) {
        assertEquals(state, "Ready");
        assertEquals(sequenceId, 0);
        assertEquals(lowWaterMark, 0);
    }

    /**
     * Sets up {@code coordinatorSize} transaction coordinators and rebuilds the client with
     * transactions enabled, waiting until all coordinator stores are loaded.
     */
    private void initTransaction(int coordinatorSize) throws Exception {
        admin.topics().createPartitionedTopic(TopicName.TRANSACTION_COORDINATOR_ASSIGN.toString(), coordinatorSize);
        admin.lookups().lookupTopic(TopicName.TRANSACTION_COORDINATOR_ASSIGN.toString());
        pulsarClient = PulsarClient.builder().serviceUrl(lookupUrl.toString()).enableTransaction(true).build();
        pulsarClient.close();
        Awaitility.await().until(() ->
                pulsar.getTransactionMetadataStoreService().getStores().size() == coordinatorSize);
        pulsarClient = PulsarClient.builder().serviceUrl(lookupUrl.toString()).enableTransaction(true).build();
    }

    /** Opens a new transaction with a 5-second timeout. */
    private Transaction getTransaction() throws Exception {
        return pulsarClient.newTransaction()
                .withTransactionTimeout(5, TimeUnit.SECONDS).build().get();
    }

    /** Asserts internal stats for a managed ledger holding exactly one entry of {@code totalSize} bytes. */
    private static void verifyManagedLegerInternalStats(ManagedLedgerInternalStats managedLedgerInternalStats,
                                                        long totalSize) {
        assertEquals(managedLedgerInternalStats.entriesAddedCounter, 1);
        assertEquals(managedLedgerInternalStats.numberOfEntries, 1);
        assertEquals(managedLedgerInternalStats.totalSize, totalSize);
        assertEquals(managedLedgerInternalStats.currentLedgerEntries, 1);
        assertEquals(managedLedgerInternalStats.currentLedgerSize, totalSize);
        assertNull(managedLedgerInternalStats.lastLedgerCreationFailureTimestamp);
        assertEquals(managedLedgerInternalStats.waitingCursorsCount, 0);
        assertEquals(managedLedgerInternalStats.pendingAddEntriesCount, 0);
        assertNotNull(managedLedgerInternalStats.lastConfirmedEntry);
        assertEquals(managedLedgerInternalStats.ledgers.size(), 1);
        assertNotNull(managedLedgerInternalStats.ledgers.get(0).metadata);
        assertEquals(managedLedgerInternalStats.cursors.size(), 1);
    }
}
| |
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.internal.inspector;
import com.google.common.base.MoreObjects;
import com.intellij.icons.AllIcons;
import com.intellij.ide.ui.AntialiasingType;
import com.intellij.ide.ui.UISettings;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.notification.NotificationsManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.roots.ui.configuration.actions.IconWithTextAction;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.GraphicsConfig;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.ui.StripeTable;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.*;
import com.intellij.ui.border.CustomLineBorder;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.ui.components.JBTextField;
import com.intellij.ui.paint.RectanglePainter;
import com.intellij.ui.speedSearch.SpeedSearchUtil;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.Function;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.*;
import com.intellij.util.ui.tree.TreeUtil;
import net.miginfocom.swing.MigLayout;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.accessibility.Accessible;
import javax.accessibility.AccessibleContext;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.CompoundBorder;
import javax.swing.border.LineBorder;
import javax.swing.border.TitledBorder;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.plaf.ColorUIResource;
import javax.swing.plaf.UIResource;
import javax.swing.table.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.*;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.List;
import static java.util.Locale.ENGLISH;
public class UiInspectorAction extends ToggleAction implements DumbAware {
private static final String CLICK_INFO = "CLICK_INFO";
private static final String RENDERER_BOUNDS = "clicked renderer";
private UiInspector myInspector;
/**
 * Creates the toggle action; if the {@code idea.ui.debug.mode} system property is set,
 * schedules the inspector to be switched on as soon as the application is ready.
 */
public UiInspectorAction() {
  if (Boolean.getBoolean("idea.ui.debug.mode")) {
    Runnable enableInspector = () -> setSelected(null, true);
    ApplicationManager.getApplication().invokeLater(enableInspector);
  }
}
@Override
public boolean isSelected(AnActionEvent e) {
  // The action is "on" exactly while an inspector instance is alive.
  return null != myInspector;
}
@Override
public void setSelected(AnActionEvent e, boolean state) {
  if (!state) {
    // Turning off: detach the instance first so re-entrant calls see a clean state, then dispose it.
    UiInspector current = myInspector;
    myInspector = null;
    if (current != null) {
      Disposer.dispose(current);
    }
    return;
  }
  // Turning on: lazily create the inspector and show the "how to use" balloon at most once.
  if (myInspector == null) {
    myInspector = new UiInspector();
  }
  UiInspectorNotification[] shown =
    NotificationsManager.getNotificationsManager().getNotificationsOfType(UiInspectorNotification.class, null);
  if (shown.length == 0) {
    Notifications.Bus.notify(new UiInspectorNotification(), null);
  }
}
/**
 * One-shot balloon shown when the inspector is enabled, telling the user
 * how to trigger it (Control-Alt-Click on any component).
 */
private static class UiInspectorNotification extends Notification {
  private UiInspectorNotification() {
    super(Notifications.SYSTEM_MESSAGES_GROUP_ID, "UI Inspector", "Control-Alt-Click to view component info!",
          NotificationType.INFORMATION);
  }
}
/**
 * Inspector dialog: component hierarchy tree on the left, a property table for the current
 * selection on the right, plus Highlight/Refresh toolbar actions. While highlighting is on,
 * a translucent {@link HighlightComponent} is painted on the glass pane over the inspected
 * component (or over the clicked renderer's bounds when "click info" is shown).
 */
private static class InspectorWindow extends JDialog {
  private InspectorTable myInspectorTable;
  // Exactly one of myComponent / myInfo is non-null: a live component selection,
  // or the list of renderer "click info" properties.
  private Component myComponent;
  private List<PropertyBean> myInfo;
  // The component the inspector was originally opened on; used to anchor click-info highlights.
  private Component myInitialComponent;
  // Overlay added to the glass pane; null while highlighting is off.
  private HighlightComponent myHighlightComponent;
  private HierarchyTree myHierarchyTree;
  private final JPanel myWrapperPanel;

  private InspectorWindow(@NotNull Component component) throws HeadlessException {
    super(findWindow(component));
    Window window = findWindow(component);
    // Match the modality of the inspected dialog so this window stays usable above it.
    setModal(window instanceof JDialog && ((JDialog)window).isModal());
    myComponent = component;
    myInitialComponent = component;
    getRootPane().setBorder(JBUI.Borders.empty(5));
    setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
    setLayout(new BorderLayout());
    setTitle(component.getClass().getName());
    DefaultActionGroup actions = new DefaultActionGroup();
    actions.addAction(new IconWithTextAction("Highlight") {
      @Override
      public void actionPerformed(AnActionEvent e) {
        // Toggle: enabled highlight exists -> turn off, otherwise turn on.
        setHighlightingEnabled(myHighlightComponent == null);
      }
      @Override
      public void update(AnActionEvent e) {
        e.getPresentation().setEnabled(myInfo != null || (myComponent != null && myComponent.isVisible()));
      }
    });
    actions.addSeparator();
    actions.add(new IconWithTextAction("Refresh") {
      @Override
      public void actionPerformed(AnActionEvent e) {
        getCurrentTable().refresh();
      }
      @Override
      public void update(AnActionEvent e) {
        e.getPresentation().setEnabled(myComponent != null && myComponent.isVisible());
      }
    });
    ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.CONTEXT_TOOLBAR, actions, true);
    add(toolbar.getComponent(), BorderLayout.NORTH);
    myWrapperPanel = new JPanel(new BorderLayout());
    myInspectorTable = new InspectorTable(component);
    myHierarchyTree = new HierarchyTree(component) {
      @Override
      public void onComponentChanged(Component c) {
        // Preserve the highlight on/off state across a selection change.
        boolean wasHighlighted = myHighlightComponent != null;
        setHighlightingEnabled(false);
        switchInfo(c);
        setHighlightingEnabled(wasHighlighted);
      }
      @Override
      public void onComponentChanged(List<PropertyBean> info) {
        boolean wasHighlighted = myHighlightComponent != null;
        setHighlightingEnabled(false);
        switchInfo(info);
        setHighlightingEnabled(wasHighlighted);
      }
    };
    myWrapperPanel.add(myInspectorTable, BorderLayout.CENTER);
    Splitter splitPane = new JBSplitter(false, "UiInspector.splitter.proportion", 0.5f);
    splitPane.setSecondComponent(myWrapperPanel);
    splitPane.setFirstComponent(new JBScrollPane(myHierarchyTree));
    add(splitPane, BorderLayout.CENTER);
    myHierarchyTree.expandPath();
    addWindowListener(new WindowAdapter() {
      @Override
      public void windowClosing(WindowEvent e) {
        close();
      }
    });
    getRootPane().getActionMap().put("CLOSE", new AbstractAction() {
      public void actionPerformed(ActionEvent e) {
        close();
      }
    });
    setHighlightingEnabled(true);
    // Escape closes the inspector.
    getRootPane().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), "CLOSE");
  }

  /** Returns the owner window via DialogWrapper, or null if the component is not in one. */
  private static Window findWindow(Component component) {
    DialogWrapper dialogWrapper = DialogWrapper.findInstance(component);
    if (dialogWrapper != null) {
      return dialogWrapper.getPeer().getWindow();
    }
    return null;
  }

  private InspectorTable getCurrentTable() {
    return myInspectorTable;
  }

  /** Shows the property table for a component selection (clears any click-info state). */
  private void switchInfo(@Nullable Component c) {
    if (c == null) return;
    myComponent = c;
    myInfo = null;
    setTitle(myComponent.getClass().getName());
    myWrapperPanel.removeAll();
    myInspectorTable = new InspectorTable(c);
    myWrapperPanel.add(myInspectorTable, BorderLayout.CENTER);
    myWrapperPanel.revalidate();
    myWrapperPanel.repaint();
  }

  /** Shows the property table for renderer click info (clears any component selection). */
  private void switchInfo(@NotNull List<PropertyBean> clickInfo) {
    myComponent = null;
    myInfo = clickInfo;
    setTitle("Click Info");
    myWrapperPanel.removeAll();
    myInspectorTable = new InspectorTable(clickInfo);
    myWrapperPanel.add(myInspectorTable, BorderLayout.CENTER);
    myWrapperPanel.revalidate();
    myWrapperPanel.repaint();
  }

  @Override
  public void dispose() {
    super.dispose();
    // Detach IDE-installed listeners so the dialog does not leak.
    DialogWrapper.cleanupRootPane(rootPane);
    DialogWrapper.cleanupWindowListeners(this);
  }

  public void close() {
    if (myInitialComponent instanceof JComponent) {
      // Drop the stashed click info so a later inspection starts fresh.
      ((JComponent)myInitialComponent).putClientProperty(CLICK_INFO, null);
    }
    myInfo = null;
    setHighlightingEnabled(false);
    if (myComponent == null) return;
    myComponent = null;
    setVisible(false);
    dispose();
  }

  /** Removes any existing highlight overlay and, if {@code enable}, installs a new one. */
  private void setHighlightingEnabled(boolean enable) {
    if (myHighlightComponent != null) {
      JComponent glassPane = getGlassPane(myHighlightComponent);
      if (glassPane != null) {
        glassPane.remove(myHighlightComponent);
        glassPane.revalidate();
        glassPane.repaint();
      }
      myHighlightComponent = null;
    }
    if (enable && myComponent != null) {
      JComponent glassPane = getGlassPane(myComponent);
      if (glassPane != null) {
        myHighlightComponent = new HighlightComponent(new JBColor(JBColor.GREEN, JBColor.RED));
        Point pt = SwingUtilities.convertPoint(myComponent, new Point(0, 0), glassPane);
        myHighlightComponent.setBounds(pt.x, pt.y, myComponent.getWidth(), myComponent.getHeight());
        glassPane.add(myHighlightComponent);
        glassPane.revalidate();
        glassPane.repaint();
      }
    }
    // Note: was 'enable & myInfo != null' — non-short-circuit '&' between booleans; fixed to '&&'.
    if (enable && myInfo != null && myInitialComponent != null) {
      // Click-info mode: highlight the clicked renderer's bounds within the initial component.
      Rectangle bounds = null;
      for (PropertyBean bean : myInfo) {
        if (RENDERER_BOUNDS.equals(bean.propertyName)) {
          bounds = (Rectangle)bean.propertyValue;
          break;
        }
      }
      if (bounds != null) {
        JComponent glassPane = getGlassPane(myInitialComponent);
        if (glassPane != null) {
          myHighlightComponent = new HighlightComponent(new JBColor(JBColor.GREEN, JBColor.RED));
          bounds = SwingUtilities.convertRectangle(myInitialComponent, bounds, glassPane);
          myHighlightComponent.setBounds(bounds);
          glassPane.add(myHighlightComponent);
          glassPane.revalidate();
          glassPane.repaint();
        }
      }
    }
  }

  @Nullable
  private static JComponent getGlassPane(@NotNull Component component) {
    JRootPane rootPane = SwingUtilities.getRootPane(component);
    return rootPane == null ? null : (JComponent)rootPane.getGlassPane();
  }
}
/**
 * Renders one node of the hierarchy tree: component class name, bounds, opacity/buffering flags,
 * and a foreground/background swatch icon. Problem components are color-coded (invisible,
 * zero-sized, or smaller than their preferred size) and the originally clicked component's
 * row gets a green tint.
 */
private static class ComponentTreeCellRenderer extends ColoredTreeCellRenderer {
  private final Component myInitialSelection;

  ComponentTreeCellRenderer(Component initialSelection) {
    myInitialSelection = initialSelection;
    setFont(JBUI.Fonts.label(11));
    setBorder(JBUI.Borders.empty(0, 3));
  }

  @Override
  public void customizeCellRenderer(@NotNull JTree tree,
                                    Object value,
                                    boolean selected,
                                    boolean expanded,
                                    boolean leaf,
                                    int row,
                                    boolean hasFocus) {
    Color foreground = selected ? UIUtil.getTreeSelectionForeground() : UIUtil.getTreeForeground();
    Color background = selected ? UIUtil.getTreeSelectionBackground() : null;
    if (value instanceof HierarchyTree.ComponentNode) {
      HierarchyTree.ComponentNode componentNode = (HierarchyTree.ComponentNode)value;
      Component component = componentNode.getComponent();
      if (!selected) {
        // Gray = invisible; dark red (blue in dark theme) = zero-sized; blue = smaller than preferred size.
        if (!component.isVisible()) {
          foreground = JBColor.GRAY;
        }
        else if (component.getWidth() == 0 || component.getHeight() == 0) {
          foreground = new JBColor(new Color(128, 10, 0), JBColor.BLUE);
        }
        else if (component.getPreferredSize() != null &&
                 (component.getSize().width < component.getPreferredSize().width
                  || component.getSize().height < component.getPreferredSize().height)) {
          foreground = PlatformColors.BLUE;
        }
        if (myInitialSelection == componentNode.getComponent()) {
          // Semi-transparent green marks the component the inspector was opened on.
          background = new Color(31, 128, 8, 58);
        }
      }
      append(getComponentName(component));
      append(": " + RectangleRenderer.toString(component.getBounds()), SimpleTextAttributes.GRAYED_ATTRIBUTES);
      if (component.isOpaque()) {
        append(", opaque", SimpleTextAttributes.GRAYED_ATTRIBUTES);
      }
      if (component.isDoubleBuffered()) {
        append(", double-buffered", SimpleTextAttributes.GRAYED_ATTRIBUTES);
      }
      // Cache the rendered text on the node so speed search / toString can reuse it.
      componentNode.setText(toString());
      setIcon(createColorIcon(component.getForeground(), component.getBackground()));
    }
    if (value instanceof HierarchyTree.ClickInfoNode) {
      append(value.toString());
      setIcon(AllIcons.Ide.Rating);
    }
    setForeground(foreground);
    setBackground(background);
    SpeedSearchUtil.applySpeedSearchHighlighting(tree, this, false, selected);
  }
}
/**
 * Human-readable label for a component: its simple class name, followed by the
 * component's assigned name in quotes when one is set.
 */
@NotNull
private static String getComponentName(Component component) {
  StringBuilder label = new StringBuilder(getClassName(component));
  String assignedName = component.getName();
  if (StringUtil.isNotEmpty(assignedName)) {
    label.append(" \"").append(assignedName).append("\"");
  }
  return label.toString();
}
/** Builds a tree model rooted at the top-level window containing {@code c}. */
private static TreeModel buildModel(Component c) {
  // Walk up the parent chain until the root window is reached.
  Component root = c;
  for (Component parent = root.getParent(); parent != null; parent = root.getParent()) {
    root = parent;
  }
  return new DefaultTreeModel(new UiInspectorAction.HierarchyTree.ComponentNode(root));
}
/**
 * Tree of the full Swing hierarchy containing the inspected component. Subclasses receive
 * selection changes via the two {@code onComponentChanged} callbacks — one for component
 * nodes, one for "click info" nodes.
 */
private abstract static class HierarchyTree extends JTree implements TreeSelectionListener {
  final Component myComponent;

  private HierarchyTree(Component c) {
    myComponent = c;
    setModel(buildModel(c));
    setCellRenderer(new ComponentTreeCellRenderer(c));
    getSelectionModel().addTreeSelectionListener(this);
    new TreeSpeedSearch(this);
    if (((JComponent)c).getClientProperty(CLICK_INFO) != null) {
      // Click info is present: pre-select the ClickInfoNode (the row right after the lead row).
      SwingUtilities.invokeLater(() -> getSelectionModel().setSelectionPath(getPathForRow(getLeadSelectionRow() + 1)));
    }
  }

  /** Expands the whole tree, then selects and scrolls to the inspected component's row. */
  public void expandPath() {
    TreeUtil.expandAll(this);
    int count = getRowCount();
    ComponentNode node = new ComponentNode(myComponent);
    for (int i = 0; i < count; i++) {
      TreePath row = getPathForRow(i);
      // ComponentNode.equals compares by identity of the wrapped component.
      if (row.getLastPathComponent().equals(node)) {
        setSelectionPath(row);
        scrollPathToVisible(getSelectionPath());
        break;
      }
    }
  }

  @Override
  public void valueChanged(TreeSelectionEvent e) {
    TreePath path = e.getNewLeadSelectionPath();
    if (path == null) {
      onComponentChanged((Component)null);
      return;
    }
    Object component = path.getLastPathComponent();
    if (component instanceof ComponentNode) {
      Component c = ((ComponentNode)component).getComponent();
      onComponentChanged(c);
    }
    if (component instanceof ClickInfoNode) {
      onComponentChanged(((ClickInfoNode)component).getInfo());
    }
  }

  public abstract void onComponentChanged(List<PropertyBean> info);

  public abstract void onComponentChanged(Component c);

  /** Tree node wrapping one AWT component; children are built eagerly in the constructor. */
  private static class ComponentNode extends DefaultMutableTreeNode {
    private final Component myComponent;
    // Rendered label cached by ComponentTreeCellRenderer; falls back to the class name.
    String myText;

    private ComponentNode(@NotNull Component component) {
      super(component);
      myComponent = component;
      children = prepareChildren(myComponent);
    }

    Component getComponent() {
      return myComponent;
    }

    @Override
    public String toString() {
      return myText != null ? myText : myComponent.getClass().getName();
    }

    public void setText(String value) {
      myText = value;
    }

    @Override
    public boolean equals(Object obj) {
      // Identity comparison on the wrapped component; used by expandPath to find the row.
      return obj instanceof ComponentNode && ((ComponentNode)obj).getComponent() == getComponent();
    }

    @SuppressWarnings("UseOfObsoleteCollectionType")
    private static Vector prepareChildren(Component parent) {
      Vector<DefaultMutableTreeNode> result = new Vector<>();
      if (parent instanceof JComponent) {
        // Stashed renderer click info (if any) becomes the first child node.
        Object o = ((JComponent)parent).getClientProperty(CLICK_INFO);
        if (o instanceof List) {
          //noinspection unchecked
          result.add(new ClickInfoNode((List<PropertyBean>)o));
        }
      }
      if (parent instanceof Container) {
        for (Component component : ((Container)parent).getComponents()) {
          result.add(new ComponentNode(component));
        }
      }
      if (parent instanceof Window) {
        // Include owned windows, but never the inspector's own window.
        Window[] children = ((Window)parent).getOwnedWindows();
        for (Window child : children) {
          if (child instanceof InspectorWindow) continue;
          result.add(new ComponentNode(child));
        }
      }
      return result;
    }
  }

  /** Leaf node carrying the property list captured for a clicked cell renderer. */
  private static class ClickInfoNode extends DefaultMutableTreeNode {
    private final List<PropertyBean> myInfo;

    public ClickInfoNode(List<PropertyBean> info) {
      myInfo = info;
    }

    @Override
    public String toString() {
      return "Clicked Info";
    }

    public List<PropertyBean> getInfo() {
      return myInfo;
    }

    @Override
    public boolean isLeaf() {
      return true;
    }
  }
}
/** Translucent overlay painted on the glass pane to mark the inspected component's bounds. */
private static class HighlightComponent extends JComponent {
  Color myColor;

  private HighlightComponent(@NotNull final Color c) {
    myColor = c;
  }

  @Override
  protected void paintComponent(Graphics g) {
    Graphics2D g2 = (Graphics2D)g;
    // Save graphics state, paint a 20%-alpha filled rectangle with a darker frame, restore.
    Color savedColor = g2.getColor();
    Composite savedComposite = g2.getComposite();
    g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.2f));
    Rectangle bounds = getBounds();
    RectanglePainter.paint(g2, 0, 0, bounds.width, bounds.height, 0, myColor, myColor.darker());
    g2.setComposite(savedComposite);
    g2.setColor(savedColor);
  }
}
/**
 * Panel showing the property name/value table for either a live component or captured
 * click info. For a live component, a {@link DimensionsComponent} footer visualizes
 * its size, border insets and insets; for click info there is no footer.
 */
private static class InspectorTable extends JPanel {
  InspectorTableModel myModel;
  // Null when the table was built from click info (no live component to measure).
  DimensionsComponent myDimensionComponent;

  private InspectorTable(@NotNull final List<PropertyBean> clickInfo) {
    myModel = new InspectorTableModel(clickInfo);
    init(null);
  }

  private InspectorTable(@NotNull final Component component) {
    myModel = new InspectorTableModel(component);
    init(component);
  }

  private void init(@Nullable Component component) {
    setLayout(new BorderLayout());
    StripeTable table = new StripeTable(myModel);
    new TableSpeedSearch(table);
    TableColumnModel columnModel = table.getColumnModel();
    TableColumn propertyColumn = columnModel.getColumn(0);
    propertyColumn.setMinWidth(JBUI.scale(200));
    propertyColumn.setMaxWidth(JBUI.scale(200));
    propertyColumn.setResizable(false);
    propertyColumn.setCellRenderer(new PropertyNameRenderer());
    TableColumn valueColumn = columnModel.getColumn(1);
    valueColumn.setMinWidth(JBUI.scale(200));
    valueColumn.setResizable(false);
    valueColumn.setCellRenderer(new ValueCellRenderer());
    valueColumn.setCellEditor(new DefaultCellEditor(new JBTextField()) {
      @Override
      public Component getTableCellEditorComponent(JTable table, Object value, boolean isSelected, int row, int column) {
        // Seed the editor with the renderer's display text so the user edits what they see.
        Component comp = table.getCellRenderer(row, column).getTableCellRendererComponent(table, value, false, false, row, column);
        if (comp instanceof JLabel) {
          value = ((JLabel)comp).getText();
        }
        Component result = super.getTableCellEditorComponent(table, value, isSelected, row, column);
        ((JComponent)result).setBorder(BorderFactory.createLineBorder(JBColor.GRAY, 1));
        return result;
      }
      // Note: a getCellEditorValue() override that only delegated to super was removed as redundant.
    });
    table.setAutoResizeMode(JTable.AUTO_RESIZE_LAST_COLUMN);
    add(new JBScrollPane(table), BorderLayout.CENTER);
    if (component != null) {
      myDimensionComponent = new DimensionsComponent(component);
      add(myDimensionComponent, BorderLayout.SOUTH);
    }
  }

  /** Re-reads all properties; updates the dimensions footer only when one exists. */
  public void refresh() {
    myModel.refresh();
    if (myDimensionComponent != null) {
      // Guard: tables built from click info have no dimensions footer (was an NPE).
      myDimensionComponent.update();
      myDimensionComponent.repaint();
    }
  }

  /** Renders property names; bolds and colors entries whose value was explicitly set. */
  private class PropertyNameRenderer extends DefaultTableCellRenderer {
    @Override
    public Component getTableCellRendererComponent(JTable table,
                                                   Object value,
                                                   boolean isSelected,
                                                   boolean hasFocus,
                                                   int row,
                                                   int column) {
      super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
      final TableModel model = table.getModel();
      boolean changed = false;
      if (model instanceof InspectorTableModel) {
        changed = ((InspectorTableModel)model).myProperties.get(row).changed;
      }
      final Color fg = isSelected ? table.getSelectionForeground() : changed ? UI.getColor("link.foreground") : table.getForeground();
      final JBFont font = JBUI.Fonts.label();
      setFont(changed ? font.asBold() : font);
      setForeground(fg);
      return this;
    }
  }
}
/**
 * Footer widget that draws the inspected component's size as nested boxes:
 * the size string in the center, a dotted box annotated with border insets,
 * and an outer box annotated with component insets.
 */
private static class DimensionsComponent extends JComponent {
  Component myComponent;
  int myWidth;
  int myHeight;
  Border myBorder;
  Insets myInsets;

  private DimensionsComponent(@NotNull final Component component) {
    myComponent = component;
    setOpaque(true);
    setBackground(JBColor.WHITE);
    setBorder(JBUI.Borders.empty(5, 0));
    setFont(JBUI.Fonts.label(9));
    update();
  }

  /** Re-reads size, border and insets from the component (call before repaint). */
  public void update() {
    myWidth = myComponent.getWidth();
    myHeight = myComponent.getHeight();
    if (myComponent instanceof JComponent) {
      myBorder = ((JComponent)myComponent).getBorder();
      myInsets = ((JComponent)myComponent).getInsets();
    }
  }

  @Override
  protected void paintComponent(final Graphics g) {
    Graphics2D g2d = (Graphics2D)g;
    GraphicsConfig config = new GraphicsConfig(g).setAntialiasing(UISettings.getShadowInstance().getIdeAAType() != AntialiasingType.OFF);
    Rectangle bounds = getBounds();
    // Fill the background inside our own insets.
    g2d.setColor(getBackground());
    Insets insets = getInsets();
    g2d.fillRect(insets.left, insets.top, bounds.width - insets.left - insets.right, bounds.height - insets.top - insets.bottom);
    final String sizeString = String.format("%d x %d", myWidth, myHeight);
    FontMetrics fm = g2d.getFontMetrics();
    int sizeWidth = fm.stringWidth(sizeString);
    int fontHeight = fm.getHeight();
    int innerBoxWidthGap = JBUI.scale(20);
    int innerBoxHeightGap = JBUI.scale(5);
    int boxSize = JBUI.scale(15);
    int centerX = bounds.width / 2;
    int centerY = bounds.height / 2;
    // Inner box wraps the size string with a fixed gap on each side.
    int innerX = centerX - sizeWidth / 2 - innerBoxWidthGap;
    int innerY = centerY - fontHeight / 2 - innerBoxHeightGap;
    int innerWidth = sizeWidth + innerBoxWidthGap * 2;
    int innerHeight = fontHeight + innerBoxHeightGap * 2;
    g2d.setColor(getForeground());
    drawCenteredString(g2d, fm, fontHeight, sizeString, centerX, centerY);
    g2d.setColor(JBColor.GRAY);
    g2d.drawRect(innerX, innerY, innerWidth, innerHeight);
    Insets borderInsets = null;
    if (myBorder != null) borderInsets = myBorder.getBorderInsets(myComponent);
    // Middle (dotted) ring: border insets; outer ring: component insets.
    UIUtil.drawDottedRectangle(g2d, innerX - boxSize, innerY - boxSize, innerX + innerWidth + boxSize, innerY + innerHeight + boxSize);
    drawInsets(g2d, fm, "border", borderInsets, boxSize, fontHeight, innerX, innerY, innerWidth, innerHeight);
    g2d.drawRect(innerX - boxSize * 2, innerY - boxSize * 2, innerWidth + boxSize * 4, innerHeight + boxSize * 4);
    drawInsets(g2d, fm, "insets", myInsets, boxSize * 2, fontHeight, innerX, innerY, innerWidth, innerHeight);
    config.restore();
  }

  /** Labels one ring with its name and its top/bottom/left/right inset values ("-" when unknown). */
  private static void drawInsets(Graphics2D g2d, FontMetrics fm, String name, Insets insets, int offset, int fontHeight, int innerX, int innerY, int innerWidth, int innerHeight) {
    g2d.setColor(JBColor.BLACK);
    g2d.drawString(name, innerX - offset + JBUI.scale(5), innerY - offset + fontHeight);
    g2d.setColor(JBColor.GRAY);
    int outerX = innerX - offset;
    int outerWidth = innerWidth + offset * 2;
    int outerY = innerY - offset;
    int outerHeight = innerHeight + offset * 2;
    final String top = insets != null ? Integer.toString(insets.top) : "-";
    final String bottom = insets != null ? Integer.toString(insets.bottom) : "-";
    final String left = insets != null ? Integer.toString(insets.left) : "-";
    final String right = insets != null ? Integer.toString(insets.right) : "-";
    int shift = JBUI.scale(7);
    drawCenteredString(g2d, fm, fontHeight, top,
                       outerX + outerWidth / 2,
                       outerY + shift);
    drawCenteredString(g2d, fm, fontHeight, bottom,
                       outerX + outerWidth / 2,
                       outerY + outerHeight - shift);
    drawCenteredString(g2d, fm, fontHeight, left,
                       outerX + shift,
                       outerY + outerHeight / 2);
    drawCenteredString(g2d, fm, fontHeight, right,
                       outerX + outerWidth - shift,
                       outerY + outerHeight / 2);
  }

  @Override
  public Dimension getMinimumSize() {
    return JBUI.size(120);
  }

  @Override
  public Dimension getPreferredSize() {
    return JBUI.size(150);
  }
}
/** Draws {@code text} so that its bounding box is centered on the point (x, y). */
private static void drawCenteredString(Graphics2D g2d, FontMetrics fm, int fontHeight, String text, int x, int y) {
  int textWidth = fm.stringWidth(text);
  Rectangle box = new Rectangle(x - textWidth / 2, y - fontHeight / 2, textWidth, fontHeight);
  UIUtil.drawCenteredString(g2d, box, text);
}
/**
 * Dispatches value rendering to a type-specific {@link Renderer}, found by walking the
 * value's class hierarchy and interfaces; falls back to {@link ObjectRenderer}.
 * Renderer instances are shared JLabels, so selection colors are reapplied on each call.
 */
private static class ValueCellRenderer implements TableCellRenderer {
  private static final Map<Class, Renderer> RENDERERS = ContainerUtil.newHashMap();
  static {
    RENDERERS.put(Point.class, new PointRenderer());
    RENDERERS.put(Dimension.class, new DimensionRenderer());
    RENDERERS.put(Insets.class, new InsetsRenderer());
    RENDERERS.put(Rectangle.class, new RectangleRenderer());
    RENDERERS.put(Color.class, new ColorRenderer());
    RENDERERS.put(Font.class, new FontRenderer());
    RENDERERS.put(Boolean.class, new BooleanRenderer());
    RENDERERS.put(Icon.class, new IconRenderer());
    RENDERERS.put(Border.class, new BorderRenderer());
  }
  private static final Renderer<Object> DEFAULT_RENDERER = new ObjectRenderer();
  // Shared label for null values.
  private static final JLabel NULL_RENDERER = new JLabel("-");

  public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
    if (value == null) {
      NULL_RENDERER.setOpaque(isSelected);
      NULL_RENDERER.setForeground(isSelected ? table.getSelectionForeground() : table.getForeground());
      NULL_RENDERER.setBackground(isSelected ? table.getSelectionBackground() : table.getBackground());
      return NULL_RENDERER;
    }
    Renderer<Object> renderer = ObjectUtils.notNull(getRenderer(value.getClass()), DEFAULT_RENDERER);
    JComponent result = renderer.setValue(value);
    result.setOpaque(isSelected);
    result.setForeground(isSelected ? table.getSelectionForeground() : table.getForeground());
    result.setBackground(isSelected ? table.getSelectionBackground() : table.getBackground());
    return result;
  }

  /**
   * Finds a renderer for {@code clazz}: exact match first, then (recursively) its
   * interfaces, then its superclass chain. Returns null if none registered.
   */
  @Nullable
  private static Renderer<Object> getRenderer(Class clazz) {
    if (clazz == null) return null;
    Renderer<Object> renderer = (Renderer<Object>)RENDERERS.get(clazz);
    if (renderer != null) return renderer;
    Class[] interfaces = clazz.getInterfaces();
    for (Class aClass : interfaces) {
      renderer = getRenderer(aClass);
      if (renderer != null) {
        return renderer;
      }
    }
    clazz = clazz.getSuperclass();
    if (clazz != null) {
      return getRenderer(clazz);
    }
    return null;
  }
}
/**
 * Strategy for rendering one property value of type {@code T} in the value column;
 * implementations configure and return a (usually shared) Swing component.
 */
private interface Renderer<T> {
  JComponent setValue(@NotNull T value);
}
/** Renders a {@link Point} as {@code "x:y"}. */
private static class PointRenderer extends JLabel implements Renderer<Point> {
  public JComponent setValue(@NotNull final Point value) {
    setText(value.x + ":" + value.y);
    return this;
  }
}
/** Renders a {@link Dimension} as {@code "WxH"}. */
private static class DimensionRenderer extends JLabel implements Renderer<Dimension> {
  public JComponent setValue(@NotNull final Dimension value) {
    setText(value.width + "x" + value.height);
    return this;
  }
}
/** Renders {@link Insets} as a labelled top/left/bottom/right list. */
private static class InsetsRenderer extends JLabel implements Renderer<Insets> {
  public JComponent setValue(@NotNull final Insets value) {
    StringBuilder text = new StringBuilder()
      .append("top: ").append(value.top)
      .append(" left:").append(value.left)
      .append(" bottom:").append(value.bottom)
      .append(" right:").append(value.right);
    setText(text.toString());
    return this;
  }
}
/** Renders a {@link Rectangle} as {@code "WxH @ x:y"}; the static formatter is shared with the tree renderer. */
private static class RectangleRenderer extends JLabel implements Renderer<Rectangle> {
  public JComponent setValue(@NotNull final Rectangle value) {
    setText(toString(value));
    return this;
  }

  @NotNull
  static String toString(@NotNull Rectangle r) {
    StringBuilder sb = new StringBuilder();
    sb.append(r.width).append('x').append(r.height).append(" @ ").append(r.x).append(':').append(r.y);
    return sb.toString();
  }
}
/** Renders a {@link Color} as its RGBA channels plus the packed ARGB hex value, with a swatch icon. */
private static class ColorRenderer extends JLabel implements Renderer<Color> {
  public JComponent setValue(@NotNull final Color value) {
    // %08X produces the same zero-padded uppercase hex the old manual pad loop did.
    String text = " r:" + value.getRed() +
                  " g:" + value.getGreen() +
                  " b:" + value.getBlue() +
                  " a:" + value.getAlpha() +
                  " argb:0x" + String.format(ENGLISH, "%08X", value.getRGB()) +
                  (value instanceof UIResource ? " UIResource" : "");
    setText(text);
    setIcon(createColorIcon(value));
    return this;
  }
}
/** Renders a {@link Font} as {@code "Name (Family), Npx"} plus bold/italic and UIResource markers. */
private static class FontRenderer extends JLabel implements Renderer<Font> {
  public JComponent setValue(@NotNull final Font value) {
    StringBuilder text = new StringBuilder()
      .append(value.getFontName()).append(" (").append(value.getFamily()).append("), ")
      .append(value.getSize()).append("px");
    int style = value.getStyle();
    if ((style & Font.BOLD) != 0) text.append(" bold");
    if ((style & Font.ITALIC) != 0) text.append(" italic");
    if (value instanceof UIResource) text.append(" UIResource");
    setText(text.toString());
    return this;
  }
}
/** Renders a {@link Boolean} as "Yes"/"No". */
private static class BooleanRenderer extends JLabel implements Renderer<Boolean> {
  public JComponent setValue(@NotNull final Boolean value) {
    setText(Boolean.TRUE.equals(value) ? "Yes" : "No");
    return this;
  }
}
/** Renders an {@link Icon} by showing the icon itself next to its toString() text. */
private static class IconRenderer extends JLabel implements Renderer<Icon> {
  public JComponent setValue(@NotNull final Icon value) {
    String description = getToStringValue(value);
    setText(description);
    setIcon(value);
    return this;
  }
}
/**
 * Renders a {@link Border}: a textual description (class, color, title, nested borders for
 * CompoundBorder) plus a swatch icon built from the border's line color(s) where one can
 * be extracted.
 */
private static class BorderRenderer extends JLabel implements Renderer<Border> {
  public JComponent setValue(@NotNull final Border value) {
    setText(getTextDescription(value));
    if (value instanceof CompoundBorder) {
      // Two-color icon when both inner and outer colors are known; single color otherwise.
      Color insideColor = getBorderColor(((CompoundBorder)value).getInsideBorder());
      Color outsideColor = getBorderColor(((CompoundBorder)value).getOutsideBorder());
      if (insideColor != null && outsideColor != null) {
        setIcon(createColorIcon(outsideColor, insideColor));
      }
      else if (insideColor != null) {
        setIcon(createColorIcon(insideColor));
      }
      else if (outsideColor != null) {
        setIcon(createColorIcon(outsideColor));
      }
      else {
        setIcon(null);
      }
    }
    else {
      Color color = getBorderColor(value);
      setIcon(color != null ? createColorIcon(color) : null);
    }
    return this;
  }

  /**
   * Extracts the line color from a LineBorder, or (via reflection, since there is no
   * accessor) from a CustomLineBorder; null when the border type exposes no color.
   */
  @Nullable
  private static Color getBorderColor(@NotNull Border value) {
    if (value instanceof LineBorder) {
      return ((LineBorder)value).getLineColor();
    }
    else if (value instanceof CustomLineBorder) {
      try {
        return (Color)ReflectionUtil.findField(CustomLineBorder.class, Color.class, "myColor").get(value);
      }
      catch (Exception ignore) {
        // Field missing or inaccessible — fall through and report no color.
      }
    }
    return null;
  }

  /** Builds the description: class name, color, rounded-corners/title flags, nested borders, toString(). */
  @NotNull
  private static String getTextDescription(@NotNull Border value) {
    StringBuilder sb = new StringBuilder();
    sb.append(getClassName(value));
    Color color = getBorderColor(value);
    if (color != null) sb.append(" color=").append(color.toString());
    if (value instanceof LineBorder) {
      if (((LineBorder)value).getRoundedCorners()) sb.append(" roundedCorners=true");
    }
    if (value instanceof TitledBorder) {
      sb.append(" title='").append(((TitledBorder)value).getTitle()).append("'");
    }
    if (value instanceof CompoundBorder) {
      // Recurse into both halves of a compound border.
      sb.append(" inside={").append(getTextDescription(((CompoundBorder)value).getInsideBorder())).append("}");
      sb.append(" outside={").append(getTextDescription(((CompoundBorder)value).getOutsideBorder())).append("}");
    }
    if (value instanceof UIResource) sb.append(" UIResource");
    sb.append(" (").append(getToStringValue(value)).append(")");
    return sb.toString();
  }
}
/** Fallback renderer: shows the value's toString() as plain text. */
private static class ObjectRenderer extends JLabel implements Renderer<Object> {
  {
    // Never interpret values that happen to start with "<html>" as markup.
    putClientProperty("html.disable", Boolean.TRUE);
  }

  public JComponent setValue(@NotNull final Object value) {
    String text = getToStringValue(value);
    setText(text);
    return this;
  }
}
/**
 * Single-line toString() of a value. Note: String.valueOf(Object) returns null when the
 * object's own toString() returns null, hence the explicit fallback text.
 */
@NotNull
private static String getToStringValue(@NotNull Object value) {
  String text = StringUtil.notNullize(String.valueOf(value), "toString()==null");
  return text.replace('\n', ' ');
}
/**
 * Simple class name of a value; anonymous classes (whose simple name is empty)
 * are reported as their superclass instead.
 */
@NotNull
private static String getClassName(Object value) {
  Class<?> cls = value.getClass();
  if (cls.isAnonymousClass()) {
    cls = cls.getSuperclass();
  }
  return cls.getSimpleName();
}
/** 13x11 swatch icon filled with {@code color}, scaled for the current HiDPI factor. */
private static ColorIcon createColorIcon(Color color) {
  return JBUI.scale(new ColorIcon(13, 11, color, true));
}
/** Two-color swatch icon (e.g. foreground/background or outer/inner border), HiDPI-scaled. */
private static Icon createColorIcon(Color color1, Color color2) {
  return JBUI.scale(new TwoColorsIcon(11, color1, color2));
}
/**
 * One row of the property table: a name/value pair plus a {@code changed} flag set when the
 * value was explicitly assigned on the component (per the is*Set checker methods); changed
 * rows are highlighted by PropertyNameRenderer.
 */
private static class PropertyBean {
  final String propertyName;
  final Object propertyValue;
  final boolean changed;

  PropertyBean(String name, Object value) {
    this(name, value, false);
  }

  PropertyBean(String name, Object value, boolean changed) {
    propertyName = name;
    propertyValue = value;
    this.changed = changed;
  }
}
private static class InspectorTableModel extends AbstractTableModel {
final List<String> PROPERTIES = Arrays.asList(
"ui", "getLocation", "getLocationOnScreen",
"getSize", "isOpaque", "getBorder",
"getForeground", "getBackground", "getFont",
"getCellRenderer", "getCellEditor",
"getMinimumSize", "getMaximumSize", "getPreferredSize",
"getText", "isEditable", "getIcon",
"getVisibleRect", "getLayout",
"getAlignmentX", "getAlignmentY",
"getTooltipText", "getToolTipText",
"isShowing", "isEnabled", "isVisible", "isDoubleBuffered",
"isFocusable", "isFocusCycleRoot", "isFocusOwner",
"isValid", "isDisplayable", "isLightweight"
);
final List<String> CHECKERS = Arrays.asList(
"isForegroundSet", "isBackgroundSet", "isFontSet",
"isMinimumSizeSet", "isMaximumSizeSet", "isPreferredSizeSet"
);
final List<String> ACCESSIBLE_CONTEXT_PROPERTIES = Arrays.asList(
"getAccessibleRole", "getAccessibleName", "getAccessibleDescription",
"getAccessibleAction", "getAccessibleChildrenCount",
"getAccessibleIndexInParent", "getAccessibleRelationSet",
"getAccessibleStateSet", "getAccessibleEditableText",
"getAccessibleTable", "getAccessibleText",
"getAccessibleValue", "accessibleChangeSupport"
);
final Component myComponent;
final List<PropertyBean> myProperties = ContainerUtil.newArrayList();
// Model built from pre-captured renderer click info; no live component to reflect on.
InspectorTableModel(@NotNull List<PropertyBean> clickInfo) {
  myComponent = null;
  myProperties.addAll(clickInfo);
}
// Model built from a live component; properties are gathered reflectively in fillTable().
InspectorTableModel(@NotNull Component c) {
  myComponent = c;
  fillTable();
}
/**
 * Populates myProperties: reflective component properties, the "added-at" stack trace
 * (if UiInspector stashed one as a client property), accessibility info, and layout
 * constraints for containers.
 */
void fillTable() {
  addProperties("", myComponent, PROPERTIES);
  Object addedAt = myComponent instanceof JComponent ? ((JComponent)myComponent).getClientProperty("uiInspector.addedAt") : null;
  myProperties.add(new PropertyBean("added-at", addedAt));
  // Add properties related to Accessibility support. This is useful for manually
  // inspecting what kind (if any) of accessibility support components expose.
  boolean isAccessible = myComponent instanceof Accessible;
  myProperties.add(new PropertyBean("accessible", isAccessible));
  AccessibleContext context = myComponent.getAccessibleContext();
  myProperties.add(new PropertyBean("accessibleContext", context));
  if (isAccessible) {
    // Indented prefix groups the accessibility rows under the "accessible" row.
    addProperties("  ", myComponent.getAccessibleContext(), ACCESSIBLE_CONTEXT_PROPERTIES);
  }
  if (myComponent instanceof Container) {
    addLayoutProperties((Container)myComponent);
  }
}
/**
 * Reflectively evaluates each named getter (or, failing that, field) on {@code component}
 * and appends the results as table rows, prefixed for indentation. Also records the class
 * name and superclass chain (up to JComponent). Entries whose lookup or invocation throws
 * are silently skipped — many listed names only exist on some component types.
 */
private void addProperties(@NotNull String prefix, @NotNull Object component, @NotNull List<String> methodNames) {
  Class<?> clazz0 = component.getClass();
  // Anonymous classes have no useful name; report the superclass instead.
  Class<?> clazz = clazz0.isAnonymousClass() ? clazz0.getSuperclass() : clazz0;
  myProperties.add(new PropertyBean(prefix + "class", clazz.getName()));
  StringBuilder classHierarchy = new StringBuilder();
  for (Class<?> cl = clazz.getSuperclass(); cl != null; cl = cl.getSuperclass()) {
    if (classHierarchy.length() > 0) classHierarchy.append(" -> ");
    classHierarchy.append(cl.getName());
    // Stop at JComponent — everything above it is the same for all Swing components.
    if (JComponent.class.getName().equals(cl.getName())) break;
  }
  myProperties.add(new PropertyBean(prefix + "hierarchy", classHierarchy.toString()));
  for (String name: methodNames) {
    String propertyName = ObjectUtils.notNull(StringUtil.getPropertyName(name), name);
    Object propertyValue;
    try {
      try {
        //noinspection ConstantConditions
        propertyValue = ReflectionUtil.findMethod(Arrays.asList(clazz.getMethods()), name).invoke(component);
      }
      catch (Exception e) {
        // No such method (or it failed) — fall back to reading a field of the same name.
        propertyValue = ReflectionUtil.findField(clazz, null, name).get(component);
      }
      boolean changed = false;
      try {
        // An "isXxxSet()" checker returning true marks the value as explicitly assigned.
        final String checkerMethodName = "is" + StringUtil.capitalize(propertyName) + "Set";
        if (CHECKERS.contains(checkerMethodName)) {
          final Object value = ReflectionUtil.findMethod(Arrays.asList(clazz.getMethods()), checkerMethodName).invoke(component);
          if (value instanceof Boolean) {
            changed = ((Boolean)value).booleanValue();
          }
        }
      } catch (Exception e) {changed = false;}
      myProperties.add(new PropertyBean(prefix + propertyName, propertyValue, changed));
    }
    catch (Exception ignored) {
      // Property not applicable to this component type — skip the row entirely.
    }
  }
}
/**
 * Adds rows describing the container's {@link LayoutManager} configuration and the
 * per-child constraints, for the layout types the inspector knows about
 * (GridBagLayout, BorderLayout, CardLayout, MigLayout). Other layouts add nothing.
 */
private void addLayoutProperties(@NotNull Container component) {
  String prefix = " ";
  LayoutManager layout = component.getLayout();
  if (layout instanceof GridBagLayout) {
    GridBagLayout bagLayout = (GridBagLayout)layout;
    // defaultConstraints is a private field of GridBagLayout, hence the reflective read.
    GridBagConstraints defaultConstraints = ReflectionUtil.getField(GridBagLayout.class, bagLayout, GridBagConstraints.class, "defaultConstraints");
    myProperties.add(new PropertyBean("GridBagLayout constraints",
                                      String.format("defaultConstraints - %s", toString(defaultConstraints))));
    if (bagLayout.columnWidths != null) myProperties.add(new PropertyBean(prefix + "columnWidths", Arrays.toString(bagLayout.columnWidths)));
    if (bagLayout.rowHeights != null) myProperties.add(new PropertyBean(prefix + "rowHeights", Arrays.toString(bagLayout.rowHeights)));
    if (bagLayout.columnWeights != null) myProperties.add(new PropertyBean(prefix + "columnWeights", Arrays.toString(bagLayout.columnWeights)));
    if (bagLayout.rowWeights != null) myProperties.add(new PropertyBean(prefix + "rowWeights", Arrays.toString(bagLayout.rowWeights)));
    for (Component child : component.getComponents()) {
      myProperties.add(new PropertyBean(prefix + getComponentName(child), toString(bagLayout.getConstraints(child))));
    }
  }
  else if (layout instanceof BorderLayout) {
    BorderLayout borderLayout = (BorderLayout)layout;
    myProperties.add(new PropertyBean("BorderLayout constraints",
                                      String.format("hgap - %s, vgap - %s", borderLayout.getHgap(), borderLayout.getVgap())));
    for (Component child : component.getComponents()) {
      myProperties.add(new PropertyBean(prefix + getComponentName(child), borderLayout.getConstraints(child)));
    }
  }
  else if (layout instanceof CardLayout) {
    CardLayout cardLayout = (CardLayout)layout;
    // CardLayout keeps its state in private fields ("currentCard" index and a Vector of cards).
    Integer currentCard = ReflectionUtil.getField(CardLayout.class, cardLayout, null, "currentCard");
    //noinspection UseOfObsoleteCollectionType
    Vector vector = ReflectionUtil.getField(CardLayout.class, cardLayout, Vector.class, "vector");
    String cardDescription = "???";
    if (vector != null && currentCard != null) {
      Object card = vector.get(currentCard);
      cardDescription = ReflectionUtil.getField(card.getClass(), card, String.class, "name");
    }
    myProperties.add(new PropertyBean("CardLayout constraints",
                                      String.format("card - %s, hgap - %s, vgap - %s",
                                                    cardDescription, cardLayout.getHgap(), cardLayout.getVgap())));
    if (vector != null) {
      for (Object card : vector) {
        String cardName = ReflectionUtil.getField(card.getClass(), card, String.class, "name");
        Component child = ReflectionUtil.getField(card.getClass(), card, Component.class, "comp");
        myProperties.add(new PropertyBean(prefix + getComponentName(child), cardName));
      }
    }
  }
  else if (layout instanceof MigLayout) {
    MigLayout migLayout = (MigLayout)layout;
    myProperties.add(new PropertyBean("MigLayout constraints", migLayout.getColumnConstraints()));
    for (Component child : component.getComponents()) {
      myProperties.add(new PropertyBean(prefix + getComponentName(child), migLayout.getComponentConstraints(child)));
    }
  }
}
/**
 * Renders {@link GridBagConstraints} compactly: only fields that differ from the
 * default-constructed constraints are included (see {@link #appendFieldValue}).
 *
 * @return "null" for a null argument, otherwise a brace-style field dump
 */
@NotNull
private static String toString(@Nullable GridBagConstraints constraints) {
  if (constraints == null) return "null";
  MoreObjects.ToStringHelper h = MoreObjects.toStringHelper("");
  appendFieldValue(h, constraints, "gridx");
  appendFieldValue(h, constraints, "gridy");
  appendFieldValue(h, constraints, "gridwidth");
  appendFieldValue(h, constraints, "gridheight");
  appendFieldValue(h, constraints, "weightx");
  appendFieldValue(h, constraints, "weighty");
  appendFieldValue(h, constraints, "anchor");
  appendFieldValue(h, constraints, "fill");
  appendFieldValue(h, constraints, "insets");
  appendFieldValue(h, constraints, "ipadx");
  appendFieldValue(h, constraints, "ipady");
  return h.toString();
}
/**
 * Default-constructed constraints used to detect which fields were explicitly set.
 * Never mutated — only read reflectively — so a single shared instance is safe.
 * Hoisted out of {@link #appendFieldValue} so we no longer allocate a fresh
 * GridBagConstraints (and perform an extra reflective lookup on it) for every
 * field of every constraints object rendered.
 */
private static final GridBagConstraints DEFAULT_GBC = new GridBagConstraints();
/**
 * Appends {@code field} of {@code constraints} to the helper, but only when its value
 * differs from the default — keeping {@link #toString(GridBagConstraints)} output compact.
 */
private static void appendFieldValue(@NotNull MoreObjects.ToStringHelper h,
                                     @NotNull GridBagConstraints constraints,
                                     @NotNull String field) {
  Object value = ReflectionUtil.getField(GridBagConstraints.class, constraints, null, field);
  Object defaultValue = ReflectionUtil.getField(GridBagConstraints.class, DEFAULT_GBC, null, field);
  if (!Comparing.equal(value, defaultValue)) h.add(field, value);
}
/**
 * Table-model accessor: column 0 is the property's name, any other column its value.
 *
 * @return the requested cell content, or null when the row has no bean
 */
@Nullable
public Object getValueAt(int row, int column) {
  final PropertyBean property = myProperties.get(row);
  if (property == null) return null;
  return column == 0 ? property.propertyName : property.propertyValue;
}
@Override
public boolean isCellEditable(int row, int col) {
  // Only the value column can be edited, and only for properties we know how to write back.
  if (col != 1) return false;
  return updater(myProperties.get(row)) != null;
}
/**
 * Writes an edited value back to the component via the row's updater function and stores
 * the round-tripped value (what the getter reports afterwards) in the model.
 */
@Override
public void setValueAt(Object value, int row, int col) {
  PropertyBean bean = myProperties.get(row);
  try {
    myProperties.set(row, new PropertyBean(bean.propertyName, ObjectUtils.notNull(updater(bean)).fun(value)));
  }
  catch (Exception ignored) {
    // Best-effort: if the setter/field write fails the old value simply stays in the table.
  }
}
/**
 * Builds a function that writes a new value for the given property to {@code myComponent}
 * and returns the value read back afterwards. Resolution order: getX()/isX() getter plus
 * matching setX() setter; failing that, a same-named (non-final, non-static) field.
 * Incoming values are coerced from their string form via {@code fromObject}.
 *
 * @return the write-and-read-back function, or null when the property is not writable
 *         (no component, no accessor pair, final/static field, or any reflection failure)
 */
@Nullable
public Function<Object, Object> updater(PropertyBean bean) {
  if (myComponent == null) return null;
  String name = bean.propertyName.trim();
  try {
    try {
      Method getter;
      try {
        getter = myComponent.getClass().getMethod("get" + StringUtil.capitalize(name));
      }
      catch (Exception e) {
        // Boolean properties use the is-prefix convention.
        getter = myComponent.getClass().getMethod("is" + StringUtil.capitalize(name));
      }
      final Method finalGetter = getter;
      final Method setter = myComponent.getClass().getMethod("set" + StringUtil.capitalize(name), getter.getReturnType());
      setter.setAccessible(true);
      return o -> {
        try {
          setter.invoke(myComponent, fromObject(o, finalGetter.getReturnType()));
          // Return what the getter now reports, not the raw input — the setter may normalize.
          return finalGetter.invoke(myComponent);
        }
        catch (Exception e) {
          throw new RuntimeException(e);
        }
      };
    }
    catch (Exception e) {
      // No getter/setter pair — fall back to direct field access.
      final Field field = ReflectionUtil.findField(myComponent.getClass(), null, name);
      if (Modifier.isFinal(field.getModifiers()) || Modifier.isStatic(field.getModifiers())) {
        return null;
      }
      return o -> {
        try {
          field.set(myComponent, fromObject(o, field.getType()));
          return field.get(myComponent);
        }
        catch (Exception e1) {
          throw new RuntimeException(e1);
        }
      };
    }
  }
  catch (Exception ignored) {
    // Property is not editable; isCellEditable() relies on the null return.
  }
  return null;
}
/** Two fixed columns: property name and property value. */
public int getColumnCount() {
  return 2;
}
/** One row per collected property bean. */
public int getRowCount() {
  return myProperties.size();
}
/** Header labels for the two fixed columns. */
public String getColumnName(int columnIndex) {
  if (columnIndex == 0) return "Property";
  return "Value";
}
/** Property names are strings; values can be anything. */
public Class<?> getColumnClass(int columnIndex) {
  if (columnIndex == 0) return String.class;
  return Object.class;
}
/** Re-reads all properties from the component and notifies table listeners. */
public void refresh() {
  myProperties.clear();
  fillTable();
  fireTableDataChanged();
}
}
/**
 * Global AWT event listener backing the UI inspector. It opens an {@code InspectorWindow}
 * for the component under a Ctrl+Alt+click, and tags every JComponent added to a container
 * with the stack trace of the code that added it (client property "uiInspector.addedAt").
 */
private static class UiInspector implements AWTEventListener, Disposable {
  public UiInspector() {
    // Mouse events trigger inspection; container events record component provenance.
    Toolkit.getDefaultToolkit().addAWTEventListener(this, AWTEvent.MOUSE_EVENT_MASK | AWTEvent.CONTAINER_EVENT_MASK);
  }
  public void dispose() {
    Toolkit.getDefaultToolkit().removeAWTEventListener(this);
    // Close any inspector windows still open so they don't outlive the inspector.
    for (Window window : Window.getWindows()) {
      if (window instanceof InspectorWindow) {
        ((InspectorWindow)window).close();
      }
    }
  }
  /** Opens (and raises) an inspector window for the given component. */
  public void showInspector(@NotNull Component c) {
    Window window = new InspectorWindow(c);
    window.pack();
    window.setVisible(true);
    window.toFront();
  }
  public void eventDispatched(AWTEvent event) {
    if (event instanceof MouseEvent) {
      processMouseEvent((MouseEvent)event);
    }
    else if (event instanceof ContainerEvent) {
      processContainerEvent((ContainerEvent)event);
    }
  }
  private void processMouseEvent(MouseEvent me) {
    // Inspector gesture is a single Ctrl+Alt+click.
    if (!me.isAltDown() || !me.isControlDown()) return;
    if (me.getClickCount() != 1 || me.isPopupTrigger()) return;
    // Consume every phase of the gesture (press/release/click) so the app doesn't react,
    // but only act once, on the release.
    me.consume();
    if (me.getID() != MouseEvent.MOUSE_RELEASED) return;
    Component component = me.getComponent();
    if (component instanceof Container) {
      // Drill down to the deepest child under the click point.
      component = ((Container)component).findComponentAt(me.getPoint());
    }
    else if (component == null) {
      component = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
    }
    if (component != null) {
      if (component instanceof JComponent) {
        // Stash renderer/cell info for the inspector window to display.
        ((JComponent)component).putClientProperty(CLICK_INFO, getClickInfo(me, component));
      }
      showInspector(component);
    }
  }
  /**
   * For JList/JTable/JTree, resolves the clicked cell, obtains its renderer component and
   * returns that renderer's properties plus the cell bounds (RENDERER_BOUNDS entry).
   *
   * @return the property list, or null when the click didn't hit a known cell container
   */
  private static List<PropertyBean> getClickInfo(MouseEvent me, Component component) {
    if (me.getComponent() == null) return null;
    me = SwingUtilities.convertMouseEvent(me.getComponent(), me, component);
    List<PropertyBean> clickInfo = new ArrayList<>();
    //clickInfo.add(new PropertyBean("Click point", me.getPoint()));
    if (component instanceof JList) {
      JList list = (JList)component;
      int row = list.getUI().locationToIndex(list, me.getPoint());
      if (row != -1) {
        Component rendererComponent = list.getCellRenderer()
          .getListCellRendererComponent(list, list.getModel().getElementAt(row), row, list.getSelectionModel().isSelectedIndex(row),
                                        list.hasFocus());
        clickInfo.add(new PropertyBean(RENDERER_BOUNDS, list.getUI().getCellBounds(list, row, row)));
        clickInfo.addAll(new InspectorTableModel(rendererComponent).myProperties);
        return clickInfo;
      }
    }
    if (component instanceof JTable) {
      JTable table = (JTable)component;
      int row = table.rowAtPoint(me.getPoint());
      int column = table.columnAtPoint(me.getPoint());
      if (row != -1 && column != -1) {
        Component rendererComponent = table.getCellRenderer(row, column)
          .getTableCellRendererComponent(table, table.getValueAt(row, column), table.getSelectionModel().isSelectedIndex(row),
                                         table.hasFocus(), row, column);
        clickInfo.add(new PropertyBean(RENDERER_BOUNDS, table.getCellRect(row, column, true)));
        clickInfo.addAll(new InspectorTableModel(rendererComponent).myProperties);
        return clickInfo;
      }
    }
    if (component instanceof JTree) {
      JTree tree = (JTree)component;
      TreePath path = tree.getClosestPathForLocation(me.getX(), me.getY());
      if (path != null) {
        Object object = path.getLastPathComponent();
        Component rendererComponent = tree.getCellRenderer().getTreeCellRendererComponent(
          tree, object, tree.getSelectionModel().isPathSelected(path),
          tree.isExpanded(path),
          tree.getModel().isLeaf(object),
          tree.getRowForPath(path), tree.hasFocus());
        clickInfo.add(new PropertyBean(RENDERER_BOUNDS, tree.getPathBounds(path)));
        clickInfo.addAll(new InspectorTableModel(rendererComponent).myProperties);
        return clickInfo;
      }
    }
    return null;
  }
  /**
   * On COMPONENT_ADDED, records a trimmed stack trace (from the first com.intellij frame up
   * to the EventQueue frame) on the added JComponent, so the inspector can show where a
   * component was created. Renderer panes are skipped — their children churn constantly.
   */
  private static void processContainerEvent(ContainerEvent event) {
    Component child = event.getID() == ContainerEvent.COMPONENT_ADDED ? event.getChild() : null;
    if (child instanceof JComponent && !(event.getSource() instanceof CellRendererPane)) {
      String text = ExceptionUtil.getThrowableText(new Throwable());
      int first = text.indexOf("at com.intellij", text.indexOf("at java.awt"));
      int last = text.indexOf("at java.awt.EventQueue");
      if (last == -1) last = text.length();
      String val = last > first && first > 0 ? text.substring(first, last): null;
      ((JComponent)child).putClientProperty("uiInspector.addedAt", val);
    }
  }
}
/**
 * Coerces an edited cell value (usually a String typed by the user) to the target type.
 * Primitives are parsed directly; Dimension/Point/Rectangle/Insets/Color are parsed from
 * their display form by splitting on 'x', '@', ':' or "label:" separators, which yields
 * an empty leading token — so meaningful values start at s[1] for the 5-token shapes.
 *
 * BUG FIX: the Insets branch previously passed s[4] twice — new Insets(s[1], s[2], s[4], s[4]) —
 * so the bottom inset silently received the right-inset value. With five tokens the mapping
 * is top=s[1], left=s[2], bottom=s[3], right=s[4] (Insets ctor order: top, left, bottom, right).
 *
 * @throws UnsupportedOperationException for target types this method cannot build
 * @noinspection UseJBColor
 */
private static Object fromObject(Object o, Class<?> type) {
  if (o == null) return null;
  if (type.isAssignableFrom(o.getClass())) return o;
  if ("null".equals(o)) return null;
  String value = String.valueOf(o).trim();
  if (type == int.class) return Integer.parseInt(value);
  if (type == boolean.class) return "yes".equalsIgnoreCase(value) || "true".equalsIgnoreCase(value);
  if (type == byte.class) return Byte.parseByte(value);
  if (type == short.class) return Short.parseShort(value);
  if (type == double.class) return Double.parseDouble(value);
  if (type == float.class) return Float.parseFloat(value);
  String[] s = value.split("(?i)\\s*(?:[x@:]|[a-z]+:)\\s*", 6);
  if (type == Dimension.class) {
    if (s.length == 2) return new Dimension(Integer.parseInt(s[0]), Integer.parseInt(s[1]));
  }
  else if (type == Point.class) {
    if (s.length == 2) return new Point(Integer.parseInt(s[0]), Integer.parseInt(s[1]));
  }
  else if (type == Rectangle.class) {
    // Display form is "WxH @ X:Y"-like, so location comes after the size tokens.
    if (s.length >= 5) {
      return new Rectangle(Integer.parseInt(s[3]), Integer.parseInt(s[4]),
                           Integer.parseInt(s[1]), Integer.parseInt(s[2]));
    }
  }
  else if (type == Insets.class) {
    if (s.length >= 5) {
      return new Insets(Integer.parseInt(s[1]), Integer.parseInt(s[2]),
                        Integer.parseInt(s[3]), Integer.parseInt(s[4]));
    }
  }
  else if (type == Color.class) {
    if (s.length >= 5) {
      return new ColorUIResource(
        new Color(Integer.parseInt(s[1]), Integer.parseInt(s[2]), Integer.parseInt(s[3]), Integer.parseInt(s[4])));
    }
  }
  throw new UnsupportedOperationException(type.toString());
}
}
| |
package org.apache.solr.core;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.handler.TestBlobHandler;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.apache.solr.util.SimplePostTool;
import org.junit.After;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
 * Distributed test verifying that request handlers can be loaded at runtime from jars
 * stored in the .system blob collection, including upgrading to a new version of the
 * handler class and having it observe live config-param changes.
 */
public class TestDynamicLoading extends AbstractFullDistribZkTestBase {
  static final Logger log = LoggerFactory.getLogger(TestDynamicLoading.class);
  private List<RestTestHarness> restTestHarnesses = new ArrayList<>();
  /** Creates one REST harness per cloud client so commands can hit any node. */
  private void setupHarnesses() {
    for (final SolrClient client : clients) {
      RestTestHarness harness = new RestTestHarness(new RESTfulServerProvider() {
        @Override
        public String getBaseURL() {
          return ((HttpSolrClient)client).getBaseURL();
        }
      });
      restTestHarnesses.add(harness);
    }
  }
  @BeforeClass
  public static void enableRuntimeLib() throws Exception {
    // Runtime-lib loading is disabled by default for security; enable for this test.
    System.setProperty("enable.runtime.lib", "true");
  }
  @After
  public void testDown() throws Exception {
    // Close the harnesses even if the base teardown throws, so sockets aren't leaked.
    try {
      super.tearDown();
    }
    finally {
      for (RestTestHarness r : restTestHarnesses) {
        r.close();
      }
    }
  }
  @Override
  public void doTest() throws Exception {
    setupHarnesses();
    dynamicLoading();
  }
  private void dynamicLoading() throws Exception {
    // Register a handler whose class lives in the (not yet posted) blob "test", version 1.
    String payload = "{\n" +
        "'create-requesthandler' : { 'name' : '/test1', 'class': 'org.apache.solr.core.BlobStoreTestRequestHandler' , 'lib':'test','version':'1'}\n" +
        "}";
    RestTestHarness client = restTestHarnesses.get(random().nextInt(restTestHarnesses.size()));
    TestSolrConfigHandler.runConfigCommand(client,"/config?wt=json",payload);
    TestSolrConfigHandler.testForResponseElement(client,
        null,
        "/config/overlay?wt=json",
        null,
        Arrays.asList("overlay", "requestHandler", "/test1", "lib"),
        "test",10);
    // The handler can't load yet: no .system collection exists.
    Map map = TestSolrConfigHandler.getRespMap("/test1?wt=json", client);
    assertNotNull(TestBlobHandler.getAsString(map) , map = (Map) map.get("error"));
    assertEquals(".system collection not available", map.get("msg"));
    HttpSolrClient randomClient = (HttpSolrClient) clients.get(random().nextInt(clients.size()));
    String baseURL = randomClient.getBaseURL();
    baseURL = baseURL.substring(0, baseURL.lastIndexOf('/'));
    TestBlobHandler.createSysColl(new HttpSolrClient(baseURL,randomClient.getHttpClient()));
    // Collection exists but the blob/version doesn't yet.
    map = TestSolrConfigHandler.getRespMap("/test1?wt=json", client);
    assertNotNull(map = (Map) map.get("error"));
    assertEquals("no such blob or version available: test/1", map.get("msg"));
    // Post version 1 of the jar and wait for the handler class to be picked up.
    ByteBuffer jar = generateZip( TestDynamicLoading.class,BlobStoreTestRequestHandler.class);
    TestBlobHandler.postAndCheck(cloudClient, baseURL, jar,1);
    boolean success= false;
    for(int i=0;i<50;i++) {
      map = TestSolrConfigHandler.getRespMap("/test1?wt=json", client);
      if(BlobStoreTestRequestHandler.class.getName().equals(map.get("class"))){
        success = true;
        break;
      }
      Thread.sleep(100);
    }
    assertTrue(new String( ZkStateReader.toJSON(map) , StandardCharsets.UTF_8), success );
    // Post version 2 (the V2 handler watches config params).
    jar = generateZip( TestDynamicLoading.class,BlobStoreTestRequestHandlerV2.class);
    TestBlobHandler.postAndCheck(cloudClient, baseURL, jar,2);
    payload = " {\n" +
        "  'set' : {'watched': {" +
        "                    'x':'X val',\n" +
        "                    'y': 'Y val'}\n" +
        "             }\n" +
        "  }";
    TestSolrConfigHandler.runConfigCommand(client,"/config/params?wt=json",payload);
    TestSolrConfigHandler.testForResponseElement(
        client,
        null,
        "/config/params?wt=json",
        cloudClient,
        Arrays.asList("response", "params", "watched", "x"),
        "X val",
        10);
    // Switch the handler to the V2 class from blob version 2.
    payload = "{\n" +
        "'update-requesthandler' : { 'name' : '/test1', 'class': 'org.apache.solr.core.BlobStoreTestRequestHandlerV2' , 'lib':'test','version':2}\n" +
        "}";
    client = restTestHarnesses.get(random().nextInt(restTestHarnesses.size()));
    TestSolrConfigHandler.runConfigCommand(client,"/config?wt=json",payload);
    TestSolrConfigHandler.testForResponseElement(client,
        null,
        "/config/overlay?wt=json",
        null,
        Arrays.asList("overlay", "requestHandler", "/test1", "version"),
        2l,10);
    success= false;
    for(int i=0;i<100;i++) {
      map = TestSolrConfigHandler.getRespMap("/test1?wt=json", client);
      if(BlobStoreTestRequestHandlerV2.class.getName().equals(map.get("class"))) {
        success = true;
        break;
      }
      Thread.sleep(100);
    }
    assertTrue("New version of class is not loaded " + new String(ZkStateReader.toJSON(map), StandardCharsets.UTF_8), success);
    // NOTE(review): this loop neither resets `success` nor asserts afterwards, so it never
    // fails the test on its own — presumably only a warm-up wait for the watched param.
    // TODO confirm intent before tightening.
    for(int i=0;i<100;i++) {
      map = TestSolrConfigHandler.getRespMap("/test1?wt=json", client);
      if("X val".equals(map.get("x"))){
        success = true;
        break;
      }
      Thread.sleep(100);
    }
    // Change the watched param and verify the V2 handler's listener observes the new value.
    payload = " {\n" +
        "  'set' : {'watched': {" +
        "                    'x':'X val changed',\n" +
        "                    'y': 'Y val'}\n" +
        "             }\n" +
        "  }";
    TestSolrConfigHandler.runConfigCommand(client,"/config/params?wt=json",payload);
    for(int i=0;i<50;i++) {
      map = TestSolrConfigHandler.getRespMap("/test1?wt=json", client);
      if("X val changed".equals(map.get("x"))){
        success = true;
        break;
      }
      Thread.sleep(100);
    }
    assertTrue("listener did not get triggered" + new String(ZkStateReader.toJSON(map), StandardCharsets.UTF_8), success);
  }
  /**
   * Zips the .class resources of the given classes into an in-memory jar-style archive.
   * The stream is closed in a finally block so no open stream is left behind on failure.
   *
   * @return buffer positioned over the complete zip bytes
   */
  public static ByteBuffer generateZip(Class... classes) throws IOException {
    SimplePostTool.BAOS bos = new SimplePostTool.BAOS();
    ZipOutputStream zipOut = new ZipOutputStream(bos);
    try {
      zipOut.setLevel(ZipOutputStream.DEFLATED);
      for (Class c : classes) {
        String path = c.getName().replace('.', '/').concat(".class");
        ZipEntry entry = new ZipEntry(path);
        ByteBuffer b = SimplePostTool.inputStreamToByteArray(c.getClassLoader().getResourceAsStream(path));
        zipOut.putNextEntry(entry);
        zipOut.write(b.array(), 0, b.limit());
        zipOut.closeEntry();
      }
    }
    finally {
      zipOut.close();
    }
    return bos.getByteBuffer();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.nagios;
import java.util.HashMap;
import java.util.Map;
import com.googlecode.jsendnsca.core.Level;
import com.googlecode.jsendnsca.core.MessagePayload;
import com.googlecode.jsendnsca.core.mocks.NagiosNscaStub;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* @version
*/
/**
 * Integration tests for the Camel Nagios component, sending passive-check payloads to an
 * in-process {@link NagiosNscaStub} on port 25667 (password "secret"). If the stub cannot
 * start (e.g. port in use) {@code canRun} is false and every test becomes a no-op.
 */
public class NagiosTest extends CamelTestSupport {
  protected boolean canRun;
  private NagiosNscaStub nagios;
  @Before
  @Override
  public void setUp() throws Exception {
    canRun = true;
    nagios = new NagiosNscaStub(25667, "secret");
    try {
      nagios.start();
    } catch (Exception e) {
      // Environment problem (likely the port) — skip the tests rather than fail them.
      log.warn("Error starting NagiosNscaStub. This exception is ignored.", e);
      canRun = false;
    }
    super.setUp();
  }
  @After
  @Override
  public void tearDown() throws Exception {
    super.tearDown();
    try {
      nagios.stop();
    } catch (Exception e) {
      // ignore
      log.warn("Error stopping NagiosNscaStub. This exception is ignored.", e);
    }
  }
  /** Plain body send: defaults to level OK, host "localhost", service = context name. */
  @Test
  public void testSendToNagios() throws Exception {
    if (!canRun) {
      return;
    }
    MockEndpoint mock = getMockEndpoint("mock:result");
    mock.expectedMessageCount(1);
    mock.allMessages().body().isInstanceOf(String.class);
    template.sendBody("direct:start", "Hello Nagios");
    assertMockEndpointsSatisfied();
    // sleep a little to let nagios stub process the payloads
    // NOTE(review): fixed sleeps make these tests timing-sensitive on slow machines.
    Thread.sleep(2000);
    assertEquals(1, nagios.getMessagePayloadList().size());
    MessagePayload payload = nagios.getMessagePayloadList().get(0);
    assertEquals("Hello Nagios", payload.getMessage());
    assertEquals("localhost", payload.getHostname());
    assertEquals(Level.OK.ordinal(), payload.getLevel());
    assertEquals(context.getName(), payload.getServiceName());
  }
  /** Two bodies sent back-to-back must arrive as two payloads in order. */
  @Test
  public void testSendTwoToNagios() throws Exception {
    if (!canRun) {
      return;
    }
    MockEndpoint mock = getMockEndpoint("mock:result");
    mock.expectedMessageCount(2);
    mock.allMessages().body().isInstanceOf(String.class);
    template.sendBody("direct:start", "Hello Nagios");
    template.sendBody("direct:start", "Bye Nagios");
    assertMockEndpointsSatisfied();
    // sleep a little to let nagios stub process the payloads
    Thread.sleep(3000);
    assertEquals(2, nagios.getMessagePayloadList().size());
    MessagePayload payload = nagios.getMessagePayloadList().get(0);
    assertEquals("Hello Nagios", payload.getMessage());
    assertEquals("localhost", payload.getHostname());
    assertEquals(Level.OK.ordinal(), payload.getLevel());
    assertEquals(context.getName(), payload.getServiceName());
    payload = nagios.getMessagePayloadList().get(1);
    assertEquals("Bye Nagios", payload.getMessage());
    assertEquals("localhost", payload.getHostname());
    assertEquals(Level.OK.ordinal(), payload.getLevel());
    assertEquals(context.getName(), payload.getServiceName());
  }
  /** Level can be overridden per-message via the LEVEL header as an enum value. */
  @Test
  public void testSendToNagiosWarn() throws Exception {
    if (!canRun) {
      return;
    }
    MockEndpoint mock = getMockEndpoint("mock:result");
    mock.expectedMessageCount(1);
    template.sendBodyAndHeader("direct:start", "Hello Nagios", NagiosConstants.LEVEL, Level.WARNING);
    assertMockEndpointsSatisfied();
    // sleep a little to let nagios stub process the payloads
    Thread.sleep(2000);
    assertEquals(1, nagios.getMessagePayloadList().size());
    MessagePayload payload = nagios.getMessagePayloadList().get(0);
    assertEquals("Hello Nagios", payload.getMessage());
    assertEquals("localhost", payload.getHostname());
    assertEquals(Level.WARNING.ordinal(), payload.getLevel());
    assertEquals(context.getName(), payload.getServiceName());
  }
  /** Same as above but the LEVEL header is given as plain text ("WARNING"). */
  @Test
  public void testSendToNagiosWarnAsText() throws Exception {
    if (!canRun) {
      return;
    }
    MockEndpoint mock = getMockEndpoint("mock:result");
    mock.expectedMessageCount(1);
    template.sendBodyAndHeader("direct:start", "Hello Nagios", NagiosConstants.LEVEL, "WARNING");
    assertMockEndpointsSatisfied();
    // sleep a little to let nagios stub process the payloads
    Thread.sleep(2000);
    assertEquals(1, nagios.getMessagePayloadList().size());
    MessagePayload payload = nagios.getMessagePayloadList().get(0);
    assertEquals("Hello Nagios", payload.getMessage());
    assertEquals("localhost", payload.getHostname());
    assertEquals(Level.WARNING.ordinal(), payload.getLevel());
    assertEquals(context.getName(), payload.getServiceName());
  }
  /** Level, hostname and service name can all be overridden via headers at once. */
  @Test
  public void testSendToNagiosMultiHeaders() throws Exception {
    if (!canRun) {
      return;
    }
    MockEndpoint mock = getMockEndpoint("mock:result");
    mock.expectedMessageCount(1);
    Map<String, Object> headers = new HashMap<String, Object>();
    headers.put(NagiosConstants.LEVEL, "CRITICAL");
    headers.put(NagiosConstants.HOST_NAME, "myHost");
    headers.put(NagiosConstants.SERVICE_NAME, "myService");
    template.sendBodyAndHeaders("direct:start", "Hello Nagios", headers);
    assertMockEndpointsSatisfied();
    // sleep a little to let nagios stub process the payloads
    Thread.sleep(2000);
    assertEquals(1, nagios.getMessagePayloadList().size());
    MessagePayload payload = nagios.getMessagePayloadList().get(0);
    assertEquals("Hello Nagios", payload.getMessage());
    assertEquals("myHost", payload.getHostname());
    assertEquals(Level.CRITICAL.ordinal(), payload.getLevel());
    assertEquals("myService", payload.getServiceName());
  }
  @Override
  protected RouteBuilder createRouteBuilder() throws Exception {
    return new RouteBuilder() {
      @Override
      public void configure() throws Exception {
        // START SNIPPET: e1
        from("direct:start").to("nagios:127.0.0.1:25667?password=secret").to("mock:result");
        // END SNIPPET: e1
      }
    };
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.jini.outrigger;
import com.sun.jini.landlord.LeasedResource;
import com.sun.jini.logging.Levels;
import com.sun.jini.proxy.MarshalledWrapper;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.InvalidObjectException;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.net.MalformedURLException;
import java.rmi.MarshalException;
import java.rmi.UnmarshalException;
import java.rmi.server.RMIClassLoader;
import java.security.DigestOutputStream;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.WeakHashMap;
import java.util.logging.Logger;
import net.jini.core.entry.Entry;
import net.jini.core.entry.UnusableEntryException;
import net.jini.id.Uuid;
import net.jini.id.UuidFactory;
import net.jini.io.MarshalledInstance;
import net.jini.loader.ClassLoading;
import net.jini.space.JavaSpace;
/**
* An <code>EntryRep</code> object contains a packaged
* <code>Entry</code> object for communication between the client and a
* <code>JavaSpace</code>.
*
* @author Sun Microsystems, Inc.
*
* @see JavaSpace
* @see Entry
*/
class EntryRep implements StorableResource, LeasedResource, Serializable {
static final long serialVersionUID = 3L;
/**
* The fields of the entry in marshalled form. Use <code>null</code>
* for <code>null</code> fields.
*/
private MarshalledInstance[] values;
private String[] superclasses; // class names of the superclasses
private long[] hashes; // superclass hashes
private long hash; // hash for the entry class
private String className; // the class ID of the entry
private String codebase; // the codebase for this entry class
private Uuid id; // space-relative storage id
private transient long expires;// expiration time
/**
* <code>true</code> if the last time this object was unmarshalled
* integrity was being enforced, <code>false</code> otherwise.
*/
private transient boolean integrity;
/** Comparator for sorting fields */
private static final FieldComparator comparator = new FieldComparator();
/**
* This object represents the passing of a <code>null</code>
* parameter as a template, which is designed to match any entry.
* When a <code>null</code> is passed, it is replaced with this
* rep, which is then handled specially in a few relevant places.
*/
private static final EntryRep matchAnyRep;
static {
try {
matchAnyRep = new EntryRep(new Entry() {
// keeps tests happy
static final long serialVersionUID = -4244768995726274609L;
}, false);
} catch (MarshalException e) {
throw new AssertionError(e);
}
}
/**
* The realClass object is transient because we neither need nor want
* it reconstituted on the other side. All we want is to be able to
* recreate it on the receiving client side. If it were not transient,
* not only would an unnecessary object creation occur, but it might
* force the download of the actual class to the server.
*/
private transient Class realClass; // real class of the contained object
/**
* Logger for logging information about operations carried out in
* the client. Note, we hard code "com.sun.jini.outrigger" so
* we don't drag in OutriggerServerImpl to outrigger-dl.jar.
*/
private static final Logger logger =
Logger.getLogger("com.sun.jini.outrigger.proxy");
/**
* Set this entry's generic data to be shared with the <code>other</code>
* object. Those fields that are object references that will be the same
* for all objects of the same type are shared this way.
* <p>
* Note that <code>codebase</code> is <em>not</em> shared. If it were,
* then the failure of one codebase could make all entries inaccessible.
* Each entry is usable insofar as the codebase under which it was
* written is usable.
*/
/**
 * Shares the type-level data (class name, superclass names and hashes) with
 * {@code other}, so all reps of the same entry type reference one copy.
 * The codebase is deliberately NOT shared — see the comment above this method.
 */
void shareWith(EntryRep other) {
  className = other.className;
  superclasses = other.superclasses;
  hashes = other.hashes;
  hash = other.hash;
}
/**
* Get the entry fields associated with the passed class and put
* them in a canonical order. The fields are sorted so that fields
* belonging to a superclasses are before fields belonging to
* subclasses and within a class fields are ordered
* lexicographically by their name.
*/
/**
 * Returns the public fields of {@code cl} sorted into the canonical order used
 * throughout EntryRep (superclass fields first, then lexicographic by name —
 * see FieldComparator), so field positions are stable across VMs.
 */
static private Field[] getFields(Class cl) {
  final Field[] fields = cl.getFields();
  Arrays.sort(fields, comparator);
  return fields;
}
/**
* Cached hash values for all classes we encounter. Weak hash used
* in case the class is GC'ed from the client's VM.
*/
static private WeakHashMap classHashes;
/**
* Lookup the hash value for the given class. If it is not
* found in the cache, generate the hash for the class and
* save it.
*/
/**
 * Returns the structural hash for {@code clazz}, computing and caching it on first use.
 * The hash is the low 64 bits of a SHA digest over the superclass hash (recursively)
 * plus each usable field's name and type name, so two VMs agree on it for identical
 * entry class definitions.
 *
 * @param clazz      entry class to hash
 * @param marshaling selects which exception type wraps a failure: MarshalException
 *                   when called while marshalling, UnusableEntryException otherwise
 */
static synchronized private Long findHash(Class clazz,
					  boolean marshaling)
    throws MarshalException, UnusableEntryException
{
  // Lazily created; WeakHashMap lets entries go away when the class is unloaded.
  if (classHashes == null)
      classHashes = new WeakHashMap();
  Long hash = (Long)classHashes.get(clazz);
  // If hash not cached, calculate it for this class and,
  // recursively, all superclasses
  //
  if (hash == null) {
      try {
	  Field[] fields = getFields(clazz);
	  MessageDigest md = MessageDigest.getInstance("SHA");
	  DataOutputStream out =
	      new DataOutputStream(
		  new DigestOutputStream(new ByteArrayOutputStream(127),
					 md));
	  Class c = clazz.getSuperclass();
	  if (c != Object.class)
	      // recursive call
	      out.writeLong(findHash(c, marshaling).longValue());
	  // Hash only usable fields, this means that we do not
	  // detect changes in non-usable fields. This should be ok
	  // since those fields do not move between space and client.
	  //
	  for (int i = 0; i < fields.length; i++) {
	      if (!usableField(fields[i]))
		  continue;
	      out.writeUTF(fields[i].getName());
	      out.writeUTF(fields[i].getType().getName());
	  }
	  out.flush();
	  // Fold the first (up to) 8 digest bytes into a little-endian long.
	  byte[] digest = md.digest();
	  long h = 0;
	  for (int i = Math.min(8, digest.length); --i >= 0; ) {
	      h += ((long)(digest[i] & 0xFF)) << (i * 8);
	  }
	  hash = new Long(h);
      } catch (Exception e) {
	  if (marshaling)
	      throw throwNewMarshalException(
		  "Exception calculating entry class hash for " +
		  clazz, e);
	  else
	      throw throwNewUnusableEntryException(
		  "Exception calculating entry class hash for " +
		  clazz, e);
      }
      classHashes.put(clazz, hash);
  }
  return hash;
}
/**
 * Create a serialized form of the entry. If <code>validate</code> is
 * <code>true</code>, basic sanity checks are done on the class to
 * ensure that it meets the requirements to be an <code>Entry</code>.
 * <code>validate</code> is <code>false</code> only when creating the
 * stand-in object for "match any", which is never actually marshalled
 * on the wire and so which doesn't need to be "proper".
 *
 * @param entry    the entry to capture
 * @param validate whether to verify that the entry class is public
 *                 with a public no-arg constructor
 * @throws MarshalException if a field value cannot be marshalled or
 *         the class hash cannot be computed
 */
private EntryRep(Entry entry, boolean validate) throws MarshalException {
    realClass = entry.getClass();
    if (validate)
        ensureValidClass(realClass);
    className = realClass.getName();
    // Record the codebase annotation so the receiver can load the class.
    codebase = RMIClassLoader.getClassAnnotation(realClass);
    /*
     * Build up the per-field and superclass information through
     * the reflection API.
     */
    final Field[] fields = getFields(realClass);
    int numFields = fields.length;
    // collect the usable field values in vals[0..nvals-1]
    MarshalledInstance[] vals = new MarshalledInstance[numFields];
    int nvals = 0;
    for (int fnum = 0; fnum < fields.length; fnum++) {
        final Field field = fields[fnum];
        if (!usableField(field))
            continue;
        final Object fieldValue;
        try {
            fieldValue = field.get(entry);
        } catch (IllegalAccessException e) {
            /* In general between using getFields() and
             * ensureValidClass this should never happen, however
             * there appear to be a few screw cases and
             * IllegalArgumentException seems appropriate.
             */
            final IllegalArgumentException iae =
                new IllegalArgumentException("Couldn't access field " +
                                             field);
            iae.initCause(e);
            throw throwRuntime(iae);
        }
        // A null field value is stored as a null slot (a wildcard when
        // this rep is used as a template).
        if (fieldValue == null) {
            vals[nvals] = null;
        } else {
            try {
                vals[nvals] = new MarshalledInstance(fieldValue);
            } catch (IOException e) {
                throw throwNewMarshalException(
                    "Can't marshal field " + field + " with value " +
                    fieldValue, e);
            }
        }
        nvals++;
    }
    // copy the vals with the correct length
    this.values = new MarshalledInstance[nvals];
    System.arraycopy(vals, 0, this.values, 0, nvals);
    try {
        hash = findHash(realClass, true).longValue();
    } catch (UnusableEntryException e) {
        // Will never happen when we pass true to findHash
        throw new AssertionError(e);
    }
    // Loop through the supertypes, making a list of all superclasses.
    ArrayList sclasses = new ArrayList();
    ArrayList shashes = new ArrayList();
    for (Class c = realClass.getSuperclass();
         c != Object.class;
         c = c.getSuperclass())
    {
        try {
            sclasses.add(c.getName());
            shashes.add(findHash(c, true));
        } catch (ClassCastException cce) {
            break; // not Serializable
        } catch (UnusableEntryException e) {
            // Will never happen when we pass true to findHash
            throw new AssertionError(e);
        }
    }
    // Flatten the collected names/hashes into the parallel arrays kept
    // by this rep (superclasses[i] corresponds to hashes[i]).
    superclasses = (String[])sclasses.toArray(new String[sclasses.size()]);
    hashes = new long[shashes.size()];
    for (int i=0; i < hashes.length; i++) {
        hashes[i] = ((Long)shashes.get(i)).longValue();
    }
}
/**
 * Create a serialized form of the entry with our object's
 * relevant fields set.
 *
 * @param entry the entry to capture; its class is validated
 * @throws MarshalException if the entry cannot be marshalled
 */
public EntryRep(Entry entry) throws MarshalException {
    this(entry, true);
}
/** Used in recovery; fields are filled in later via restore(). */
EntryRep() { }
/** Used to look up no-arg constructors. */
private static Class[] noArg = new Class[0];
/**
 * Ensure that the entry class is valid, that is, that it has appropriate
 * access: the class itself must be public and it must have a public
 * no-arg constructor. If not, throw <code>IllegalArgumentException</code>.
 */
private static void ensureValidClass(Class c) {
    // The class itself must be public.
    if (!Modifier.isPublic(c.getModifiers())) {
        throw throwRuntime(new IllegalArgumentException(
            "entry class " + c.getName() + " not public"));
    }
    // It must also expose a public no-arg constructor; reflection
    // failures of any kind count as "no usable constructor".
    boolean hasPublicCtor;
    try {
        final Constructor ctor = c.getConstructor(noArg);
        hasPublicCtor = Modifier.isPublic(ctor.getModifiers());
    } catch (NoSuchMethodException e) {
        hasPublicCtor = false;
    } catch (SecurityException e) {
        hasPublicCtor = false;
    }
    if (!hasPublicCtor) {
        throw throwRuntime(new IllegalArgumentException("entry class " +
            c.getName() +" needs public no-arg constructor"));
    }
}
/**
 * The <code>EntryRep</code> that marks a ``match any'' request.
 * This is used to represent a <code>null</code> template.
 *
 * @return the shared "match any" rep instance
 */
static EntryRep matchAnyEntryRep() {
    return matchAnyRep;
}
/**
 * Return <code>true</code> if the given rep is that ``match any''
 * <code>EntryRep</code>.
 */
private static boolean isMatchAny(EntryRep rep) {
    // Compared via equals(), not ==, so a deserialized copy of the
    // match-any rep is recognized too.
    return matchAnyRep.equals(rep);
}
/**
 * Return the class name that is used by the ``match any'' EntryRep
 */
static String matchAnyClassName() {
    return matchAnyRep.classFor();
}
/**
 * Return an <code>Entry</code> object built out of this
 * <code>EntryRep</code> This is used by the client-side proxy to
 * convert the <code>EntryRep</code> it gets from the space server
 * into the actual <code>Entry</code> object it represents.
 * <p>
 * The entry class is loaded from the recorded codebase, verified
 * against the stored structural hash, instantiated via its no-arg
 * constructor, and its usable fields are unmarshalled one by one;
 * per-field failures are collected so a partially-usable entry can
 * be reported.
 *
 * @throws UnusableEntryException
 *                   One or more fields in the entry cannot be
 *                   deserialized, or the class for the entry type
 *                   itself cannot be deserialized.
 */
Entry entry() throws UnusableEntryException {
    // NOTE: objIn is never assigned below; kept as-is to avoid any
    // behavioral change in this review pass.
    ObjectInputStream objIn = null;
    try {
        ArrayList badFields = null;
        ArrayList except = null;
        realClass = ClassLoading.loadClass(codebase, className,
                                           null, integrity, null);
        // Reject the class if its structure changed since this rep
        // was created (hash mismatch).
        if (findHash(realClass, false).longValue() != hash)
            throw throwNewUnusableEntryException(
                new IncompatibleClassChangeError(realClass + " changed"));
        Entry entryObj = (Entry) realClass.newInstance();
        Field[] fields = getFields(realClass);
        /*
         * Loop through the fields, ensuring no primitives and
         * checking for wildcards.
         */
        int nvals = 0; // index into this.values[]
        for (int i = 0; i < fields.length; i++) {
            Throwable nested = null;
            try {
                if (!usableField(fields[i]))
                    continue;
                final MarshalledInstance val = values[nvals++];
                // null slot means the field value was null.
                Object value = (val == null ? null : val.get(integrity));
                fields[i].set(entryObj, value);
            } catch (Throwable e) {
                nested = e;
            }
            if (nested != null) { // some problem occurred
                if (badFields == null) {
                    badFields = new ArrayList(fields.length);
                    except = new ArrayList(fields.length);
                }
                badFields.add(fields[i].getName());
                except.add(nested);
            }
        }
        /* See if any fields have vanished from the class,
         * because of the hashing this should never happen but
         * throwing an exception that provides more info
         * (instead of AssertionError) seems harmless.
         */
        if (nvals < values.length) {
            throw throwNewUnusableEntryException(
                entryObj,        // should this be null?
                null,            // array of bad-field names
                new Throwable[] { // array of exceptions
                    new IncompatibleClassChangeError(
                        "A usable field has been removed from " +
                        entryObj.getClass().getName() +
                        " since this EntryRep was created")
                });
        }
        // if there were any bad fields, throw the exception
        if (badFields != null) {
            String[] bf =
                (String[]) badFields.toArray(
                    new String[badFields.size()]);
            Throwable[] ex =
                (Throwable[]) except.toArray(new Throwable[bf.length]);
            throw throwNewUnusableEntryException(entryObj, bf, ex);
        }
        // everything fine, return the entry
        return entryObj;
    } catch (InstantiationException e) {
        /*
         * If this happens outside a per-field deserialization then
         * this is a complete failure The per-field ones are caught
         * inside the per-field loop.
         */
        throw throwNewUnusableEntryException(e);
    } catch (ClassNotFoundException e) {
        // see above
        throw throwNewUnusableEntryException("Encountered a " +
            "ClassNotFoundException while unmarshalling " + className, e);
    } catch (IllegalAccessException e) {
        // see above
        throw throwNewUnusableEntryException(e);
    } catch (RuntimeException e) {
        // see above
        throw throwNewUnusableEntryException("Encountered a " +
            "RuntimeException while unmarshalling " + className, e);
    } catch (MalformedURLException e) {
        // see above
        throw throwNewUnusableEntryException("Malformed URL " +
            "associated with entry of type " + className, e);
    } catch (MarshalException e) {
        // because we call findHash() w/ false, should never happen
        throw new AssertionError(e);
    }
}
// inherit doc comment
// Based only on the class name so equal reps (same class, same
// values) always hash alike; field values are deliberately excluded.
public int hashCode() {
    return className.hashCode();
}
/**
 * To be equal, the other object must by an <code>EntryRep</code> for
 * an object of the same class with the same values for each field.
 * This is <em>not</em> a template match -- see <code>matches</code>.
 *
 * @see #matches
 */
public boolean equals(Object o) {
    // Same object is trivially equal; instanceof rejects both null
    // and foreign types.
    if (this == o)
        return true;
    if (!(o instanceof EntryRep))
        return false;
    final EntryRep that = (EntryRep) o;
    // Differing structural hashes mean differing entry classes.
    if (hash != that.hash)
        return false;
    /* Paranoid check just to make sure we can't get an
     * IndexOutOfBoundsException. Should never happen.
     */
    if (values.length != that.values.length)
        return false;
    /* Compare slot by slot. A null/non-null mismatch settles the
     * question immediately without deserializing anything; when both
     * slots are non-null we fall through to the (more expensive)
     * MarshalledInstance equality check.
     */
    for (int i = 0; i < values.length; i++) {
        final MarshalledInstance mine = values[i];
        final MarshalledInstance theirs = that.values[i];
        if (mine == null) {
            if (theirs != null)
                return false;
        } else if (theirs == null || !mine.equals(theirs)) {
            return false;
        }
    }
    return true; // every slot agreed
}
/**
 * Return <code>true</code> if the field is to be used for the
 * entry. That is, return <code>true</code> if the field isn't
 * <code>transient</code>, <code>static</code>, or <code>final</code>.
 * @throws IllegalArgumentException
 *                  The field is not <code>transient</code>,
 *                  <code>static</code>, or <code>final</code>, but
 *                  is primitive and hence not a proper field for
 *                  an <code>Entry</code>.
 */
static private boolean usableField(Field field) {
    // transient/static/final fields are silently skipped
    final int skipMask =
        Modifier.TRANSIENT | Modifier.STATIC | Modifier.FINAL;
    if ((field.getModifiers() & skipMask) != 0)
        return false;
    // any field that survives the mask must hold a reference type
    if (field.getType().isPrimitive()) {
        throw throwRuntime(new IllegalArgumentException(
            "primitive field, " + field + ", not allowed in an Entry"));
    }
    return true;
}
/**
 * Return the ID.
 *
 * @return this rep's Uuid, or null if pickID() has not been called
 */
Uuid id() {
    return id;
}
/**
 * Pick a random <code>Uuid</code> and set our id field to it.
 * @throws IllegalStateException if this method has already
 * been called.
 */
void pickID() {
    // Guard: the id is write-once.
    if (id != null)
        throw new IllegalStateException("pickID called more than once");
    id = UuidFactory.generate();
}
/**
 * Return the <code>MarshalledObject</code> for the given field.
 *
 * @param fieldNum index into the usable-field values array
 * @return the marshalled value, or null for a null/wildcard slot
 */
public MarshalledInstance value(int fieldNum) {
    return values[fieldNum];
}
/**
 * Return the number of fields in this kind of entry.
 * A rep with no recorded values reports zero.
 */
public int numFields() {
    return (values == null) ? 0 : values.length;
}
/**
 * Return the class name for this entry.
 */
public String classFor() {
    return className;
}
/**
 * Return the array names of superclasses of this entry type.
 * NOTE(review): returns the internal array directly; callers could
 * mutate it — confirm all callers treat it as read-only.
 */
public String[] superclasses() {
    return superclasses;
}
/**
 * Return the hash of this entry type.
 */
long getHash() {
    return hash;
}
/**
 * Return the array of superclass hashes of this entry type.
 * Parallel to the array returned by superclasses().
 */
long[] getHashes() {
    return hashes;
}
/**
 * See if the other object matches the template object this
 * represents. (Note that even though "this" is a template, it may
 * have no wildcards -- a template can have all values.)
 */
boolean matches(EntryRep other) {
    /*
     * We rely on "this" being the template: the real object must be
     * at least our type, so (a) field types already line up and
     * (b) it has at least as many fields as we do.
     */
    // The "match any" template matches every entry.
    if (EntryRep.isMatchAny(this))
        return true;
    for (int f = 0; f < values.length; f++) {
        final MarshalledInstance tmplVal = values[f];
        // null is a wildcard slot; anything matches it.
        if (tmplVal != null && !tmplVal.equals(other.values[f])) {
            return false; // a concrete slot disagreed
        }
    }
    return true; // no mismatches, so must be OK
}
/** Short diagnostic form: the rep's entry class name in brackets. */
public String toString() {
    return "EntryRep[" + className + "]";
}
/**
 * Return <code>true</code> if this entry represents an object that
 * is at least the type of the <code>otherClass</code>.
 */
boolean isAtLeastA(String otherClass) {
    // The "match any" class name acts as a universal supertype.
    if (otherClass.equals(matchAnyClassName()))
        return true;
    // Exact type?
    if (className.equals(otherClass))
        return true;
    // Otherwise scan the recorded superclass names.
    for (int i = 0; i < superclasses.length; i++) {
        if (superclasses[i].equals(otherClass)) {
            return true;
        }
    }
    return false;
}
/** Comparator for sorting fields. Cribbed from Reggie */
private static class FieldComparator implements Comparator {
    public FieldComparator() {}

    /** Super before subclass, alphabetical within a given class */
    public int compare(Object o1, Object o2) {
        final Field left = (Field) o1;
        final Field right = (Field) o2;
        if (left == right)
            return 0;
        final Class leftClass = left.getDeclaringClass();
        final Class rightClass = right.getDeclaringClass();
        // Same declaring class: order by field name.
        if (leftClass == rightClass)
            return left.getName().compareTo(right.getName());
        // A superclass is assignable from its subclass, so it sorts first.
        return leftClass.isAssignableFrom(rightClass) ? -1 : 1;
    }
}
/**
 * Use <code>readObject</code> method to capture whether or
 * not integrity was being enforced when this object was
 * unmarshalled, and to perform basic integrity checks.
 *
 * @throws InvalidObjectException if any required field is null or the
 *         superclass name/hash arrays disagree in length
 */
private void readObject(ObjectInputStream in)
    throws IOException, ClassNotFoundException
{
    in.defaultReadObject();
    // Reject malformed streams up front rather than failing later.
    if (className == null)
        throw new InvalidObjectException("null className");
    if (values == null)
        throw new InvalidObjectException("null values");
    if (superclasses == null)
        throw new InvalidObjectException("null superclasses");
    if (hashes == null)
        throw new InvalidObjectException("null hashes");
    // The two arrays are parallel; mismatched lengths mean corruption.
    if (hashes.length != superclasses.length)
        throw new InvalidObjectException("hashes.length (" +
            hashes.length + ") does not equal superclasses.length (" +
            superclasses.length + ")");
    // get value for integrity flag
    integrity = MarshalledWrapper.integrityEnforced(in);
}
/**
 * We should always have data in the stream, if this method
 * gets called there is something wrong.
 *
 * @throws InvalidObjectException always
 */
private void readObjectNoData() throws InvalidObjectException {
    // Fixed copy-paste in the message: this class is EntryRep, the
    // old text blamed "SpaceProxy".
    throw new
        InvalidObjectException("EntryRep should always have data");
}
// -------------------------------------------------------
// Methods required by LeasedResource and StorableResource
// -------------------------------------------------------
// inherit doc comment from LeasedResource
public void setExpiration(long newExpiration) {
    expires = newExpiration;
}
// inherit doc comment from LeasedResource
public long getExpiration() {
    return expires;
}
// inherit doc comment from LeasedResource
// We use the Rep ID as the cookie
public Uuid getCookie() {
    return id;
}
// -------------------------------------
// Methods required by StorableResource
// -------------------------------------
// inherit doc comment
// Writes the rep's state in a fixed order; restore() must read the
// same fields back in exactly this order.
public void store(ObjectOutputStream out) throws IOException {
    final long bits0;
    final long bits1;
    // A null id is encoded as two zero longs (see restore()).
    if (id == null) {
        bits0 = 0;
        bits1 = 0;
    } else {
        bits0 = id.getMostSignificantBits();
        bits1 = id.getLeastSignificantBits();
    }
    out.writeLong(bits0);
    out.writeLong(bits1);
    out.writeLong(expires);
    out.writeObject(codebase);
    out.writeObject(className);
    out.writeObject(superclasses);
    out.writeObject(values);
    out.writeLong(hash);
    out.writeObject(hashes);
}
// inherit doc comment
// Mirror of store(): reads the fields back in the exact order they
// were written.
public void restore(ObjectInputStream in)
    throws IOException, ClassNotFoundException
{
    final long bits0 = in.readLong();
    final long bits1 = in.readLong();
    // Two zero longs is the store() encoding for a null id.
    if (bits0 == 0 && bits1 == 0) {
        id = null;
    } else {
        id = UuidFactory.create(bits0, bits1);
    }
    expires = in.readLong();
    codebase = (String)in.readObject();
    className = (String)in.readObject();
    superclasses = (String [])in.readObject();
    values = (MarshalledInstance [])in.readObject();
    hash = in.readLong();
    hashes = (long[])in.readObject();
}
// Utility methods for throwing and logging exceptions
/**
 * Log and throw a runtime exception.
 * Declared with a return type only so call sites can write
 * <code>throw throwRuntime(...)</code>; it never actually returns.
 */
private static RuntimeException throwRuntime(RuntimeException e) {
    if (logger.isLoggable(Levels.FAILED)) {
        logger.log(Levels.FAILED, e.getMessage(), e);
    }
    throw e;
}
/**
 * Construct, log, and throw a new MarshalException.
 * The return type exists only for <code>throw throwNew...()</code>
 * call sites; this method always throws.
 */
private static MarshalException throwNewMarshalException(
    String msg, Exception nested)
    throws MarshalException
{
    final MarshalException me = new MarshalException(msg, nested);
    if (logger.isLoggable(Levels.FAILED)) {
        logger.log(Levels.FAILED, msg, me);
    }
    throw me;
}
/**
 * Construct, log, and throw a new UnusableEntryException reporting
 * the partially-deserialized entry and its per-field failures.
 * Always throws; the return type is for caller convenience.
 *
 * @param partial    the partially-built entry (bad fields unset)
 * @param badFields  names of fields that failed to deserialize
 * @param exceptions the failure for each entry in badFields
 */
private UnusableEntryException throwNewUnusableEntryException(
    Entry partial, String[] badFields, Throwable[] exceptions)
    throws UnusableEntryException
{
    final UnusableEntryException uee =
        new UnusableEntryException(partial, badFields, exceptions);
    if (logger.isLoggable(Levels.FAILED)) {
        logger.log(Levels.FAILED,
                   "failure constructing entry of type " + className, uee);
    }
    throw uee;
}
/**
 * Construct, log, and throw a new UnusableEntryException that
 * wraps a given exception. Always throws; the return type is for
 * caller convenience.
 */
private static UnusableEntryException throwNewUnusableEntryException(
    Throwable nested)
    throws UnusableEntryException
{
    final UnusableEntryException uee = new UnusableEntryException(nested);
    if (logger.isLoggable(Levels.FAILED)) {
        logger.log(Levels.FAILED, nested.getMessage(), uee);
    }
    throw uee;
}
/**
 * Construct, log, and throw a new UnusableEntryException that
 * wraps a newly constructed UnmarshalException (which optionally
 * wraps a given exception). Always throws; the return type is for
 * caller convenience.
 */
private static UnusableEntryException throwNewUnusableEntryException(
    String msg, Exception nested)
    throws UnusableEntryException
{
    final UnmarshalException ue = new UnmarshalException(msg, nested);
    final UnusableEntryException uee = new UnusableEntryException(ue);
    if (logger.isLoggable(Levels.FAILED)) {
        logger.log(Levels.FAILED, msg, uee);
    }
    throw uee;
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.testframework.sm.runner.events;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import jetbrains.buildServer.messages.serviceMessages.TestFailed;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.Map;
/**
 * Tree-node event describing a single test failure: the failure message,
 * optional stack trace, optional expected/actual comparison data (inline
 * text or file-backed), and an optional duration.
 */
public class TestFailedEvent extends TreeNodeEvent {
  private final String myLocalizedFailureMessage;
  private final String myStacktrace;
  // true for an "error" (unexpected exception), false for an assertion failure
  private final boolean myTestError;
  private final String myComparisonFailureActualText;
  private final String myComparisonFailureExpectedText;
  private final String myExpectedFilePath;
  private final String myActualFilePath;
  // duration in milliseconds, -1 when unknown
  private final long myDurationMillis;
  private final boolean myExpectedFileTemp;
  private final boolean myActualFileTemp;

  public TestFailedEvent(@NotNull TestFailed testFailed, boolean testError) {
    this(testFailed, testError, null);
  }

  public TestFailedEvent(@NotNull TestFailed testFailed, boolean testError, @Nullable String expectedFilePath) {
    this(testFailed, testError, expectedFilePath, null);
  }

  /**
   * Builds the event from a service-message {@code TestFailed}. If the
   * message carries no inline expected/actual text but a file path is
   * given, the text is loaded from that file (load failures are ignored,
   * leaving the text null).
   */
  public TestFailedEvent(@NotNull TestFailed testFailed,
                         boolean testError,
                         @Nullable String expectedFilePath,
                         @Nullable String actualFilePath) {
    super(testFailed.getTestName(), TreeNodeEvent.getNodeId(testFailed));
    // Fail fast: a failure event without a message is malformed.
    if (testFailed.getFailureMessage() == null) throw new NullPointerException();
    myLocalizedFailureMessage = testFailed.getFailureMessage();
    myStacktrace = testFailed.getStacktrace();
    myTestError = testError;
    myExpectedFilePath = expectedFilePath;
    String expected = testFailed.getExpected();
    if (expected == null && expectedFilePath != null) {
      try {
        expected = FileUtil.loadFile(new File(expectedFilePath));
      }
      catch (IOException ignore) {} // best-effort: fall back to null
    }
    myComparisonFailureExpectedText = expected;
    myActualFilePath = actualFilePath;
    String actual = testFailed.getActual();
    if (actual == null && actualFilePath != null) {
      try {
        actual = FileUtil.loadFile(new File(actualFilePath));
      }
      catch (IOException ignore) {} // best-effort: fall back to null
    }
    myComparisonFailureActualText = actual;
    // Optional service-message attributes; absent values yield -1 / false.
    Map<String, String> attributes = testFailed.getAttributes();
    myDurationMillis = parseDuration(attributes.get("duration"));
    myActualFileTemp = Boolean.parseBoolean(attributes.get("actualIsTempFile"));
    myExpectedFileTemp = Boolean.parseBoolean(attributes.get("expectedIsTempFile"));
  }

  public boolean isExpectedFileTemp() {
    return myExpectedFileTemp;
  }

  public boolean isActualFileTemp() {
    return myActualFileTemp;
  }

  /** Parses a millisecond duration string; returns -1 for null/empty/invalid. */
  private static long parseDuration(@Nullable String durationStr) {
    if (!StringUtil.isEmpty(durationStr)) {
      try {
        return Long.parseLong(durationStr);
      }
      catch (NumberFormatException ignored) {
      }
    }
    return -1;
  }

  public TestFailedEvent(@NotNull String testName,
                         @NotNull String localizedFailureMessage,
                         @Nullable String stackTrace,
                         boolean testError,
                         @Nullable String comparisonFailureActualText,
                         @Nullable String comparisonFailureExpectedText) {
    this(testName,
         null,
         localizedFailureMessage,
         stackTrace,
         testError,
         comparisonFailureActualText,
         comparisonFailureExpectedText,
         null,
         null,
         false,
         false,
         -1);
  }

  /**
   * Full-detail constructor. Expected text is loaded from
   * {@code expectedFilePath} when not supplied inline.
   * NOTE(review): unlike the TestFailed-based constructor, the actual
   * text is NOT loaded from {@code actualFilePath} here — confirm
   * whether this asymmetry is intentional.
   */
  public TestFailedEvent(@Nullable String testName,
                         @Nullable String id,
                         @NotNull String localizedFailureMessage,
                         @Nullable String stackTrace,
                         boolean testError,
                         @Nullable String comparisonFailureActualText,
                         @Nullable String comparisonFailureExpectedText,
                         @Nullable String expectedFilePath,
                         @Nullable String actualFilePath,
                         boolean expectedFileTemp,
                         boolean actualFileTemp,
                         long durationMillis) {
    super(testName, id);
    myLocalizedFailureMessage = localizedFailureMessage;
    myStacktrace = stackTrace;
    myTestError = testError;
    myExpectedFilePath = expectedFilePath;
    if (comparisonFailureExpectedText == null && expectedFilePath != null) {
      try {
        comparisonFailureExpectedText = FileUtil.loadFile(new File(expectedFilePath));
      }
      catch (IOException ignore) {} // best-effort: fall back to null
    }
    myComparisonFailureActualText = comparisonFailureActualText;
    myActualFilePath = actualFilePath;
    myComparisonFailureExpectedText = comparisonFailureExpectedText;
    myDurationMillis = durationMillis;
    myExpectedFileTemp = expectedFileTemp;
    myActualFileTemp = actualFileTemp;
  }

  @NotNull
  public String getLocalizedFailureMessage() {
    return myLocalizedFailureMessage;
  }

  @Nullable
  public String getStacktrace() {
    return myStacktrace;
  }

  public boolean isTestError() {
    return myTestError;
  }

  @Nullable
  public String getComparisonFailureActualText() {
    return myComparisonFailureActualText;
  }

  @Nullable
  public String getComparisonFailureExpectedText() {
    return myComparisonFailureExpectedText;
  }

  @Override
  protected void appendToStringInfo(@NotNull StringBuilder buf) {
    append(buf, "localizedFailureMessage", myLocalizedFailureMessage);
    append(buf, "stacktrace", myStacktrace);
    append(buf, "isTestError", myTestError);
    append(buf, "comparisonFailureActualText", myComparisonFailureActualText);
    append(buf, "comparisonFailureExpectedText", myComparisonFailureExpectedText);
  }

  /**
   * @deprecated use {@link #getExpectedFilePath()} instead
   */
  @Deprecated
  public String getFilePath() {
    return myExpectedFilePath;
  }

  @Nullable
  public String getExpectedFilePath() {
    return myExpectedFilePath;
  }

  @Nullable
  public String getActualFilePath() {
    return myActualFilePath;
  }

  /**
   * @return the test duration in milliseconds, or -1 if undefined
   */
  public long getDurationMillis() {
    return myDurationMillis;
  }
}
| |
/*
* Copyright 2011 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.pmml.pmml_4_1;
import org.dmg.pmml.pmml_4_1.descr.ClusteringModel;
import org.dmg.pmml.pmml_4_1.descr.NaiveBayesModel;
import org.dmg.pmml.pmml_4_1.descr.NeuralNetwork;
import org.dmg.pmml.pmml_4_1.descr.PMML;
import org.dmg.pmml.pmml_4_1.descr.RegressionModel;
import org.dmg.pmml.pmml_4_1.descr.Scorecard;
import org.dmg.pmml.pmml_4_1.descr.SupportVectorMachineModel;
import org.dmg.pmml.pmml_4_1.descr.TreeModel;
import org.drools.compiler.compiler.PMMLCompiler;
import org.drools.core.io.impl.ByteArrayResource;
import org.drools.core.io.impl.ClassPathResource;
import org.kie.api.KieBase;
import org.kie.api.KieServices;
import org.kie.api.io.Resource;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieContainer;
import org.kie.api.runtime.KieSession;
import org.kie.internal.builder.KnowledgeBuilderResult;
import org.kie.internal.io.ResourceFactory;
import org.mvel2.templates.SimpleTemplateRegistry;
import org.mvel2.templates.TemplateCompiler;
import org.mvel2.templates.TemplateRegistry;
import org.xml.sax.SAXException;
import javax.xml.XMLConstants;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
public class PMML4Compiler implements PMMLCompiler {
public static final String PMML_NAMESPACE = "org.dmg.pmml.pmml_4_1";
public static final String PMML_DROOLS = "org.drools.pmml.pmml_4_1";
public static final String PMML = PMML_NAMESPACE + ".descr";
public static final String SCHEMA_PATH = "xsd/org/dmg/pmml/pmml_4_1/pmml-4-1.xsd";
public static final String BASE_PACK = PMML_DROOLS.replace('.','/');
protected static boolean globalLoaded = false;
protected static final String[] GLOBAL_TEMPLATES = new String[] {
"global/pmml_header.drlt",
"global/pmml_import.drlt",
"global/modelMark.drlt",
"global/dataDefinition/common.drlt",
"global/dataDefinition/rootDataField.drlt",
"global/dataDefinition/inputBinding.drlt",
"global/dataDefinition/outputBinding.drlt",
"global/dataDefinition/ioTypeDeclare.drlt",
"global/dataDefinition/updateIOField.drlt",
"global/dataDefinition/inputFromEP.drlt",
"global/dataDefinition/inputBean.drlt",
"global/dataDefinition/outputBean.drlt",
"global/manipulation/confirm.drlt",
"global/manipulation/mapMissingValues.drlt",
"global/manipulation/propagateMissingValues.drlt",
"global/validation/intervalsOnDomainRestriction.drlt",
"global/validation/valuesOnDomainRestriction.drlt",
"global/validation/valuesOnDomainRestrictionMissing.drlt",
"global/validation/valuesOnDomainRestrictionInvalid.drlt",
};
protected static boolean transformationLoaded = false;
protected static final String[] TRANSFORMATION_TEMPLATES = new String[] {
"transformations/normContinuous/boundedLowerOutliers.drlt",
"transformations/normContinuous/boundedUpperOutliers.drlt",
"transformations/normContinuous/normContOutliersAsMissing.drlt",
"transformations/normContinuous/linearTractNormalization.drlt",
"transformations/normContinuous/lowerExtrapolateLinearTractNormalization.drlt",
"transformations/normContinuous/upperExtrapolateLinearTractNormalization.drlt",
"transformations/aggregate/aggregate.drlt",
"transformations/aggregate/collect.drlt",
"transformations/simple/constantField.drlt",
"transformations/simple/aliasedField.drlt",
"transformations/normDiscrete/indicatorFieldYes.drlt",
"transformations/normDiscrete/indicatorFieldNo.drlt",
"transformations/normDiscrete/predicateField.drlt",
"transformations/discretize/intervalBinning.drlt",
"transformations/discretize/outOfBinningDefault.drlt",
"transformations/discretize/outOfBinningMissing.drlt",
"transformations/mapping/mapping.drlt",
"transformations/functions/apply.drlt",
"transformations/functions/function.drlt"
};
protected static boolean miningLoaded = false;
protected static final String[] MINING_TEMPLATES = new String[] {
"models/common/mining/miningField.drlt",
"models/common/mining/miningFieldInvalid.drlt",
"models/common/mining/miningFieldMissing.drlt",
"models/common/mining/miningFieldOutlierAsMissing.drlt",
"models/common/mining/miningFieldOutlierAsExtremeLow.drlt",
"models/common/mining/miningFieldOutlierAsExtremeUpp.drlt",
"models/common/target/targetReshape.drlt",
"models/common/target/aliasedOutput.drlt",
"models/common/target/addOutputFeature.drlt",
"models/common/target/addRelOutputFeature.drlt",
"models/common/target/outputQuery.drlt",
"models/common/target/outputQueryPredicate.drlt"
};
protected static boolean neuralLoaded = false;
protected static final String[] NEURAL_TEMPLATES = new String[] {
"models/neural/neuralBeans.drlt",
"models/neural/neuralWireInput.drlt",
"models/neural/neuralBuildSynapses.drlt",
"models/neural/neuralBuildNeurons.drlt",
"models/neural/neuralLinkSynapses.drlt",
"models/neural/neuralFire.drlt",
"models/neural/neuralLayerMaxNormalization.drlt",
"models/neural/neuralLayerSoftMaxNormalization.drlt",
"models/neural/neuralOutputField.drlt",
"models/neural/neuralClean.drlt"
};
protected static boolean svmLoaded = false;
protected static final String[] SVM_TEMPLATES = new String[] {
"models/svm/svmParams.drlt",
"models/svm/svmDeclare.drlt",
"models/svm/svmFunctions.drlt",
"models/svm/svmBuild.drlt",
"models/svm/svmInitSupportVector.drlt",
"models/svm/svmInitInputVector.drlt",
"models/svm/svmKernelEval.drlt",
"models/svm/svmOutputGeneration.drlt",
"models/svm/svmOutputVoteDeclare.drlt",
"models/svm/svmOutputVote1vN.drlt",
"models/svm/svmOutputVote1v1.drlt",
};
protected static boolean naiveBayesLoaded = false;
protected static final String[] NAIVE_BAYES_TEMPLATES = new String[] {
"models/bayes/naiveBayesDeclare.drlt",
"models/bayes/naiveBayesEval.drlt",
"models/bayes/naiveBayesBuildCounts.drlt",
"models/bayes/naiveBayesBuildOuts.drlt",
};
    // Template resource sets, grouped by PMML model type. Each group is guarded
    // by a static "loaded" flag so its templates are compiled into the shared
    // registry at most once per JVM.

    // Regression models.
    protected static boolean simpleRegLoaded = false;
    protected static final String[] SIMPLEREG_TEMPLATES = new String[] {
            "models/regression/regDeclare.drlt",
            "models/regression/regCommon.drlt",
            "models/regression/regParams.drlt",
            "models/regression/regEval.drlt",
            "models/regression/regClaxOutput.drlt",
            "models/regression/regNormalization.drlt",
            "models/regression/regDecumulation.drlt",

    };

    // Clustering models.
    protected static boolean clusteringLoaded = false;
    protected static final String[] CLUSTERING_TEMPLATES = new String[] {
            "models/clustering/clusteringDeclare.drlt",
            "models/clustering/clusteringInit.drlt",
            "models/clustering/clusteringEvalDistance.drlt",
            "models/clustering/clusteringEvalSimilarity.drlt",
            "models/clustering/clusteringMatrixCompare.drlt"
    };

    // Decision-tree models, including the miss-handling strategy variants.
    protected static boolean treeLoaded = false;
    protected static final String[] TREE_TEMPLATES = new String[] {
            "models/tree/treeDeclare.drlt",
            "models/tree/treeCommon.drlt",
            "models/tree/treeInputDeclare.drlt",
            "models/tree/treeInit.drlt",
            "models/tree/treeAggregateEval.drlt",
            "models/tree/treeDefaultEval.drlt",
            "models/tree/treeEval.drlt",
            "models/tree/treeIOBinding.drlt",
            "models/tree/treeMissHandleAggregate.drlt",
            "models/tree/treeMissHandleWeighted.drlt",
            "models/tree/treeMissHandleLast.drlt",
            "models/tree/treeMissHandleNull.drlt",
            "models/tree/treeMissHandleNone.drlt"
    };

    // Scorecard models.
    protected static boolean scorecardLoaded = false;
    protected static final String[] SCORECARD_TEMPLATES = new String[] {
            "models/scorecard/scorecardInit.drlt",
            "models/scorecard/scorecardParamsInit.drlt",
            "models/scorecard/scorecardDeclare.drlt",
            "models/scorecard/scorecardDataDeclare.drlt",
            "models/scorecard/scorecardPartialScore.drlt",
            "models/scorecard/scorecardScoring.drlt",
            "models/scorecard/scorecardOutputGeneration.drlt",
            "models/scorecard/scorecardOutputRankCode.drlt"
    };

    // Classpath locations of the template files. BASE_PACK is declared earlier
    // in this class (outside this view) — presumably the compiler's base package path.
    protected static final String RESOURCE_PATH = BASE_PACK;
    protected static final String TEMPLATE_PATH = "/" + RESOURCE_PATH + "/templates/";

    // Shared, lazily-initialized template registry (see initRegistry()).
    private static TemplateRegistry registry;
    // Results produced by the static visitor machinery; shared across instances.
    private static List<KnowledgeBuilderResult> visitorBuildResults = new ArrayList<KnowledgeBuilderResult>();
    // Per-instance compilation results.
    private List<KnowledgeBuilderResult> results;
    // XML schema used to validate PMML documents (may stay null if loading fails).
    private Schema schema;

    // Helper object exposed to the visitor rules as the "utils" global.
    private PMML4Helper helper;
public PMML4Compiler() {
super();
this.results = new ArrayList<KnowledgeBuilderResult>();
helper = new PMML4Helper();
helper.setPack( "org.drools.pmml.pmml_4_1.test" );
SchemaFactory sf = SchemaFactory.newInstance( XMLConstants.W3C_XML_SCHEMA_NS_URI );
try {
schema = sf.newSchema( Thread.currentThread().getContextClassLoader().getResource( SCHEMA_PATH ) );
} catch ( SAXException e ) {
e.printStackTrace();
}
}
    /** Returns the helper exposed to the generation rules as the "utils" global. */
    public PMML4Helper getHelper() {
        return helper;
    }
public String generateTheory( PMML pmml ) {
StringBuilder sb = new StringBuilder();
//dumpModel( pmml, System.out );
KieBase visitor;
try {
visitor = checkBuildingResources( pmml );
} catch ( IOException e ) {
this.results.add( new PMMLError( e.getMessage() ) );
return null;
}
KieSession visitorSession = visitor.newKieSession();
helper.reset();
visitorSession.setGlobal( "registry", registry );
visitorSession.setGlobal( "fld2var", new HashMap() );
visitorSession.setGlobal( "utils", helper );
visitorSession.setGlobal( "theory", sb );
visitorSession.insert( pmml );
visitorSession.fireAllRules();
String modelEvaluatingRules = sb.toString();
visitorSession.dispose();
//System.out.println( modelEvaluatingRules );
return modelEvaluatingRules;
}
private static void initRegistry() {
if ( registry == null ) {
registry = new SimpleTemplateRegistry();
}
if ( ! globalLoaded ) {
for ( String ntempl : GLOBAL_TEMPLATES ) {
prepareTemplate( ntempl );
}
globalLoaded = true;
}
if ( ! transformationLoaded ) {
for ( String ntempl : TRANSFORMATION_TEMPLATES ) {
prepareTemplate( ntempl );
}
transformationLoaded = true;
}
if ( ! miningLoaded ) {
for ( String ntempl : MINING_TEMPLATES ) {
prepareTemplate( ntempl );
}
miningLoaded = true;
}
}
    /**
     * Ensures the templates needed by the model types present in the given PMML
     * document are loaded, and selects the KieBase to run the generation with.
     * With exactly one model type present, a model-specific KieBase
     * ("PMML-Bayes", "PMML-Tree", ...) is chosen; as soon as a second model is
     * seen, the choice degrades to the generic "PMML" base; with no recognized
     * model at all, "PMML-Base" is used.
     *
     * NOTE(review): the "loaded" flags are static and mutated here without
     * synchronization — presumably single-threaded use is assumed; confirm.
     */
    private static KieBase checkBuildingResources( PMML pmml ) throws IOException {

        KieServices ks = KieServices.Factory.get();
        KieContainer kieContainer = ks.getKieClasspathContainer();

        if ( registry == null ) {
            initRegistry();
        }

        String chosenKieBase = null;

        for ( Object o : pmml.getAssociationModelsAndBaselineModelsAndClusteringModels() ) {

            if ( o instanceof NaiveBayesModel ) {
                if ( ! naiveBayesLoaded ) {
                    for ( String ntempl : NAIVE_BAYES_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    naiveBayesLoaded = true;
                }
                // first model type keeps its dedicated base; any further model demotes to "PMML"
                chosenKieBase = chosenKieBase == null ? "PMML-Bayes" : "PMML";
            }

            if ( o instanceof NeuralNetwork ) {
                if ( ! neuralLoaded ) {
                    for ( String ntempl : NEURAL_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    neuralLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "PMML-Neural" : "PMML";
            }

            if ( o instanceof ClusteringModel ) {
                if ( ! clusteringLoaded ) {
                    for ( String ntempl : CLUSTERING_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    clusteringLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "PMML-Cluster" : "PMML";
            }

            if ( o instanceof SupportVectorMachineModel ) {
                if ( ! svmLoaded ) {
                    for ( String ntempl : SVM_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    svmLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "PMML-SVM" : "PMML";
            }

            if ( o instanceof TreeModel ) {
                if ( ! treeLoaded ) {
                    for ( String ntempl : TREE_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    treeLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "PMML-Tree" : "PMML";
            }

            if ( o instanceof RegressionModel ) {
                if ( ! simpleRegLoaded ) {
                    for ( String ntempl : SIMPLEREG_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    simpleRegLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "PMML-Regression" : "PMML";
            }

            if ( o instanceof Scorecard ) {
                if ( ! scorecardLoaded ) {
                    for ( String ntempl : SCORECARD_TEMPLATES ) {
                        prepareTemplate( ntempl );
                    }
                    scorecardLoaded = true;
                }
                chosenKieBase = chosenKieBase == null ? "PMML-Scorecard" : "PMML";
            }

        }

        // no recognized model: fall back to the minimal base
        if ( chosenKieBase == null ) {
            chosenKieBase = "PMML-Base";
        }
        return kieContainer.getKieBase( chosenKieBase );
    }
private static void prepareTemplate( String ntempl ) {
try {
String path = TEMPLATE_PATH + ntempl;
Resource res = ResourceFactory.newClassPathResource(path, PMML4Compiler.class);
if ( res != null ) {
InputStream stream = res.getInputStream();
if ( stream != null ) {
registry.addNamedTemplate( path.substring(path.lastIndexOf('/') + 1),
TemplateCompiler.compileTemplate(stream));
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
public String compile( String resource, ClassLoader classLoader ) {
String theory = null;
Resource cpr = new ClassPathResource( resource );
try {
theory = compile( cpr.getInputStream(), classLoader );
} catch ( IOException e ) {
results.add( new PMMLError( e.toString() ) );
e.printStackTrace();
}
return theory;
}
public Resource[] transform( Resource resource, ClassLoader classLoader ) {
String theory = null;
try {
theory = compile( resource.getInputStream(), classLoader );
} catch ( IOException e ) {
results.add( new PMMLError( e.toString() ) );
e.printStackTrace();
return new Resource[ 0 ];
}
return new Resource[] { buildOutputResource( resource, theory ) };
}
private Resource buildOutputResource( Resource resource, String theory ) {
ByteArrayResource byteArrayResource = new ByteArrayResource( theory.getBytes() );
byteArrayResource.setResourceType( ResourceType.PMML );
if ( resource.getSourcePath() != null ) {
String originalPath = resource.getSourcePath();
int start = originalPath.lastIndexOf( File.separator );
byteArrayResource.setSourcePath( "generated-sources/" + originalPath.substring( start ) + ".pmml" );
} else {
byteArrayResource.setSourcePath( "generated-sources/" + helper.getContext() + ".pmml" );
}
return byteArrayResource;
}
    /**
     * Compiles a PMML document read from the given stream.
     * Resets the per-instance results, unmarshals the document (PMML here is a
     * String constant declared earlier in this class — presumably the JAXB
     * context package name; confirm), and generates the rule text only if no
     * errors or warnings were recorded.
     *
     * NOTE(review): getResults() also includes the static visitorBuildResults,
     * so a warning from a previous compilation (e.g. "schema not available")
     * can make this return null even when unmarshalling succeeded — confirm
     * whether that is intended.
     */
    public String compile( InputStream source, ClassLoader classLoader ) {
        this.results = new ArrayList<KnowledgeBuilderResult>();
        PMML pmml = loadModel( PMML, source );
        helper.setResolver( classLoader );

        if ( getResults().isEmpty() ) {
            return generateTheory( pmml );
        } else {
            return null;
        }
    }
public List<KnowledgeBuilderResult> getResults() {
List<KnowledgeBuilderResult> combinedResults = new ArrayList<KnowledgeBuilderResult>( this.results );
combinedResults.addAll( visitorBuildResults );
return combinedResults;
}
    /**
     * Clears this instance's results. Note: the static visitorBuildResults
     * list (also surfaced by getResults()) is intentionally left untouched —
     * TODO confirm that is the desired behavior.
     */
    @Override
    public void clearResults() {
        this.results.clear();
    }
public void dump( String s, OutputStream ostream ) {
// write to outstream
Writer writer = null;
try {
writer = new OutputStreamWriter( ostream, "UTF-8" );
writer.write(s);
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
finally {
try {
if (writer != null) {
writer.flush();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
    /**
     * Imports a PMML source document, returning its JAXB object representation.
     * If the schema could not be loaded at construction time, a warning is
     * recorded and the document is unmarshalled without validation.
     *
     * @param model the JAXB context / PMML package name (classes derived from a specific schema)
     * @param source the stream providing the PMML document
     * @return the unmarshalled PMML object, or null on a JAXB failure (error recorded in results)
     */
    public PMML loadModel( String model, InputStream source ) {
        try {
            if ( schema == null ) {
                // NOTE(review): newInputStreamResource wraps the same stream that is
                // unmarshalled below — confirm the warning does not consume it.
                visitorBuildResults.add( new PMMLWarning( ResourceFactory.newInputStreamResource( source ), "Could not validate PMML document, schema not available" ) );
            }

            JAXBContext jc = JAXBContext.newInstance( model );
            Unmarshaller unmarshaller = jc.createUnmarshaller();
            if ( schema != null ) {
                unmarshaller.setSchema( schema );
            }

            return (PMML) unmarshaller.unmarshal( source );
        } catch ( JAXBException e ) {
            this.results.add( new PMMLError( e.toString() ) );
            return null;
        }
    }
public static void dumpModel( PMML model, OutputStream target ) {
try {
JAXBContext jc = JAXBContext.newInstance( PMML.class.getPackage().getName() );
Marshaller marshaller = jc.createMarshaller();
marshaller.setProperty( Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE );
marshaller.marshal( model, target );
} catch ( JAXBException e ) {
e.printStackTrace();
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.Session;
import com.facebook.presto.metadata.Metadata;
import com.facebook.presto.spi.GroupingProperty;
import com.facebook.presto.spi.LocalProperty;
import com.facebook.presto.spi.SortingProperty;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.Partitioning;
import com.facebook.presto.sql.planner.PartitioningScheme;
import com.facebook.presto.sql.planner.PlanNodeIdAllocator;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.SymbolAllocator;
import com.facebook.presto.sql.planner.optimizations.StreamPropertyDerivations.StreamProperties;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.ApplyNode;
import com.facebook.presto.sql.planner.plan.DistinctLimitNode;
import com.facebook.presto.sql.planner.plan.EnforceSingleRowNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.ExplainAnalyzeNode;
import com.facebook.presto.sql.planner.plan.IndexJoinNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LateralJoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.MarkDistinctNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanVisitor;
import com.facebook.presto.sql.planner.plan.RowNumberNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableFinishNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.TopNRowNumberNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static com.facebook.presto.SystemSessionProperties.getTaskConcurrency;
import static com.facebook.presto.SystemSessionProperties.getTaskWriterCount;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_ARBITRARY_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.FIXED_HASH_DISTRIBUTION;
import static com.facebook.presto.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static com.facebook.presto.sql.planner.optimizations.StreamPreferredProperties.any;
import static com.facebook.presto.sql.planner.optimizations.StreamPreferredProperties.defaultParallelism;
import static com.facebook.presto.sql.planner.optimizations.StreamPreferredProperties.exactlyPartitionedOn;
import static com.facebook.presto.sql.planner.optimizations.StreamPreferredProperties.fixedParallelism;
import static com.facebook.presto.sql.planner.optimizations.StreamPreferredProperties.singleStream;
import static com.facebook.presto.sql.planner.optimizations.StreamPropertyDerivations.StreamProperties.StreamDistribution.SINGLE;
import static com.facebook.presto.sql.planner.plan.ChildReplacer.replaceChildren;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Scope.LOCAL;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.GATHER;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.Type.REPARTITION;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.gatheringExchange;
import static com.facebook.presto.sql.planner.plan.ExchangeNode.partitionedExchange;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
public class AddLocalExchanges
implements PlanOptimizer
{
private final Metadata metadata;
private final SqlParser parser;
    /**
     * @param metadata metadata used when deriving stream properties
     * @param parser   SQL parser used when deriving stream properties
     */
    public AddLocalExchanges(Metadata metadata, SqlParser parser)
    {
        this.metadata = requireNonNull(metadata, "metadata is null");
        this.parser = requireNonNull(parser, "parser is null");
    }
    /**
     * Rewrites the plan, inserting local exchanges where a node's required
     * stream properties are not already satisfied by its children.
     * The initial parent preference is any(): the root imposes no constraint.
     */
    @Override
    public PlanNode optimize(PlanNode plan, Session session, Map<Symbol, Type> types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator)
    {
        PlanWithProperties result = plan.accept(new Rewriter(symbolAllocator, idAllocator, session), any());
        return result.getNode();
    }
private class Rewriter
extends PlanVisitor<PlanWithProperties, StreamPreferredProperties>
{
private final PlanNodeIdAllocator idAllocator;
private final Session session;
private final Map<Symbol, Type> types;
public Rewriter(SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator, Session session)
{
this.types = ImmutableMap.copyOf(symbolAllocator.getTypes());
this.idAllocator = idAllocator;
this.session = session;
}
        /**
         * Default behavior: strip the parent's specific preference but keep its
         * parallelism choice when planning children.
         */
        @Override
        protected PlanWithProperties visitPlan(PlanNode node, StreamPreferredProperties parentPreferences)
        {
            return planAndEnforceChildren(
                    node,
                    parentPreferences.withoutPreference().withDefaultParallelism(session),
                    parentPreferences.withDefaultParallelism(session));
        }
        // ApplyNode is expected to be rewritten away before this optimizer runs.
        @Override
        public PlanWithProperties visitApply(ApplyNode node, StreamPreferredProperties parentPreferences)
        {
            throw new IllegalStateException("Unexpected node: " + node.getClass().getName());
        }
        // LateralJoinNode is expected to be rewritten away before this optimizer runs.
        @Override
        public PlanWithProperties visitLateralJoin(LateralJoinNode node, StreamPreferredProperties parentPreferences)
        {
            throw new IllegalStateException("Unexpected node: " + node.getClass().getName());
        }
        /**
         * Output must not reorder rows, so children are planned order-sensitively
         * with no other preference.
         */
        @Override
        public PlanWithProperties visitOutput(OutputNode node, StreamPreferredProperties parentPreferences)
        {
            return planAndEnforceChildren(
                    node,
                    any().withOrderSensitivity(),
                    any().withOrderSensitivity());
        }
        @Override
        public PlanWithProperties visitExplainAnalyze(ExplainAnalyzeNode node, StreamPreferredProperties parentPreferences)
        {
            // Although explain analyze discards all output, we want to maintain the behavior
            // of a normal output node, so declare the node to be order sensitive
            return planAndEnforceChildren(
                    node,
                    singleStream().withOrderSensitivity(),
                    singleStream().withOrderSensitivity());
        }
//
// Nodes that always require a single stream
//
        @Override
        public PlanWithProperties visitSort(SortNode node, StreamPreferredProperties parentPreferences)
        {
            // sort requires that all data be in one stream
            // this node changes the input organization completely, so we do not pass through parent preferences
            return planAndEnforceChildren(node, singleStream(), defaultParallelism(session));
        }
        @Override
        public PlanWithProperties visitTableFinish(TableFinishNode node, StreamPreferredProperties parentPreferences)
        {
            // table commit requires that all data be in one stream
            // this node changes the input organization completely, so we do not pass through parent preferences
            return planAndEnforceChildren(node, singleStream(), defaultParallelism(session));
        }
@Override
public PlanWithProperties visitTopN(TopNNode node, StreamPreferredProperties parentPreferences)
{
if (node.getStep().equals(TopNNode.Step.PARTIAL)) {
return planAndEnforceChildren(
node,
parentPreferences.withoutPreference().withDefaultParallelism(session),
parentPreferences.withDefaultParallelism(session));
}
// final topN requires that all data be in one stream
// also, a final changes the input organization completely, so we do not pass through parent preferences
return planAndEnforceChildren(
node,
singleStream(),
defaultParallelism(session));
}
@Override
public PlanWithProperties visitLimit(LimitNode node, StreamPreferredProperties parentPreferences)
{
if (node.isPartial()) {
return planAndEnforceChildren(
node,
parentPreferences.withoutPreference().withDefaultParallelism(session),
parentPreferences.withDefaultParallelism(session));
}
// final limit requires that all data be in one stream
// also, a final changes the input organization completely, so we do not pass through parent preferences
return planAndEnforceChildren(
node,
singleStream(),
defaultParallelism(session));
}
        /**
         * Partial distinct-limit passes parent preferences through; the final
         * step requires a single stream (and drops parent preferences).
         */
        @Override
        public PlanWithProperties visitDistinctLimit(DistinctLimitNode node, StreamPreferredProperties parentPreferences)
        {
            // final limit requires that all data be in one stream
            StreamPreferredProperties requiredProperties;
            StreamPreferredProperties preferredProperties;
            if (node.isPartial()) {
                requiredProperties = parentPreferences.withoutPreference().withDefaultParallelism(session);
                preferredProperties = parentPreferences.withDefaultParallelism(session);
            }
            else {
                // a final changes the input organization completely, so we do not pass through parent preferences
                requiredProperties = singleStream();
                preferredProperties = defaultParallelism(session);
            }

            return planAndEnforceChildren(node, requiredProperties, preferredProperties);
        }
        // Single-row enforcement must observe all input, so it needs one stream.
        @Override
        public PlanWithProperties visitEnforceSingleRow(EnforceSingleRowNode node, StreamPreferredProperties parentPreferences)
        {
            return planAndEnforceChildren(node, singleStream(), defaultParallelism(session));
        }
//
// Nodes that require parallel streams to be partitioned
//
        /**
         * SINGLE-step aggregation: a global aggregation (some grouping set is
         * empty) needs one stream; otherwise the child must be partitioned on
         * the symbols common to every grouping set.
         */
        @Override
        public PlanWithProperties visitAggregation(AggregationNode node, StreamPreferredProperties parentPreferences)
        {
            StreamPreferredProperties requiredProperties;
            StreamPreferredProperties preferredChildProperties;

            checkState(node.getStep() == AggregationNode.Step.SINGLE, "step of aggregation is expected to be SINGLE, but it is %s", node.getStep());

            // aggregations would benefit from the finals being hash partitioned on groupId, however, we need to gather because the final HashAggregationOperator
            // needs to know whether input was received at the query level.
            if (node.getGroupingSets().stream().anyMatch(List::isEmpty)) {
                return planAndEnforceChildren(node, singleStream(), defaultParallelism(session));
            }

            // intersection of all grouping sets: symbols every set groups on
            HashSet<Symbol> partitioningRequirement = new HashSet<>(node.getGroupingSets().get(0));
            for (int i = 1; i < node.getGroupingSets().size(); i++) {
                partitioningRequirement.retainAll(node.getGroupingSets().get(i));
            }

            // NOTE(review): required and preferred are built identically here
            requiredProperties = parentPreferences.withDefaultParallelism(session).withPartitioning(partitioningRequirement);
            preferredChildProperties = parentPreferences.withDefaultParallelism(session)
                    .withPartitioning(partitioningRequirement);
            return planAndEnforceChildren(node, requiredProperties, preferredChildProperties);
        }
        /**
         * Plans the window's source partitioned on the partition-by symbols,
         * then computes which partitioning/ordering requirements the child's
         * stream already satisfies so the WindowNode can skip redundant work
         * (prePartitionedInputs / preSortedOrderPrefix).
         */
        @Override
        public PlanWithProperties visitWindow(WindowNode node, StreamPreferredProperties parentPreferences)
        {
            StreamPreferredProperties childRequirements = parentPreferences
                    .constrainTo(node.getSource().getOutputSymbols())
                    .withDefaultParallelism(session)
                    .withPartitioning(node.getPartitionBy());

            PlanWithProperties child = planAndEnforce(node.getSource(), childRequirements, childRequirements);

            // desired local properties: grouped by partition keys, then sorted by the order-by keys
            List<LocalProperty<Symbol>> desiredProperties = new ArrayList<>();
            if (!node.getPartitionBy().isEmpty()) {
                desiredProperties.add(new GroupingProperty<>(node.getPartitionBy()));
            }
            for (Symbol symbol : node.getOrderBy()) {
                desiredProperties.add(new SortingProperty<>(symbol, node.getOrderings().get(symbol)));
            }
            Iterator<Optional<LocalProperty<Symbol>>> matchIterator = LocalProperties.match(child.getProperties().getLocalProperties(), desiredProperties).iterator();

            Set<Symbol> prePartitionedInputs = ImmutableSet.of();
            if (!node.getPartitionBy().isEmpty()) {
                // the unmatched remainder of the grouping requirement tells us which
                // partition symbols the child is NOT already organized by
                Optional<LocalProperty<Symbol>> groupingRequirement = matchIterator.next();
                Set<Symbol> unPartitionedInputs = groupingRequirement.map(LocalProperty::getColumns).orElse(ImmutableSet.of());
                prePartitionedInputs = node.getPartitionBy().stream()
                        .filter(symbol -> !unPartitionedInputs.contains(symbol))
                        .collect(toImmutableSet());
            }

            // count leading sort properties already satisfied (an absent match
            // result means the desired property is fully satisfied)
            int preSortedOrderPrefix = 0;
            if (prePartitionedInputs.equals(ImmutableSet.copyOf(node.getPartitionBy()))) {
                while (matchIterator.hasNext() && !matchIterator.next().isPresent()) {
                    preSortedOrderPrefix++;
                }
            }

            WindowNode result = new WindowNode(
                    node.getId(),
                    child.getNode(),
                    node.getSpecification(),
                    node.getWindowFunctions(),
                    node.getHashSymbol(),
                    prePartitionedInputs,
                    preSortedOrderPrefix);

            return deriveProperties(result, child.getProperties());
        }
        @Override
        public PlanWithProperties visitMarkDistinct(MarkDistinctNode node, StreamPreferredProperties parentPreferences)
        {
            // mark distinct requires that all data partitioned on the distinct symbols
            StreamPreferredProperties requiredProperties = parentPreferences.withDefaultParallelism(session).withPartitioning(node.getDistinctSymbols());

            return planAndEnforceChildren(node, requiredProperties, requiredProperties);
        }
        @Override
        public PlanWithProperties visitRowNumber(RowNumberNode node, StreamPreferredProperties parentPreferences)
        {
            // row number requires that all data be partitioned on the partition-by symbols
            StreamPreferredProperties requiredProperties = parentPreferences.withDefaultParallelism(session).withPartitioning(node.getPartitionBy());

            return planAndEnforceChildren(node, requiredProperties, requiredProperties);
        }
        @Override
        public PlanWithProperties visitTopNRowNumber(TopNRowNumberNode node, StreamPreferredProperties parentPreferences)
        {
            StreamPreferredProperties requiredProperties = parentPreferences.withDefaultParallelism(session);

            // final topN row number requires that all data be partitioned;
            // the partial step has no partitioning requirement
            if (!node.isPartial()) {
                requiredProperties = requiredProperties.withPartitioning(node.getPartitionBy());
            }

            return planAndEnforceChildren(node, requiredProperties, requiredProperties);
        }
//
// Table Writer
//
        /**
         * With multiple writers per task, the source must use fixed parallelism
         * so each writer gets its own stream; otherwise a single stream feeds
         * the one writer.
         */
        @Override
        public PlanWithProperties visitTableWriter(TableWriterNode node, StreamPreferredProperties parentPreferences)
        {
            StreamPreferredProperties requiredProperties;
            StreamPreferredProperties preferredProperties;
            if (getTaskWriterCount(session) > 1) {
                requiredProperties = fixedParallelism();
                preferredProperties = fixedParallelism();
            }
            else {
                requiredProperties = singleStream();
                preferredProperties = defaultParallelism(session);
            }

            return planAndEnforceChildren(node, requiredProperties, preferredProperties);
        }
//
// Exchanges
//
        @Override
        public PlanWithProperties visitExchange(ExchangeNode node, StreamPreferredProperties parentPreferences)
        {
            // only remote exchanges may exist in the input plan; local ones are created by this optimizer
            checkArgument(node.getScope() != LOCAL, "AddLocalExchanges can not process a plan containing a local exchange");
            // this node changes the input organization completely, so we do not pass through parent preferences
            return planAndEnforceChildren(node, any(), defaultParallelism(session));
        }
@Override
public PlanWithProperties visitUnion(UnionNode node, StreamPreferredProperties preferredProperties)
{
// Union is replaced with an exchange which does not retain streaming properties from the children
List<PlanWithProperties> sourcesWithProperties = node.getSources().stream()
.map(source -> source.accept(this, defaultParallelism(session)))
.collect(toImmutableList());
List<PlanNode> sources = sourcesWithProperties.stream()
.map(PlanWithProperties::getNode)
.collect(toImmutableList());
List<StreamProperties> inputProperties = sourcesWithProperties.stream()
.map(PlanWithProperties::getProperties)
.collect(toImmutableList());
List<List<Symbol>> inputLayouts = new ArrayList<>(sources.size());
for (int i = 0; i < sources.size(); i++) {
inputLayouts.add(node.sourceOutputLayout(i));
}
if (preferredProperties.isSingleStreamPreferred()) {
ExchangeNode exchangeNode = new ExchangeNode(
idAllocator.getNextId(),
GATHER,
LOCAL,
new PartitioningScheme(Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()), node.getOutputSymbols()),
sources,
inputLayouts);
return deriveProperties(exchangeNode, inputProperties);
}
Optional<List<Symbol>> preferredPartitionColumns = preferredProperties.getPartitioningColumns();
if (preferredPartitionColumns.isPresent()) {
ExchangeNode exchangeNode = new ExchangeNode(
idAllocator.getNextId(),
REPARTITION,
LOCAL,
new PartitioningScheme(
Partitioning.create(FIXED_HASH_DISTRIBUTION, preferredPartitionColumns.get()),
node.getOutputSymbols(),
Optional.empty()),
sources,
inputLayouts);
return deriveProperties(exchangeNode, inputProperties);
}
// multiple streams preferred
ExchangeNode result = new ExchangeNode(
idAllocator.getNextId(),
REPARTITION,
LOCAL,
new PartitioningScheme(Partitioning.create(FIXED_ARBITRARY_DISTRIBUTION, ImmutableList.of()), node.getOutputSymbols()),
sources,
inputLayouts);
ExchangeNode exchangeNode = result;
return deriveProperties(exchangeNode, inputProperties);
}
//
// Joins
//
        /**
         * Probe side keeps the parent's preferences; the build side is consumed
         * completely, so it is either partitioned on the build equi-join symbols
         * (concurrent tasks) or gathered into a single stream.
         */
        @Override
        public PlanWithProperties visitJoin(JoinNode node, StreamPreferredProperties parentPreferences)
        {
            PlanWithProperties probe = planAndEnforce(
                    node.getLeft(),
                    defaultParallelism(session),
                    parentPreferences.constrainTo(node.getLeft().getOutputSymbols()).withDefaultParallelism(session));

            // this build consumes the input completely, so we do not pass through parent preferences
            List<Symbol> buildHashSymbols = Lists.transform(node.getCriteria(), JoinNode.EquiJoinClause::getRight);
            StreamPreferredProperties buildPreference;
            if (getTaskConcurrency(session) > 1) {
                buildPreference = exactlyPartitionedOn(buildHashSymbols);
            }
            else {
                buildPreference = singleStream();
            }
            PlanWithProperties build = planAndEnforce(node.getRight(), buildPreference, buildPreference);

            return rebaseAndDeriveProperties(node, ImmutableList.of(probe, build));
        }
        @Override
        public PlanWithProperties visitSemiJoin(SemiJoinNode node, StreamPreferredProperties parentPreferences)
        {
            PlanWithProperties source = planAndEnforce(
                    node.getSource(),
                    defaultParallelism(session),
                    parentPreferences.constrainTo(node.getSource().getOutputSymbols()).withDefaultParallelism(session));

            // this filter source consumes the input completely, so we do not pass through parent preferences
            PlanWithProperties filteringSource = planAndEnforce(node.getFilteringSource(), singleStream(), singleStream());

            return rebaseAndDeriveProperties(node, ImmutableList.of(source, filteringSource));
        }
        @Override
        public PlanWithProperties visitIndexJoin(IndexJoinNode node, StreamPreferredProperties parentPreferences)
        {
            PlanWithProperties probe = planAndEnforce(
                    node.getProbeSource(),
                    defaultParallelism(session),
                    parentPreferences.constrainTo(node.getProbeSource().getOutputSymbols()).withDefaultParallelism(session));

            // index source does not support local parallel and must produce a single stream
            StreamProperties indexStreamProperties = derivePropertiesRecursively(node.getIndexSource());
            checkArgument(indexStreamProperties.getDistribution() == SINGLE, "index source must be single stream");
            // the index source is left untouched — only its properties are derived
            PlanWithProperties index = new PlanWithProperties(node.getIndexSource(), indexStreamProperties);

            return rebaseAndDeriveProperties(node, ImmutableList.of(probe, index));
        }
//
// Helpers
//
        /**
         * Plans and enforces each child of the node, constraining the given
         * properties to the symbols each child actually produces, then
         * reattaches the children and derives the node's stream properties.
         */
        private PlanWithProperties planAndEnforceChildren(PlanNode node, StreamPreferredProperties requiredProperties, StreamPreferredProperties preferredProperties)
        {
            // plan and enforce each child, but strip any requirement not in terms of symbols produced from the child
            // Note: this assumes the child uses the same symbols as the parent
            List<PlanWithProperties> children = node.getSources().stream()
                    .map(source -> planAndEnforce(
                            source,
                            requiredProperties.constrainTo(source.getOutputSymbols()),
                            preferredProperties.constrainTo(source.getOutputSymbols())))
                    .collect(toImmutableList());

            return rebaseAndDeriveProperties(node, children);
        }
private PlanWithProperties planAndEnforce(PlanNode node, StreamPreferredProperties requiredProperties, StreamPreferredProperties preferredProperties)
{
// verify properties are in terms of symbols produced by the node
List<Symbol> outputSymbols = node.getOutputSymbols();
checkArgument(requiredProperties.getPartitioningColumns().map(outputSymbols::containsAll).orElse(true));
checkArgument(preferredProperties.getPartitioningColumns().map(outputSymbols::containsAll).orElse(true));
// plan the node using the preferred properties
PlanWithProperties result = node.accept(this, preferredProperties);
// enforce the required properties
result = enforce(result, requiredProperties);
return result;
}
        /**
         * If the plan already satisfies the required properties it is returned
         * unchanged; otherwise a local exchange is inserted: a gathering
         * exchange for single-stream requirements, a partitioned exchange
         * (arbitrary or hash) for parallel requirements, and a gathering
         * exchange as the fallback.
         */
        private PlanWithProperties enforce(PlanWithProperties planWithProperties, StreamPreferredProperties requiredProperties)
        {
            if (requiredProperties.isSatisfiedBy(planWithProperties.getProperties())) {
                return planWithProperties;
            }

            if (requiredProperties.isSingleStreamPreferred()) {
                ExchangeNode exchangeNode = gatheringExchange(idAllocator.getNextId(), LOCAL, planWithProperties.getNode());
                return deriveProperties(exchangeNode, planWithProperties.getProperties());
            }

            Optional<List<Symbol>> requiredPartitionColumns = requiredProperties.getPartitioningColumns();
            if (!requiredPartitionColumns.isPresent()) {
                // unpartitioned parallel streams required
                ExchangeNode exchangeNode = partitionedExchange(
                        idAllocator.getNextId(),
                        LOCAL,
                        planWithProperties.getNode(),
                        new PartitioningScheme(Partitioning.create(FIXED_ARBITRARY_DISTRIBUTION, ImmutableList.of()), planWithProperties.getNode().getOutputSymbols()));

                return deriveProperties(exchangeNode, planWithProperties.getProperties());
            }

            if (requiredProperties.isParallelPreferred()) {
                // partitioned parallel streams required
                ExchangeNode exchangeNode = partitionedExchange(
                        idAllocator.getNextId(),
                        LOCAL,
                        planWithProperties.getNode(),
                        requiredPartitionColumns.get(),
                        Optional.empty());
                return deriveProperties(exchangeNode, planWithProperties.getProperties());
            }

            // no explicit parallel requirement, so gather to a single stream
            ExchangeNode exchangeNode = gatheringExchange(
                    idAllocator.getNextId(),
                    LOCAL,
                    planWithProperties.getNode());
            return deriveProperties(exchangeNode, planWithProperties.getProperties());
        }
private PlanWithProperties rebaseAndDeriveProperties(PlanNode node, List<PlanWithProperties> children)
{
PlanNode result = replaceChildren(
node,
children.stream()
.map(PlanWithProperties::getNode)
.collect(toList()));
List<StreamProperties> inputProperties = children.stream()
.map(PlanWithProperties::getProperties)
.collect(toImmutableList());
return deriveProperties(result, inputProperties);
}
        /** Derives stream properties for a node with a single input. */
        private PlanWithProperties deriveProperties(PlanNode result, StreamProperties inputProperties)
        {
            return new PlanWithProperties(result, StreamPropertyDerivations.deriveProperties(result, inputProperties, metadata, session, types, parser));
        }
        /** Derives stream properties for a node from its inputs' properties. */
        private PlanWithProperties deriveProperties(PlanNode result, List<StreamProperties> inputProperties)
        {
            return new PlanWithProperties(result, StreamPropertyDerivations.deriveProperties(result, inputProperties, metadata, session, types, parser));
        }
        /**
         * Derives stream properties for an untouched subtree by recursing
         * bottom-up through its sources.
         */
        private StreamProperties derivePropertiesRecursively(PlanNode node)
        {
            List<StreamProperties> inputProperties = node.getSources().stream()
                    .map(this::derivePropertiesRecursively)
                    .collect(toImmutableList());
            return StreamPropertyDerivations.deriveProperties(node, inputProperties, metadata, session, types, parser);
        }
}
/**
 * Immutable pair of a plan node and the stream properties derived for it.
 */
private static class PlanWithProperties
{
    private final PlanNode node;
    private final StreamProperties properties;

    public PlanWithProperties(PlanNode node, StreamProperties properties)
    {
        this.node = requireNonNull(node, "node is null");
        // Name the parameter in the message, consistent with the "node" check above
        // (was the type name "StreamProperties is null").
        this.properties = requireNonNull(properties, "properties is null");
    }

    public PlanNode getNode()
    {
        return node;
    }

    public StreamProperties getProperties()
    {
        return properties;
    }
}
}
| |
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2013 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.forceduser;
import java.awt.event.ActionEvent;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.ImageIcon;
import javax.swing.JToggleButton;
import org.apache.commons.configuration.Configuration;
import org.apache.log4j.Logger;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.control.Control;
import org.parosproxy.paros.db.RecordContext;
import org.parosproxy.paros.extension.ExtensionAdaptor;
import org.parosproxy.paros.extension.ExtensionHook;
import org.parosproxy.paros.model.Model;
import org.parosproxy.paros.model.Session;
import org.parosproxy.paros.network.HttpMessage;
import org.parosproxy.paros.network.HttpSender;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.extension.api.API;
import org.zaproxy.zap.extension.users.ExtensionUserManagement;
import org.zaproxy.zap.model.Context;
import org.zaproxy.zap.model.ContextDataFactory;
import org.zaproxy.zap.network.HttpSenderListener;
import org.zaproxy.zap.users.User;
import org.zaproxy.zap.view.AbstractContextPropertiesPanel;
import org.zaproxy.zap.view.ContextPanelFactory;
import org.zaproxy.zap.view.ZapToggleButton;
/**
* The ForcedUser Extension allows ZAP user to force all requests that correspond to a given Context
* to be sent from the point of view of a User.
*/
/**
 * The ForcedUser Extension allows ZAP user to force all requests that correspond to a given Context
 * to be sent from the point of view of a User.
 */
public class ExtensionForcedUser extends ExtensionAdaptor implements ContextPanelFactory, HttpSenderListener,
        ContextDataFactory {

    /** The Constant EXTENSION DEPENDENCIES. */
    private static final List<Class<?>> EXTENSION_DEPENDENCIES;

    static {
        // Prepare a list of Extensions on which this extension depends
        List<Class<?>> dependencies = new ArrayList<>(1);
        dependencies.add(ExtensionUserManagement.class);
        EXTENSION_DEPENDENCIES = Collections.unmodifiableList(dependencies);
    }

    // Toolbar icons for the off/on states of the "forced user mode" toggle button.
    private static final String FORCED_USER_MODE_OFF_ICON_RESOURCE = "/resource/icon/16/forcedUserOff.png";
    private static final String FORCED_USER_MODE_ON_ICON_RESOURCE = "/resource/icon/16/forcedUserOn.png";

    // Internationalized tooltips for the toggle button, one per button state.
    private static final String BUTTON_LABEL_ON = Constant.messages.getString("forceduser.toolbar.button.on");
    private static final String BUTTON_LABEL_OFF = Constant.messages
            .getString("forceduser.toolbar.button.off");
    private static final String BUTTON_LABEL_DISABLED = Constant.messages
            .getString("forceduser.toolbar.button.disabled");

    /** The NAME of the extension. */
    public static final String NAME = "ExtensionForcedUser";

    /** The Constant log. */
    private static final Logger log = Logger.getLogger(ExtensionForcedUser.class);

    /** The map of context panels. */
    private Map<Integer, ContextForcedUserPanel> contextPanelsMap = new HashMap<>();

    /** The map of forced users for each context. */
    private Map<Integer, User> contextForcedUsersMap = new HashMap<>();

    // Lazily resolved dependency; see getUserManagementExtension().
    private ExtensionUserManagement extensionUserManagement;

    // Whether requests in matching contexts are currently rewritten to the forced user.
    private boolean forcedUserModeEnabled = false;

    // Main-toolbar toggle; only created/used when a view (UI) is present.
    private ZapToggleButton forcedUserModeButton;

    // API implementor registered in hook().
    private ForcedUserAPI api;

    /**
     * Instantiates a new forced user extension.
     */
    public ExtensionForcedUser() {
        super();
        initialize();
    }

    /**
     * Initialize the extension.
     */
    private void initialize() {
        this.setName(NAME);
        this.setOrder(202);
    }

    /**
     * Wires this extension into ZAP: context-data factory, session-context panel
     * factory and toolbar button (UI only), HTTP sender listener and the API.
     */
    @Override
    public void hook(ExtensionHook extensionHook) {
        super.hook(extensionHook);
        // Register this where needed
        Model.getSingleton().addContextDataFactory(this);
        if (getView() != null) {
            // Factory for generating Session Context UserAuth panels
            getView().addContextPanelFactory(this);
            View.getSingleton().addMainToolbarButton(getForcedUserModeToggleButton());
        }
        // Register as Http Sender listener
        HttpSender.addListener(this);
        // Prepare API
        this.api = new ForcedUserAPI(this);
        API.getInstance().registerApiImplementor(api);
    }

    /** Syncs the toggle button's selected state with {@link #forcedUserModeEnabled} (UI only). */
    private void updateForcedUserModeToggleButtonEnabledState() {
        if (getView() != null) {
            forcedUserModeButton.setSelected(forcedUserModeEnabled);
        }
    }

    /** Enables/disables forced user mode and reflects the change on the toolbar button. */
    protected void setForcedUserModeEnabled(boolean forcedUserModeEnabled) {
        this.forcedUserModeEnabled = forcedUserModeEnabled;
        updateForcedUserModeToggleButtonEnabledState();
    }

    /**
     * Enables or disables the toggle button itself; disabling it also switches
     * forced user mode off and deselects the button.
     */
    private void setForcedUserModeToggleButtonState(boolean enabled) {
        if (enabled) {
            updateForcedUserModeToggleButtonEnabledState();
            this.getForcedUserModeToggleButton().setEnabled(true);
        } else {
            this.forcedUserModeEnabled = false;
            this.getForcedUserModeToggleButton().setSelected(false);
            this.getForcedUserModeToggleButton().setEnabled(false);
        }
    }

    /**
     * Enables the toggle button only while at least one context has a forced user;
     * disables it (and the mode) when none do.
     */
    private void updateForcedUserModeToggleButtonState() {
        if (contextForcedUsersMap.isEmpty()) {
            if (this.getForcedUserModeToggleButton().isEnabled())
                this.setForcedUserModeToggleButtonState(false);
        } else {
            if (!this.getForcedUserModeToggleButton().isEnabled())
                this.setForcedUserModeToggleButtonState(true);
        }
    }

    /** Lazily builds the main-toolbar toggle button for forced user mode. */
    private JToggleButton getForcedUserModeToggleButton() {
        if (forcedUserModeButton == null) {
            forcedUserModeButton = new ZapToggleButton();
            forcedUserModeButton.setIcon(new ImageIcon(ExtensionForcedUser.class
                    .getResource(FORCED_USER_MODE_OFF_ICON_RESOURCE)));
            forcedUserModeButton.setSelectedIcon(new ImageIcon(ExtensionForcedUser.class
                    .getResource(FORCED_USER_MODE_ON_ICON_RESOURCE)));
            forcedUserModeButton.setToolTipText(BUTTON_LABEL_OFF);
            forcedUserModeButton.setSelectedToolTipText(BUTTON_LABEL_ON);
            forcedUserModeButton.setDisabledToolTipText(BUTTON_LABEL_DISABLED);
            forcedUserModeButton.setEnabled(false); // Disable until login and one indicator flagged
            forcedUserModeButton.addActionListener(new java.awt.event.ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                    // Toggling the button directly drives the mode.
                    setForcedUserModeEnabled(getForcedUserModeToggleButton().isSelected());
                }
            });
        }
        return forcedUserModeButton;
    }

    /** Lazily looks up the (required) user-management extension from the extension loader. */
    protected ExtensionUserManagement getUserManagementExtension() {
        if (extensionUserManagement == null) {
            extensionUserManagement = (ExtensionUserManagement) Control.getSingleton().getExtensionLoader()
                    .getExtension(ExtensionUserManagement.NAME);
        }
        return extensionUserManagement;
    }

    /** Returns whether forced user mode is currently enabled. */
    public boolean isForcedUserModeEnabled() {
        return forcedUserModeEnabled;
    }

    /**
     * Sets the forced user for a context.
     *
     * @param contextId the context id
     * @param user the user
     */
    public void setForcedUser(int contextId, User user) {
        if (user != null)
            this.contextForcedUsersMap.put(contextId, user);
        else
            this.contextForcedUsersMap.remove(contextId);
        // Button availability tracks whether any context still has a forced user.
        this.updateForcedUserModeToggleButtonState();
    }

    /**
     * Sets the forced user for a context, based on the user id.
     *
     * @param contextId the context id
     * @param userId the user id
     * @throws IllegalStateException if no user was found that matches the provided id.
     */
    public void setForcedUser(int contextId, int userId) throws IllegalStateException {
        User user = getUserManagementExtension().getContextUserAuthManager(contextId).getUserById(userId);
        if (user == null)
            throw new IllegalStateException("No user matching the provided id was found.");
        setForcedUser(contextId, user);
    }

    /**
     * Gets the forced user for a context.
     *
     * @param contextId the context id
     * @return the forced user
     */
    public User getForcedUser(int contextId) {
        return this.contextForcedUsersMap.get(contextId);
    }

    @Override
    public List<Class<?>> getDependencies() {
        return EXTENSION_DEPENDENCIES;
    }

    /** Returns (creating and caching on first use) the forced-user panel for the context. */
    @Override
    public AbstractContextPropertiesPanel getContextPanel(Context context) {
        ContextForcedUserPanel panel = this.contextPanelsMap.get(context.getIndex());
        if (panel == null) {
            panel = new ContextForcedUserPanel(this, context.getIndex());
            this.contextPanelsMap.put(context.getIndex(), panel);
        }
        return panel;
    }

    @Override
    public URL getURL() {
        try {
            return new URL(Constant.ZAP_HOMEPAGE);
        } catch (MalformedURLException e) {
            return null;
        }
    }

    @Override
    public int getOrder() {
        // Make sure we load this extension after the user management extension so that we hook
        // after it so that we register as a ContextData factory later so that our loadContextData
        // is called after the Users' Extension so that the forced user was already loaded after a
        // session loading
        return ExtensionUserManagement.EXTENSION_ORDER + 10;
    }

    @Override
    public String getAuthor() {
        return Constant.ZAP_TEAM;
    }

    @Override
    public void discardContexts() {
        this.contextForcedUsersMap.clear();
        this.contextPanelsMap.clear();
        // Make sure the status of the toggle button is properly updated when changing the session
        updateForcedUserModeToggleButtonState();
    }

    @Override
    public void discardContext(Context ctx) {
        this.contextForcedUsersMap.remove(ctx.getIndex());
        this.contextPanelsMap.remove(ctx.getIndex());
        // Make sure the status of the toggle button is properly updated when changing the session
        updateForcedUserModeToggleButtonState();
    }

    @Override
    public int getListenerOrder() {
        // Later so any modifications or requested users are visible
        return 9998;
    }

    /**
     * If forced user mode is on and the message falls inside a context with a forced
     * user, tags the outgoing message with that user so it is sent from the user's
     * point of view.
     */
    @Override
    public void onHttpRequestSend(HttpMessage msg, int initiator, HttpSender sender) {
        // NOTE(review): the getResponseBody() == null check is unusual for a
        // request-send hook — confirm it is intentional and not meant to be a
        // request-body check.
        if (!forcedUserModeEnabled || msg.getResponseBody() == null || msg.getRequestHeader().isImage()
                || (initiator == HttpSender.AUTHENTICATION_INITIATOR || initiator == HttpSender.CHECK_FOR_UPDATES_INITIATOR)) {
            // Not relevant
            return;
        }
        // The message is already being sent from the POV of another user
        if (msg.getRequestingUser() != null)
            return;
        // Is the message in any of the contexts?
        List<Context> contexts = Model.getSingleton().getSession().getContexts();
        User requestingUser = null;
        for (Context context : contexts) {
            if (context.isInContext(msg.getRequestHeader().getURI().toString())) {
                // Is there enough info
                if (contextForcedUsersMap.containsKey(context.getIndex())) {
                    // First matching context with a forced user wins.
                    requestingUser = contextForcedUsersMap.get(context.getIndex());
                    break;
                }
            }
        }
        if (requestingUser == null || !requestingUser.isEnabled())
            return;
        if (log.isDebugEnabled()) {
            log.debug("Modifying request message (" + msg.getRequestHeader().getURI() + ") to match user: "
                    + requestingUser);
        }
        msg.setRequestingUser(requestingUser);
    }

    @Override
    public void onHttpResponseReceive(HttpMessage msg, int initiator, HttpSender sender) {
        // Nothing to do
    }

    /** Restores the forced user for a context from the session's persisted context data. */
    @Override
    public void loadContextData(Session session, Context context) {
        try {
            // Load the forced user id for this context
            List<String> forcedUserS = session.getContextDataStrings(context.getIndex(),
                    RecordContext.TYPE_FORCED_USER_ID);
            if (forcedUserS != null && forcedUserS.size() > 0) {
                int forcedUserId = Integer.parseInt(forcedUserS.get(0));
                setForcedUser(context.getIndex(), forcedUserId);
            }
        } catch (Exception e) {
            log.error("Unable to load forced user.", e);
        }
    }

    /** Persists the forced user id for a context, or clears it if none is set. */
    @Override
    public void persistContextData(Session session, Context context) {
        try {
            // Save only if we have anything to save
            if (getForcedUser(context.getIndex()) != null) {
                session.setContextData(context.getIndex(), RecordContext.TYPE_FORCED_USER_ID,
                        Integer.toString(getForcedUser(context.getIndex()).getId()));
                // Note: Do not persist whether the 'Forced User Mode' is enabled as there's no need
                // for this and the mode can be easily enabled/disabled directly
            } else {
                // If we don't have a forced user, force deletion of any previous values
                session.clearContextDataForType(context.getIndex(), RecordContext.TYPE_FORCED_USER_ID);
            }
        } catch (Exception e) {
            log.error("Unable to persist forced user.", e);
        }
    }

    /** Exports the forced user id (or -1 when none) into the context configuration. */
    @Override
    public void exportContextData(Context ctx, Configuration config) {
        User user = getForcedUser(ctx.getIndex());
        if (user != null) {
            config.setProperty("context.forceduser", user.getId());
        } else {
            config.setProperty("context.forceduser", -1);
        }
    }

    /** Imports the forced user id from the context configuration; -1 means "no forced user". */
    @Override
    public void importContextData(Context ctx, Configuration config) {
        int id = config.getInt("context.forceduser");
        if (id >= 0) {
            this.setForcedUser(ctx.getIndex(), id);
        }
    }

    /**
     * No database tables used, so all supported
     */
    @Override
    public boolean supportsDb(String type) {
        return true;
    }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.MemoryCompactionPolicy;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* The ongoing MemStore Compaction manager, dispatches a solo running compaction and interrupts
* the compaction if requested. The compaction is interrupted and stopped by CompactingMemStore,
* for example when another compaction needs to be started.
* Prior to compaction the MemStoreCompactor evaluates
* the compacting ratio and aborts the compaction if it is not worthy.
* The MemStoreScanner is used to traverse the compaction pipeline. The MemStoreScanner
* is included in internal store scanner, where all compaction logic is implemented.
* Threads safety: It is assumed that the compaction pipeline is immutable,
* therefore no special synchronization is required.
*/
@InterfaceAudience.Private
public class MemStoreCompactor {

    // The upper bound for the number of segments we store in the pipeline prior to merging.
    // This constant is subject to further experimentation.
    // The external setting of the compacting MemStore behaviour
    public static final String COMPACTING_MEMSTORE_THRESHOLD_KEY =
        "hbase.hregion.compacting.pipeline.segments.limit";

    // remaining with the same ("infinity") but configurable default for now
    public static final int COMPACTING_MEMSTORE_THRESHOLD_DEFAULT = 1;

    public static final long DEEP_OVERHEAD = ClassSize
        .align(ClassSize.OBJECT
            + 4 * ClassSize.REFERENCE
            // compactingMemStore, versionedList, action, isInterrupted (the reference)
            // "action" is an enum and thus it is a class with static final constants,
            // so counting only the size of the reference to it and not the size of the internals
            + 2 * Bytes.SIZEOF_INT // compactionKVMax, pipelineThreshold
            + ClassSize.ATOMIC_BOOLEAN // isInterrupted (the internals)
        );

    private static final Log LOG = LogFactory.getLog(MemStoreCompactor.class);

    private final int pipelineThreshold; // the limit on the number of the segments in the pipeline

    private CompactingMemStore compactingMemStore;

    // a static version of the segment list from the pipeline
    private VersionedSegmentsList versionedList;

    // a flag raised when compaction is requested to stop
    private final AtomicBoolean isInterrupted = new AtomicBoolean(false);

    // the limit to the size of the groups to be later provided to MemStoreSegmentsIterator
    private final int compactionKVMax;

    /**
     * Types of actions to be done on the pipeline upon MemStoreCompaction invocation.
     * Note that every value covers the previous ones, i.e. if MERGE is the action it implies
     * that the youngest segment is going to be flatten anyway.
     */
    public enum Action {
        NOOP,
        FLATTEN, // flatten the youngest segment in the pipeline
        MERGE, // merge all the segments in the pipeline into one
        COMPACT // copy-compact the data of all the segments in the pipeline
    }

    // Field default; overwritten in the constructor by initiateAction() per the
    // configured MemoryCompactionPolicy.
    private Action action = Action.FLATTEN;

    /**
     * Creates the compactor for the given memstore, reading the per-compaction KV
     * limit and the pipeline-segment threshold from the store's configuration.
     */
    public MemStoreCompactor(CompactingMemStore compactingMemStore,
        MemoryCompactionPolicy compactionPolicy) {
        this.compactingMemStore = compactingMemStore;
        this.compactionKVMax = compactingMemStore.getConfiguration()
            .getInt(HConstants.COMPACTION_KV_MAX, HConstants.COMPACTION_KV_MAX_DEFAULT);
        initiateAction(compactionPolicy);
        pipelineThreshold = // get the limit on the number of the segments in the pipeline
            compactingMemStore.getConfiguration().getInt(COMPACTING_MEMSTORE_THRESHOLD_KEY,
                COMPACTING_MEMSTORE_THRESHOLD_DEFAULT);
    }

    /**----------------------------------------------------------------------
     * The request to dispatch the compaction asynchronous task.
     * The method returns true if compaction was successfully dispatched, or false if there
     * is already an ongoing compaction or no segments to compact.
     */
    public boolean start() throws IOException {
        if (!compactingMemStore.hasImmutableSegments()) { // no compaction on empty pipeline
            return false;
        }
        // get a snapshot of the list of the segments from the pipeline,
        // this local copy of the list is marked with specific version
        versionedList = compactingMemStore.getImmutableSegments();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Starting the In-Memory Compaction for store "
                + compactingMemStore.getStore().getColumnFamilyName());
        }
        // NOTE(review): despite the javadoc, the compaction runs synchronously here
        // in the calling thread — confirm the "asynchronous task" wording.
        doCompaction();
        return true;
    }

    /**----------------------------------------------------------------------
     * The request to cancel the compaction asynchronous task
     * The compaction may still happen if the request was sent too late
     * Non-blocking request
     */
    public void stop() {
        isInterrupted.compareAndSet(false, true);
    }

    /**----------------------------------------------------------------------
     * The interface to check whether user requested the index-compaction
     */
    public boolean isIndexCompaction() {
        return (action == Action.MERGE);
    }

    /**----------------------------------------------------------------------
     * Reset the interruption indicator and clear the pointers in order to allow good
     * garbage collection
     */
    private void releaseResources() {
        isInterrupted.set(false);
        versionedList = null;
    }

    /**----------------------------------------------------------------------
     * Decide what to do with the new and old segments in the compaction pipeline.
     * Implements basic in-memory compaction policy.
     */
    private Action policy() {
        if (isInterrupted.get()) { // if the entire process is interrupted cancel flattening
            return Action.NOOP; // the compaction also doesn't start when interrupted
        }
        if (action == Action.COMPACT) { // compact according to the user request
            LOG.debug("In-Memory Compaction Pipeline for store " + compactingMemStore.getFamilyName()
                + " is going to be compacted to the " + compactingMemStore.getIndexType() + ". Number of"
                + " cells before compaction is " + versionedList.getNumOfCells());
            return Action.COMPACT;
        }
        // compaction shouldn't happen or doesn't worth it
        // limit the number of the segments in the pipeline
        int numOfSegments = versionedList.getNumOfSegments();
        if (numOfSegments > pipelineThreshold) {
            LOG.debug("In-Memory Compaction Pipeline for store " + compactingMemStore.getFamilyName()
                + " is going to be merged to the " + compactingMemStore.getIndexType()
                + ", as there are " + numOfSegments + " segments");
            return Action.MERGE; // to avoid too many segments, merge now
        }
        // if nothing of the above, then just flatten the newly joined segment
        LOG.debug("The youngest segment in the in-Memory Compaction Pipeline for store "
            + compactingMemStore.getFamilyName() + " is going to be flattened to the "
            + compactingMemStore.getIndexType());
        return Action.FLATTEN;
    }

    /**----------------------------------------------------------------------
     * The worker thread performs the compaction asynchronously.
     * The solo (per compactor) thread only reads the compaction pipeline.
     * There is at most one thread per memstore instance.
     */
    private void doCompaction() {
        ImmutableSegment result = null;
        boolean resultSwapped = false;
        Action nextStep = null;
        try {
            nextStep = policy();
            if (nextStep == Action.NOOP) {
                return;
            }
            if (nextStep == Action.FLATTEN) {
                // Youngest Segment in the pipeline is with SkipList index, make it flat
                compactingMemStore.flattenOneSegment(versionedList.getVersion());
                return;
            }
            // Create one segment representing all segments in the compaction pipeline,
            // either by compaction or by merge
            if (!isInterrupted.get()) {
                result = createSubstitution();
            }
            // Substitute the pipeline with one segment
            if (!isInterrupted.get()) {
                // Intentional assignment inside the condition: remember whether the
                // swap succeeded, so the finally block knows if result is now owned
                // by the pipeline.
                if (resultSwapped = compactingMemStore.swapCompactedSegments(
                    versionedList, result, (action==Action.MERGE))) {
                    // update the wal so it can be truncated and not get too long
                    compactingMemStore.updateLowestUnflushedSequenceIdInWAL(true); // only if greater
                }
            }
        } catch (IOException e) {
            // Treat an I/O failure as an interruption of this in-memory compaction.
            LOG.debug("Interrupting the MemStore in-memory compaction for store "
                + compactingMemStore.getFamilyName());
            Thread.currentThread().interrupt();
        } finally {
            // For the MERGE case, if the result was created, but swap didn't happen,
            // we DON'T need to close the result segment (meaning its MSLAB)!
            // Because closing the result segment means closing the chunks of all segments
            // in the compaction pipeline, which still have ongoing scans.
            if (nextStep != Action.MERGE) {
                if ((result != null) && (!resultSwapped)) {
                    result.close();
                }
            }
            releaseResources();
        }
    }

    /**----------------------------------------------------------------------
     * Creation of the ImmutableSegment either by merge or copy-compact of the segments of the
     * pipeline, based on the Compactor Iterator. The new ImmutableSegment is returned.
     */
    private ImmutableSegment createSubstitution() throws IOException {
        ImmutableSegment result = null;
        MemStoreSegmentsIterator iterator = null;
        switch (action) {
        case COMPACT:
            iterator =
                new MemStoreCompactorSegmentsIterator(versionedList.getStoreSegments(),
                    compactingMemStore.getComparator(),
                    compactionKVMax, compactingMemStore.getStore());
            result = SegmentFactory.instance().createImmutableSegmentByCompaction(
                compactingMemStore.getConfiguration(), compactingMemStore.getComparator(), iterator,
                versionedList.getNumOfCells(), compactingMemStore.getIndexType());
            iterator.close();
            break;
        case MERGE:
            iterator =
                new MemStoreMergerSegmentsIterator(versionedList.getStoreSegments(),
                    compactingMemStore.getComparator(),
                    compactionKVMax);
            result = SegmentFactory.instance().createImmutableSegmentByMerge(
                compactingMemStore.getConfiguration(), compactingMemStore.getComparator(), iterator,
                versionedList.getNumOfCells(), versionedList.getStoreSegments(),
                compactingMemStore.getIndexType());
            iterator.close();
            break;
        default: throw new RuntimeException("Unknown action " + action); // sanity check
        }
        return result;
    }

    /**----------------------------------------------------------------------
     * Initiate the action according to user config. (Note: the field's initial
     * default is Action.FLATTEN; this method overwrites it per the policy.)
     */
    @VisibleForTesting
    void initiateAction(MemoryCompactionPolicy compType) {
        switch (compType){
        case NONE: action = Action.NOOP;
            break;
        case BASIC: action = Action.MERGE;
            break;
        case EAGER: action = Action.COMPACT;
            break;
        default:
            throw new RuntimeException("Unknown memstore type " + compType); // sanity check
        }
    }
}
| |
package com.marshmallowswisdom.liber.persistence;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.EntityTransaction;
import javax.persistence.Persistence;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;

import com.marshmallowswisdom.liber.domain.Article;
import com.marshmallowswisdom.liber.domain.ArticleVersion;
import com.marshmallowswisdom.liber.domain.ContentFieldValue;
import com.marshmallowswisdom.liber.domain.Field;
import com.marshmallowswisdom.liber.domain.FieldValue;
import com.marshmallowswisdom.liber.domain.HierarchicalFieldValue;
import com.marshmallowswisdom.liber.domain.Type;
public class Repository {
private final EntityManagerFactory factory;
public Repository() {
factory = Persistence.createEntityManagerFactory( "liber" );
}
public ArticleVersion saveArticleVersion( ArticleVersion articleVersion ) {
final EntityManager entityManager = factory.createEntityManager();
final EntityTransaction transaction = entityManager.getTransaction();
transaction.begin();
articleVersion = entityManager.merge( articleVersion );
transaction.commit();
entityManager.close();
return articleVersion;
}
public Article saveNewArticle( Article article, ArticleVersion firstVersion ) {
final EntityManager entityManager = factory.createEntityManager();
final EntityTransaction transaction = entityManager.getTransaction();
transaction.begin();
firstVersion = entityManager.merge( firstVersion );
article = firstVersion.getArticle();
article.setLatestVersion( firstVersion );
transaction.commit();
entityManager.close();
return article;
}
public Article saveArticle( Article article ) {
final EntityManager entityManager = factory.createEntityManager();
final EntityTransaction transaction = entityManager.getTransaction();
transaction.begin();
article = entityManager.merge( article );
transaction.commit();
entityManager.close();
return article;
}
public Article retrieveArticle( final int id ) {
final EntityManager entityManager = factory.createEntityManager();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Article> query = criteriaBuilder.createQuery( Article.class );
final Root<Article> root = query.from( Article.class );
query.where( criteriaBuilder.equal( root.get( "id" ), id ) );
final Article article = entityManager.createQuery( query ).getSingleResult();
entityManager.close();
return article;
}
public List<Article> retrieveArticles( final Map<String, String> criteria ) {
final EntityManager entityManager = factory.createEntityManager();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Article> query = criteriaBuilder.createQuery( Article.class );
final Root<Article> root = query.from( Article.class );
Join<Article,ArticleVersion> version = root.join( "latestVersion", JoinType.LEFT );
for( String fieldName : criteria.keySet() ) {
Join<ArticleVersion,ContentFieldValue> value = version.join( "fieldValues",
JoinType.LEFT );
Join<ContentFieldValue,Field> field = value.join( "field", JoinType.LEFT );
query.where( criteriaBuilder.equal( field.get( "name" ), fieldName ) );
query.where( criteriaBuilder.equal( value.get( "value" ), criteria.get( fieldName ) ) );
}
final List<Article> articles = entityManager.createQuery( query ).getResultList();
entityManager.close();
return articles;
}
public void deleteArticle( final int id ) {
final EntityManager entityManager = factory.createEntityManager();
final Article article = entityManager.find( Article.class, id );
final EntityTransaction transaction = entityManager.getTransaction();
transaction.begin();
entityManager.remove( article );
transaction.commit();
entityManager.close();
}
public List<Field> retrieveFields() {
final EntityManager entityManager = factory.createEntityManager();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Field> query = criteriaBuilder.createQuery( Field.class );
final Root<Field> root = query.from( Field.class );
query.select( root );
final List<Field> fields = entityManager.createQuery( query ).getResultList();
entityManager.close();
return fields;
}
public Field retrieveField( final int id ) {
final EntityManager entityManager = factory.createEntityManager();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Field> query = criteriaBuilder.createQuery( Field.class );
final Root<Field> root = query.from( Field.class );
query.where( criteriaBuilder.equal( root.get( "id" ), id ) );
final Field field = entityManager.createQuery( query ).getSingleResult();
entityManager.close();
return field;
}
public Field saveField( final Field field ) {
final EntityManager entityManager = factory.createEntityManager();
final EntityTransaction transaction = entityManager.getTransaction();
transaction.begin();
final Field savedField = entityManager.merge( field );
transaction.commit();
entityManager.close();
return savedField;
}
public void deleteField( final int id ) {
final EntityManager entityManager = factory.createEntityManager();
final Field field = entityManager.find( Field.class, id );
final EntityTransaction transaction = entityManager.getTransaction();
transaction.begin();
entityManager.remove( field );
transaction.commit();
entityManager.close();
}
public HierarchicalFieldValue retrieveFieldValue( final int id ) {
final EntityManager entityManager = factory.createEntityManager();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<HierarchicalFieldValue> query =
criteriaBuilder.createQuery( HierarchicalFieldValue.class );
final Root<HierarchicalFieldValue> root = query.from( HierarchicalFieldValue.class );
root.fetch( "childValues", JoinType.LEFT );
query.where( criteriaBuilder.equal( root.get( "id" ), id ) );
final HierarchicalFieldValue value = entityManager.createQuery( query ).getSingleResult();
entityManager.close();
return value;
}
public FieldValue saveFieldValue( final FieldValue value ) {
final EntityManager entityManager = factory.createEntityManager();
final EntityTransaction transaction = entityManager.getTransaction();
transaction.begin();
final FieldValue savedValue = entityManager.merge( value );
transaction.commit();
entityManager.close();
return savedValue;
}
public void deleteFieldValue( final int id ) {
final EntityManager entityManager = factory.createEntityManager();
final EntityTransaction transaction = entityManager.getTransaction();
transaction.begin();
final FieldValue value = entityManager.find( FieldValue.class, id );
final Field field = value.getField();
field.removeValue( value );
entityManager.remove( value );
entityManager.merge( field );
transaction.commit();
entityManager.close();
}
public List<Type> retrieveTypes() {
final EntityManager entityManager = factory.createEntityManager();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Type> query = criteriaBuilder.createQuery( Type.class );
final Root<Type> root = query.from( Type.class );
query.select( root );
final List<Type> types = entityManager.createQuery( query ).getResultList();
entityManager.close();
return types;
}
public Type retrieveType( final int id ) {
final EntityManager entityManager = factory.createEntityManager();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Type> query = criteriaBuilder.createQuery( Type.class );
final Root<Type> root = query.from( Type.class );
root.fetch( "fields", JoinType.LEFT );
query.where( criteriaBuilder.equal( root.get( "id" ), id ) );
final Type type = entityManager.createQuery( query ).getSingleResult();
entityManager.close();
return type;
}
public Type retrieveTypeByName( final String name ) {
final EntityManager entityManager = factory.createEntityManager();
final CriteriaBuilder criteriaBuilder = entityManager.getCriteriaBuilder();
final CriteriaQuery<Type> query = criteriaBuilder.createQuery( Type.class );
final Root<Type> root = query.from( Type.class );
query.where( criteriaBuilder.equal( root.get( "name" ), name ) );
final Type type = entityManager.createQuery( query ).setMaxResults( 1 ).getSingleResult();
entityManager.close();
return type;
}
/**
 * Inserts or updates the given {@code Type} (JPA merge semantics).
 * <p>
 * Fix: the original leaked the {@code EntityManager} and left the
 * transaction open when merge/commit threw; cleanup now runs in a
 * {@code finally} block with rollback.
 *
 * @param type detached or new Type to persist
 * @return the managed copy returned by {@code merge}
 */
public Type saveType( final Type type ) {
    final EntityManager entityManager = factory.createEntityManager();
    final EntityTransaction transaction = entityManager.getTransaction();
    try {
        transaction.begin();
        final Type savedType = entityManager.merge( type );
        transaction.commit();
        return savedType;
    } finally {
        if ( transaction.isActive() ) {
            transaction.rollback();
        }
        entityManager.close();
    }
}
/**
 * Deletes the {@code Type} with the given id.
 * <p>
 * Fix: the original leaked the {@code EntityManager} and left the
 * transaction open on failure, and threw {@code IllegalArgumentException}
 * from {@code remove(null)} when the id did not exist. Cleanup now runs in
 * a {@code finally} block with rollback, and a missing id is a no-op.
 *
 * @param id primary key of the Type to delete
 */
public void deleteType( final int id ) {
    final EntityManager entityManager = factory.createEntityManager();
    final EntityTransaction transaction = entityManager.getTransaction();
    try {
        final Type type = entityManager.find( Type.class, id );
        transaction.begin();
        if ( type != null ) {
            entityManager.remove( type );
        }
        transaction.commit();
    } finally {
        if ( transaction.isActive() ) {
            transaction.rollback();
        }
        entityManager.close();
    }
}
}
| |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
/**
 * Describes where an Amazon EC2 instance runs within an EC2 region: its
 * availability zone, an optional placement group, and the VPC tenancy.
 * <p>
 * Plain mutable data holder; not thread-safe. Supports fluent population
 * via the {@code with...} methods.
 */
public class Placement {

    /** The availability zone in which an Amazon EC2 instance runs. */
    private String availabilityZone;

    /**
     * The name of the PlacementGroup in which an Amazon EC2 instance runs.
     * Placement groups are primarily used for launching High Performance
     * Computing instances in the same group to ensure fast connection speeds.
     */
    private String groupName;

    /**
     * The allowed tenancy of instances launched into the VPC: "default"
     * means any tenancy; "dedicated" means all instances launched into the
     * VPC run as dedicated tenancy regardless of the tenancy requested at
     * launch.
     */
    private String tenancy;

    /**
     * Default constructor; populate the object afterwards with the setters
     * or the fluent {@code with...} methods.
     */
    public Placement() {}

    /**
     * Creates a Placement pinned to the given availability zone.
     *
     * @param availabilityZone the availability zone in which the instance runs
     */
    public Placement(String availabilityZone) {
        setAvailabilityZone(availabilityZone);
    }

    /** @return the availability zone in which the instance runs. */
    public String getAvailabilityZone() {
        return availabilityZone;
    }

    /** @param availabilityZone the availability zone in which the instance runs. */
    public void setAvailabilityZone(String availabilityZone) {
        this.availabilityZone = availabilityZone;
    }

    /**
     * Fluent variant of {@link #setAvailabilityZone(String)}.
     *
     * @param availabilityZone the availability zone in which the instance runs
     * @return this object, for call chaining
     */
    public Placement withAvailabilityZone(String availabilityZone) {
        setAvailabilityZone(availabilityZone);
        return this;
    }

    /** @return the name of the placement group the instance runs in. */
    public String getGroupName() {
        return groupName;
    }

    /** @param groupName the name of the placement group the instance runs in. */
    public void setGroupName(String groupName) {
        this.groupName = groupName;
    }

    /**
     * Fluent variant of {@link #setGroupName(String)}.
     *
     * @param groupName the name of the placement group the instance runs in
     * @return this object, for call chaining
     */
    public Placement withGroupName(String groupName) {
        setGroupName(groupName);
        return this;
    }

    /** @return the allowed tenancy ("default" or "dedicated") of the VPC. */
    public String getTenancy() {
        return tenancy;
    }

    /** @param tenancy the allowed tenancy ("default" or "dedicated") of the VPC. */
    public void setTenancy(String tenancy) {
        this.tenancy = tenancy;
    }

    /**
     * Fluent variant of {@link #setTenancy(String)}.
     *
     * @param tenancy the allowed tenancy ("default" or "dedicated") of the VPC
     * @return this object, for call chaining
     */
    public Placement withTenancy(String tenancy) {
        setTenancy(tenancy);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null members are included.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (availabilityZone != null) {
            buf.append("AvailabilityZone: ").append(availabilityZone).append(", ");
        }
        if (groupName != null) {
            buf.append("GroupName: ").append(groupName).append(", ");
        }
        if (tenancy != null) {
            buf.append("Tenancy: ").append(tenancy).append(", ");
        }
        return buf.append("}").toString();
    }

    @Override
    public int hashCode() {
        // Standard 31-based combination over the three members, null -> 0.
        final int prime = 31;
        int hash = 1;
        hash = prime * hash + (availabilityZone == null ? 0 : availabilityZone.hashCode());
        hash = prime * hash + (groupName == null ? 0 : groupName.hashCode());
        hash = prime * hash + (tenancy == null ? 0 : tenancy.hashCode());
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Placement)) {
            return false;
        }
        Placement other = (Placement) obj;
        return sameMember(availabilityZone, other.availabilityZone)
            && sameMember(groupName, other.groupName)
            && sameMember(tenancy, other.tenancy);
    }

    /** Null-tolerant equality for a single String member. */
    private static boolean sameMember(String a, String b) {
        return (a == null) ? (b == null) : a.equals(b);
    }
}
| |
/**
* MegaMek - Copyright (C) 2003 Ben Mazur (bmazur@sev.org)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
package megamek.common;
import java.util.*;
import java.io.*;
/**
 * Base class for participants that act in initiative order (Teams or
 * Players).  Holds the participant's initiative roll and the number of
 * turns it owes for each unit class (mechs, tanks, infantry), and provides
 * static helpers to roll initiative, resolve ties, and interleave turns
 * across participants.
 */
public abstract class TurnOrdered implements Serializable
{
    // Accumulated initiative roll(s) for this participant.
    protected InitiativeRoll initiative = new InitiativeRoll();
    // Turns owed per unit class; maintained by subclasses via updateTurnCount().
    protected int turns_mech = 0;
    protected int turns_tank = 0;
    protected int turns_infantry = 0;

    /** Returns the number of turns owed for mech units. */
    public int getMechCount() {
        return turns_mech;
    }

    /** Returns the number of turns owed for tank units. */
    public int getTankCount() {
        return turns_tank;
    }

    /** Returns the number of turns owed for infantry units. */
    public int getInfantryCount() {
        return turns_infantry;
    }

    /** Returns this participant's initiative roll object. */
    public InitiativeRoll getInitiative() {
        return initiative;
    }

    /**
     * Clears every participant's previous initiative, then rolls fresh
     * initiative for all of them and resolves any ties.
     *
     * @param v vector of TurnOrdered (Teams or Players)
     */
    public static void rollInitiative(Vector v)
    {
        // Clear all rolls
        for (Enumeration i = v.elements(); i.hasMoreElements();) {
            final TurnOrdered item = (TurnOrdered)i.nextElement();
            item.getInitiative().clear();
        }
        rollInitAndResolveTies(v, null);

        //This is the *auto-reroll* code for the Tactical Genius (lvl 3)
        // pilot ability. It is NOT CURRENTLY IMPLEMENTED. This code may
        // be incomplete/buggy/just plain wrong.
        /**
        if (v.firstElement() instanceof Team) {
            //find highest init roll
            int highestInit = 2;
            for (Enumeration i = v.elements(); i.hasMoreElements();) {
                final TurnOrdered item = (TurnOrdered)i.nextElement();
                highestInit = Math.max(item.getInitiative().getRoll(item.getInitiative().size() - 1), highestInit);
            }
            System.out.println("\n\n--->HIGH INIT ROLL: " + highestInit);
            //loop through teams
            for (Enumeration i = v.elements(); i.hasMoreElements();) {
                final TurnOrdered item = (TurnOrdered)i.nextElement();
                //loop through players
                for (Enumeration j = ((Team)item).getPlayers(); j.hasMoreElements();) {
                    final Player player = (Player)j.nextElement();
                    if (player.getGame().hasTacticalGenius(player) &&
                        item.getInitiative().getRoll(item.getInitiative().size() - 1) < highestInit && v.size() < 3) {
                        System.out.println("-->AUTO REROLL: " + player.getName());
                        Vector rv = new Vector();
                        rv.addElement(item);
                        rollInitAndResolveTies(v, rv);
                    }
                }
            }
        }
        */
    }

    // This takes a vector of TurnOrdered (Teams or Players), rolls
    // initiative, and resolves ties. The second argument is used
    // when a specific team's initiative should be re-rolled.
    public static void rollInitAndResolveTies(Vector v, Vector rerollRequests) {
        for (Enumeration i = v.elements(); i.hasMoreElements();) {
            final TurnOrdered item = (TurnOrdered)i.nextElement();
            if (rerollRequests == null) { //normal init roll
                item.getInitiative().addRoll(); // add a roll for all teams
            } else {
                //Resolve Tactical Genius (lvl 3) pilot ability
                for (Enumeration j = rerollRequests.elements(); j.hasMoreElements();) {
                    final TurnOrdered rerollItem = (TurnOrdered)j.nextElement();
                    if (item == rerollItem) { // this is the team re-rolling
                        item.getInitiative().replaceRoll();
                        break; // each team only needs one reroll
                    }
                }
            }
        }
        // Check for ties: for each participant, collect everyone whose
        // initiative equals its own and recursively re-roll just that group.
        Vector ties = new Vector();
        for (Enumeration i = v.elements(); i.hasMoreElements();) {
            final TurnOrdered item = (TurnOrdered)i.nextElement();
            ties.removeAllElements();
            ties.addElement(item);
            for (Enumeration j = v.elements(); j.hasMoreElements();) {
                final TurnOrdered other = (TurnOrdered)j.nextElement();
                if (item != other && item.getInitiative().equals(other.getInitiative())) {
                    ties.addElement(other);
                }
            }
            if (ties.size() > 1) {
                // NOTE(review): this debug print casts to Team; it would throw
                // ClassCastException if v holds Players instead -- confirm
                // the method is only ever called with Teams.
                System.out.println("->TIE: " + ((Team)item).getId());
                rollInitAndResolveTies(ties, null); // null should be reroll? debug
            }
        }
    }

    // This takes a vector of TurnOrdered, and generates a new vector.
    // Participants are sorted by initiative, then their turns are interleaved
    // proportionally so that participants with more units act more often.
    public static TurnVectors generateTurnOrder(Vector v, boolean infLast)
    {
        // Per-participant turn counts, split into infantry vs. everything else.
        int[] num_inf_turns = new int[v.size()];
        int[] num_oth_turns = new int[v.size()];
        int total_inf_turns = 0;
        int total_oth_turns = 0;
        int idx;
        TurnOrdered[] order = new TurnOrdered[v.size()];
        int oi = 0;
        // Copy into a sortable list (pre-Collections-era Vector workaround
        // using the com.sun.java.util.collections backport).
        com.sun.java.util.collections.ArrayList plist =
            new com.sun.java.util.collections.ArrayList(v.size());
        for (Enumeration i = v.elements(); i.hasMoreElements();) {
            Object item = i.nextElement();
            plist.add(item);
        }
        // Sort participants by their initiative ordering.
        com.sun.java.util.collections.Collections.sort(plist, new com.sun.java.util.collections.Comparator() {
            public int compare(Object o1, Object o2) {
                return ((TurnOrdered)o1).getInitiative().compareTo(((TurnOrdered)o2).getInitiative());
            }
        });
        // Tally each participant's turn counts in sorted order.
        for (com.sun.java.util.collections.Iterator i = plist.iterator(); i.hasNext();) {
            final TurnOrdered item = (TurnOrdered)i.next();
            order[oi] = item;
            // If infantry are last, separate them. Otherwise, place all 'turns' in one pile
            if (infLast) {
                num_inf_turns[oi] = item.getInfantryCount();
                num_oth_turns[oi] = item.getTankCount() + item.getMechCount();
            } else {
                num_inf_turns[oi] = 0;
                num_oth_turns[oi] = item.getTankCount() +
                    item.getMechCount() + item.getInfantryCount();
            }
            total_inf_turns += num_inf_turns[oi];
            total_oth_turns += num_oth_turns[oi];
            oi++;
        }
        int min;
        int turns_left;
        TurnVectors turns = new TurnVectors(total_oth_turns, total_inf_turns);
        // We will do the 'other' units first (mechs and vehicles, and if infLast is false,
        // infantry )
        // min = smallest non-zero turn count; each pass gives a participant
        // floor(remaining/min) consecutive turns, so larger forces are spread
        // proportionally among smaller ones.
        min = Integer.MAX_VALUE;
        for(idx = 0; idx < oi ; idx++) {
            if ( num_oth_turns[idx] != 0 && num_oth_turns[idx] < min)
                min = num_oth_turns[idx];
        }
        turns_left = total_oth_turns;
        while(turns_left > 0) {
            for(idx = 0; idx < oi; idx++) {
                // If you have no turns here, skip
                if (num_oth_turns[idx] == 0)
                    continue;
                /* If you have less than twice the lowest, move 1. Otherwise, move more. */
                int ntm = (int)Math.floor( ((double)num_oth_turns[idx]) / ((double)min) );
                for (int j = 0; j < ntm; j++) {
                    turns.non_infantry.addElement(order[idx]);
                    num_oth_turns[idx]--;
                    turns_left--;
                }
            }
            // Since the smallest unit count had to place 1, reduce min.
            min--;
        }
        // Now, we do the 'infantry' turns (same interleaving scheme).
        if (infLast) {
            min = Integer.MAX_VALUE;
            for(idx = 0; idx < oi ; idx++) {
                if ( num_inf_turns[idx] != 0 && num_inf_turns[idx] < min)
                    min = num_inf_turns[idx];
            }
            turns_left = total_inf_turns;
            while(turns_left > 0) {
                for(idx = 0; idx < oi; idx++) {
                    // If you have no turns here, skip
                    if (num_inf_turns[idx] == 0)
                        continue;
                    /* If you have less than twice the lowest, move 1. Otherwise, move more. */
                    int ntm = (int)Math.floor( ((double)num_inf_turns[idx]) / ((double)min) );
                    for (int j = 0; j < ntm; j++) {
                        turns.infantry.addElement(order[idx]);
                        num_inf_turns[idx]--;
                        turns_left--;
                    }
                }
                // Since the smallest unit count had to place 1, reduce min.
                min--;
            }
        }
        return turns;
    }

    /** Refreshes turns_mech/turns_tank/turns_infantry; implemented by subclasses. */
    public abstract void updateTurnCount();
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2015 QNX Software Systems and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Peter Graves (QNX Software Systems) - Initial API and implementation
*******************************************************************************/
package org.eclipse.cdt.core.model.tests;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.Arrays;
import java.util.List;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.eclipse.cdt.core.CCProjectNature;
import org.eclipse.cdt.core.CProjectNature;
import org.eclipse.cdt.core.dom.IPDOMManager;
import org.eclipse.cdt.core.model.CoreModel;
import org.eclipse.cdt.core.model.ElementChangedEvent;
import org.eclipse.cdt.core.model.IBinaryContainer;
import org.eclipse.cdt.core.model.ICContainer;
import org.eclipse.cdt.core.model.ICElement;
import org.eclipse.cdt.core.model.ICElementDelta;
import org.eclipse.cdt.core.model.ICProject;
import org.eclipse.cdt.core.model.IElementChangedListener;
import org.eclipse.cdt.core.model.ISourceRoot;
import org.eclipse.cdt.core.settings.model.COutputEntry;
import org.eclipse.cdt.core.settings.model.CSourceEntry;
import org.eclipse.cdt.core.settings.model.ICConfigurationDescription;
import org.eclipse.cdt.core.settings.model.ICOutputEntry;
import org.eclipse.cdt.core.settings.model.ICProjectDescription;
import org.eclipse.cdt.core.settings.model.ICSourceEntry;
import org.eclipse.cdt.core.testplugin.CProjectHelper;
import org.eclipse.cdt.core.testplugin.CTestPlugin;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IWorkspace;
import org.eclipse.core.resources.IWorkspaceDescription;
import org.eclipse.core.resources.IWorkspaceRoot;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Status;
import org.eclipse.ui.dialogs.IOverwriteQuery;
import org.eclipse.ui.wizards.datatransfer.FileSystemStructureProvider;
import org.eclipse.ui.wizards.datatransfer.ImportOperation;
import org.osgi.framework.Bundle;
/**
* This file contains a set of generic tests for the core C model. Nothing
* exotic, but should be a small sanity set of tests.
*/
public class CModelTests extends TestCase {
    IWorkspace workspace;            // shared workspace, cached by setUp()
    IWorkspaceRoot root;             // root of the workspace, cached by setUp()
    IProject project_c, project_cc;  // NOTE(review): never assigned in the tests visible here -- confirm before removing
    NullProgressMonitor monitor;     // progress monitor handed to all workspace operations
    /**
     * Constructor for CModelTests.
     *
     * @param name the name of the test case method to run
     */
    public CModelTests(String name) {
        super(name);
    }
/**
* Sets up the test fixture.
*
* Called before every test case method.
*
* Example code test the packages in the project
* "com.qnx.tools.ide.cdt.core"
*/
@Override
protected void setUp() throws Exception {
/***
* The test of the tests assume that they have a working workspace
* and workspace root object to use to create projects/files in,
* so we need to get them setup first.
*/
IWorkspaceDescription desc;
workspace= ResourcesPlugin.getWorkspace();
root= workspace.getRoot();
monitor = new NullProgressMonitor();
if (workspace==null)
fail("Workspace was not setup");
if (root==null)
fail("Workspace root was not setup");
desc=workspace.getDescription();
desc.setAutoBuilding(false);
workspace.setDescription(desc);
}
    /**
     * Tears down the test fixture; called after every test case method.
     * Currently a no-op: each visible test deletes its own project in a
     * try/catch at the end of the method.
     */
    @Override
    protected void tearDown() {
        // release resources here and clean-up
    }
public static TestSuite suite() {
return new TestSuite(CModelTests.class);
}
public static void main (String[] args){
junit.textui.TestRunner.run(suite());
}
    /***
     * The following are a simple set of tests to make sure the HasC/CCNature
     * calls seem to be sane.
     *
     * Assumes that the CProjectHelper.createCProject properly creates a C
     * project with a C nature, but does not add the CC nature.
     * It also assumes that the AddCCNature call works.
     *
     * @see CProjectHelper#createCProject
     * @see CoreModel#addCCNature
     */
    public void testHasNature() throws CoreException {
        ICProject testProject;
        testProject=CProjectHelper.createCProject("naturetest", "none", IPDOMManager.ID_NO_INDEXER);
        if (testProject==null)
            fail("Unable to create project");
        // Fresh project: C nature present, CC nature absent.
        assertTrue("hasCNature works", CoreModel.hasCNature(testProject.getProject()));
        assertTrue("hasCCNature works without ccnature", !(CoreModel.hasCCNature(testProject.getProject())));
        // After adding the CC nature, hasCCNature() must report true.
        CCProjectNature.addCCNature(testProject.getProject(), monitor);
        assertTrue("hasCCNature works", (CoreModel.hasCCNature(testProject.getProject())));
        // After removing both natures, both checks must report false.
        CCProjectNature.removeCCNature(testProject.getProject(), monitor);
        CProjectNature.removeCNature(testProject.getProject(), monitor);
        assertTrue("hasCNature works without cnature", !CoreModel.hasCNature(testProject.getProject()));
        assertTrue("hasCCNature works without ccnature or cnature", !(CoreModel.hasCCNature(testProject.getProject())));
        // Best-effort cleanup; a failure to delete must not fail the test.
        try{
            testProject.getProject().delete(true,true,monitor);
        }
        catch (CoreException e) {}
    }
/***
* Simple tests to make sure the models file identification methods seem
* to work as expected.
*/
public void testFileType() throws CoreException,FileNotFoundException {
ICProject testProject;
testProject=CProjectHelper.createCProject("filetest", "none", IPDOMManager.ID_NO_INDEXER);
if (testProject==null)
fail("Unable to create project");
IFile file = testProject.getProject().getFile("exetest_g");
if (!file.exists()) {
file.create(new FileInputStream(
CTestPlugin.getDefault().getFileInPlugin(new Path("resources/exe/x86/o.g/exe_g"))),
false, monitor);
}
/***
* file should be a binary, executable, not shared or archive
*/
assertTrue("isBinary", CoreModel.getDefault().isBinary(file));
assertTrue("isExecutable", CoreModel.getDefault().isExecutable(file));
assertTrue("isSharedLib", !CoreModel.getDefault().isSharedLib(file));
assertTrue("isArchive", !CoreModel.getDefault().isArchive(file));
assertTrue("isObject", !CoreModel.getDefault().isObject(file));
assertTrue("isTranslationUnit", !CoreModel.isTranslationUnit(file));
file = testProject.getProject().getFile("exetest.c");
if (!file.exists()) {
file.create(new FileInputStream(
CTestPlugin.getDefault().getFileInPlugin(new Path("resources/exe/main.c"))),
false, monitor);
}
/***
* file should be a translation unit
*/
assertTrue("isBinary", !CoreModel.getDefault().isBinary(file));
assertTrue("isExecutable", !CoreModel.getDefault().isExecutable(file));
assertTrue("isSharedLib", !CoreModel.getDefault().isSharedLib(file));
assertTrue("isArchive", !CoreModel.getDefault().isArchive(file));
assertTrue("isObject", !CoreModel.getDefault().isObject(file));
assertTrue("isTranslationUnit", CoreModel.isTranslationUnit(file));
file = testProject.getProject().getFile("exetest.o");
if (!file.exists()) {
file.create(new FileInputStream(
CTestPlugin.getDefault().getFileInPlugin(new Path("resources/exe/x86/o.g/main.o"))),
false, monitor);
}
/***
* file should be a object file unit
*/
assertTrue("isBinary", CoreModel.getDefault().isBinary(file));
assertTrue("isExecutable", !CoreModel.getDefault().isExecutable(file));
assertTrue("isSharedLib", !CoreModel.getDefault().isSharedLib(file));
assertTrue("isArchive", !CoreModel.getDefault().isArchive(file));
assertTrue("isObject", CoreModel.getDefault().isObject(file));
assertTrue("isTranslationUnit", !CoreModel.isTranslationUnit(file));
file = testProject.getProject().getFile("liblibtest_g.so");
if (!file.exists()) {
file.create(new FileInputStream(
CTestPlugin.getDefault().getFileInPlugin(new Path("resources/testlib/x86/so.g/libtestlib_g.so"))),
false, monitor);
}
/***
* file should be a sharedlib/binary file
*/
assertTrue("isBinary", CoreModel.getDefault().isBinary(file));
assertTrue("isExecutable", !CoreModel.getDefault().isExecutable(file));
assertTrue("isSharedLib", CoreModel.getDefault().isSharedLib(file));
assertTrue("isArchive", !CoreModel.getDefault().isArchive(file));
assertTrue("isObject", !CoreModel.getDefault().isObject(file));
assertTrue("isTranslationUnit", !CoreModel.isTranslationUnit(file));
file = testProject.getProject().getFile("liblibtest_g.a");
if (!file.exists()) {
file.create(new FileInputStream(
CTestPlugin.getDefault().getFileInPlugin(new Path("resources/testlib/x86/a.g/libtestlib_g.a"))),
false, monitor);
} else {
fail("Does not exist?");
}
/***
* file should be a archive file
*/
assertTrue("isArchive", CoreModel.getDefault().isArchive(file));
assertTrue("isBinary:", !CoreModel.getDefault().isBinary(file));
assertTrue("isExecutable", !CoreModel.getDefault().isExecutable(file));
assertTrue("isSharedLib", !CoreModel.getDefault().isSharedLib(file));
assertTrue("isArchive", CoreModel.getDefault().isArchive(file));
assertTrue("isObject", !CoreModel.getDefault().isObject(file));
assertTrue("isTranslationUnit", !CoreModel.isTranslationUnit(file));
try{
testProject.getProject().delete(true,true,monitor);
}
catch (CoreException e) {}
}
/****
* Some simple tests for isValidTranslationUnitName
*/
public void testIsValidTranslationUnitName() throws CoreException {
assertTrue("Invalid C file", !CoreModel.isValidTranslationUnitName(null, "notcfile"));
assertTrue("Invalid C file", !CoreModel.isValidTranslationUnitName(null, "not.c.file"));
assertTrue("Invalid C file", !CoreModel.isValidTranslationUnitName(null, "not.ca"));
assertTrue("Valid C file", CoreModel.isValidTranslationUnitName(null, "areal.c"));
}
    // bug 275609: adding a source-entry exclusion filter must remove the
    // excluded resources from the C model and surface them as non-C resources.
    public void testSourceExclusionFilters_275609() throws Exception {
        ICProject testProject;
        // NOTE(review): project name "bug257609" does not match bug 275609 --
        // looks like a digit transposition; left unchanged because it is a
        // runtime string used as the project name.
        testProject=CProjectHelper.createCProject("bug257609", "none", IPDOMManager.ID_NO_INDEXER);
        if (testProject==null)
            fail("Unable to create project");
        // Fixture layout: test/test0.c, test/1/test1.c, test/2/test2.c
        IFolder testFolder = testProject.getProject().getFolder("test");
        testFolder.create(true, true, monitor);
        IFolder subFolder1 = testFolder.getFolder("1");
        subFolder1.create(true, true, monitor);
        IFolder subFolder2 = testFolder.getFolder("2");
        subFolder2.create(true, true, monitor);
        IFile file0 = testFolder.getFile("test0.c");
        file0.create(new ByteArrayInputStream(new byte[0]), true, monitor);
        IFile file1 = subFolder1.getFile("test1.c");
        file1.create(new ByteArrayInputStream(new byte[0]), true, monitor);
        IFile file2 = subFolder2.getFile("test2.c");
        file2.create(new ByteArrayInputStream(new byte[0]), true, monitor);
        // Before filtering: "test" is a C container with two sub-containers,
        // one translation unit and no non-C resources.
        List<ICElement> cSourceRoots = testProject.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(1, cSourceRoots.size());
        assertEquals(testProject.getElementName(), cSourceRoots.get(0).getElementName());
        ISourceRoot sourceRoot = (ISourceRoot) cSourceRoots.get(0);
        List<ICElement> cContainers = sourceRoot.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(1, cContainers.size());
        assertEquals("test", cContainers.get(0).getElementName());
        ICContainer testContainer = (ICContainer) cContainers.get(0);
        List<ICElement> subContainers = testContainer.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(2, subContainers.size());
        assertEquals("1", subContainers.get(0).getElementName());
        assertEquals("2", subContainers.get(1).getElementName());
        Object[] nonCResources= testContainer.getNonCResources();
        assertEquals(0, nonCResources.length);
        List<ICElement> tUnits = testContainer.getChildrenOfType(ICElement.C_UNIT);
        assertEquals(1, tUnits.size());
        assertEquals("test0.c", tUnits.get(0).getElementName());
        ICProjectDescription prjDesc= CoreModel.getDefault().getProjectDescription(testProject.getProject(), true);
        ICConfigurationDescription activeCfg= prjDesc.getActiveConfiguration();
        assertNotNull(activeCfg);
        // add filter to source entry: exclude everything directly under "test"
        ICSourceEntry[] entries = activeCfg.getSourceEntries();
        final String sourceEntryName = entries[0].getName();
        final IPath[] exclusionPatterns = new IPath[] { new Path("test/*") };
        ICSourceEntry entry = new CSourceEntry(sourceEntryName, exclusionPatterns, entries[0].getFlags());
        activeCfg.setSourceEntries(new ICSourceEntry[] {entry});
        // store the changed configuration
        CoreModel.getDefault().setProjectDescription(testProject.getProject(), prjDesc);
        // After filtering: children of "test" must vanish from the C model
        // and reappear as plain (non-C) resources.
        cSourceRoots = testProject.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(1, cSourceRoots.size());
        assertEquals(testProject.getElementName(), cSourceRoots.get(0).getElementName());
        sourceRoot = (ISourceRoot) cSourceRoots.get(0);
        cContainers = sourceRoot.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(1, cContainers.size());
        assertEquals("test", cContainers.get(0).getElementName());
        testContainer = (ICContainer) cContainers.get(0);
        tUnits = testContainer.getChildrenOfType(ICElement.C_UNIT);
        assertEquals(0, tUnits.size());
        subContainers = testContainer.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(0, subContainers.size());
        nonCResources= testContainer.getNonCResources();
        assertEquals(3, nonCResources.length);
        assertEquals(subFolder1, nonCResources[0]);
        assertEquals(subFolder2, nonCResources[1]);
        assertEquals(file0, nonCResources[2]);
        // Best-effort cleanup; a failure to delete must not fail the test.
        try {
            testProject.getProject().delete(true,true,monitor);
        }
        catch (CoreException e) {}
    }
    // bug 179474: a wildcard exclusion pattern ("**/*.cpp") must exclude
    // translation units at every depth, both in the source root itself and
    // in nested containers.
    public void testSourceExclusionFilters_179474() throws Exception {
        ICProject testProject;
        testProject= CProjectHelper.createCProject("bug179474", "none", IPDOMManager.ID_NO_INDEXER);
        if (testProject == null)
            fail("Unable to create project");
        // Fixture layout: a.cpp in the project root, sub/b.cpp in a subfolder.
        IFolder subFolder = testProject.getProject().getFolder("sub");
        subFolder.create(true, true, monitor);
        IFile fileA = testProject.getProject().getFile("a.cpp");
        fileA.create(new ByteArrayInputStream(new byte[0]), true, monitor);
        IFile fileB = subFolder.getFile("b.cpp");
        fileB.create(new ByteArrayInputStream(new byte[0]), true, monitor);
        // Before filtering: both files are translation units in the model.
        List<ICElement> cSourceRoots = testProject.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(1, cSourceRoots.size());
        assertEquals(testProject.getElementName(), cSourceRoots.get(0).getElementName());
        ISourceRoot sourceRoot = (ISourceRoot) cSourceRoots.get(0);
        List<ICElement> cContainers = sourceRoot.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(1, cContainers.size());
        assertEquals(subFolder.getName(), cContainers.get(0).getElementName());
        ICContainer subContainer = (ICContainer) cContainers.get(0);
        List<ICElement> tUnits = subContainer.getChildrenOfType(ICElement.C_UNIT);
        assertEquals(1, tUnits.size());
        assertEquals(fileB.getName(), tUnits.get(0).getElementName());
        tUnits = sourceRoot.getChildrenOfType(ICElement.C_UNIT);
        assertEquals(1, tUnits.size());
        assertEquals(fileA.getName(), tUnits.get(0).getElementName());
        ICProjectDescription prjDesc= CoreModel.getDefault().getProjectDescription(testProject.getProject(), true);
        ICConfigurationDescription activeCfg= prjDesc.getActiveConfiguration();
        assertNotNull(activeCfg);
        // add filter to source entry: exclude all .cpp files at any depth
        ICSourceEntry[] entries = activeCfg.getSourceEntries();
        final String sourceEntryName = entries[0].getName();
        final IPath[] exclusionPatterns = new IPath[] { new Path("**/*.cpp") };
        ICSourceEntry entry = new CSourceEntry(sourceEntryName, exclusionPatterns, entries[0].getFlags());
        activeCfg.setSourceEntries(new ICSourceEntry[] {entry});
        // store the changed configuration
        CoreModel.getDefault().setProjectDescription(testProject.getProject(), prjDesc);
        // After filtering: no translation units remain anywhere; the files
        // are reported as non-C resources instead.
        cSourceRoots = testProject.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(1, cSourceRoots.size());
        assertEquals(testProject.getElementName(), cSourceRoots.get(0).getElementName());
        sourceRoot = (ISourceRoot) cSourceRoots.get(0);
        cContainers = sourceRoot.getChildrenOfType(ICElement.C_CCONTAINER);
        assertEquals(1, cContainers.size());
        assertEquals(subFolder.getName(), cContainers.get(0).getElementName());
        subContainer = (ICContainer) cContainers.get(0);
        tUnits = subContainer.getChildrenOfType(ICElement.C_UNIT);
        assertEquals(0, tUnits.size());
        tUnits = sourceRoot.getChildrenOfType(ICElement.C_UNIT);
        assertEquals(0, tUnits.size());
        Object[] nonCResources = subContainer.getNonCResources();
        assertEquals(1, nonCResources.length);
        assertEquals(fileB, nonCResources[0]);
        nonCResources = sourceRoot.getNonCResources();
        assertTrue(Arrays.asList(nonCResources).contains(fileA));
        // Best-effort cleanup; a failure to delete must not fail the test.
        try {
            testProject.getProject().delete(true,true,monitor);
        }
        catch (CoreException e) {}
    }
// bug 294965
/**
 * Verifies that a configured output entry survives a project close/reopen
 * cycle and that binaries in the project root are handled correctly when a
 * source-entry exclusion filter and an output directory are set.
 */
public void testBinaryInProjectRoot_294965() throws Exception {
    ICProject testProject;
    testProject = CProjectHelper.createCProject("bug294965", "none", IPDOMManager.ID_NO_INDEXER);
    if (testProject == null) {
        fail("Unable to create project");
    }
    // Binary parser is needed so imported executables are recognized.
    CProjectHelper.addDefaultBinaryParser(testProject.getProject());
    CProjectHelper.importSourcesFromPlugin(testProject, CTestPlugin.getDefault().getBundle(), "resources/exe/x86/o");
    testProject.getProject().getFolder("out").create(true, true, monitor);
    ICProjectDescription prjDesc= CoreModel.getDefault().getProjectDescription(testProject.getProject(), true);
    ICConfigurationDescription cfg= prjDesc.getActiveConfiguration();
    assertNotNull(cfg);
    // add filter to source entry
    ICSourceEntry[] entries = cfg.getSourceEntries();
    final String sourceEntryName = entries[0].getName();
    final IPath[] exclusionPatterns = new IPath[] { new Path("test/*") };
    ICSourceEntry sourceEntry = new CSourceEntry(sourceEntryName, exclusionPatterns, entries[0].getFlags());
    cfg.setSourceEntries(new ICSourceEntry[] { sourceEntry });
    // set output entry pointing at the newly created "out" folder
    ICOutputEntry outputEntry = new COutputEntry(testProject.getProject().getFolder("out"), new IPath[0], 0);
    cfg.getBuildSetting().setOutputDirectories(new ICOutputEntry[] { outputEntry });
    assertEquals(outputEntry, cfg.getBuildSetting().getOutputDirectories()[0]);
    // store the changed configuration
    CoreModel.getDefault().setProjectDescription(testProject.getProject(), prjDesc, true, monitor);
    // Close and reopen the project to force the description to be re-read
    // from disk rather than served from memory.
    testProject.close();
    testProject.getProject().close(monitor);
    testProject.getProject().open(monitor);
    prjDesc= CoreModel.getDefault().getProjectDescription(testProject.getProject(), false);
    cfg= prjDesc.getActiveConfiguration();
    assertEquals(outputEntry, cfg.getBuildSetting().getOutputDirectories()[0]);
    Object[] nonCResources = testProject.getNonCResources();
    // NOTE(review): the expected count of 7 depends on the imported fixture
    // contents plus default project files — confirm against resources/exe/x86/o.
    assertEquals(7, nonCResources.length);
    // Best-effort cleanup; a failure to delete must not fail the test.
    try {
        testProject.getProject().delete(true,true,monitor);
    }
    catch (CoreException e) {}
}
// bug 131165
/**
 * Verifies that binaries imported into a new folder structure are picked up
 * by the binary container and that a corresponding element-changed delta is
 * fired for the container.
 *
 * <p>Fix: the element-changed listener registered with {@link CoreModel} was
 * never removed, so it leaked into every subsequently executed test. The
 * listener (and the test project) are now cleaned up in a {@code finally}
 * block.</p>
 */
public void testPickUpBinariesInNewFolder_131165() throws Exception {
    ICProject testProject;
    testProject = CProjectHelper.createCProject("bug131165", "none", IPDOMManager.ID_NO_INDEXER);
    if (testProject == null) {
        fail("Unable to create project");
    }
    CProjectHelper.addDefaultBinaryParser(testProject.getProject());
    final IBinaryContainer bin = testProject.getBinaryContainer();
    assertEquals(0, bin.getBinaries().length);
    // One-element array used as a mutable flag + monitor shared with the listener.
    final boolean binContainerChanged[] = { false };
    IElementChangedListener elementChangedListener = new IElementChangedListener() {
        @Override
        public void elementChanged(ElementChangedEvent event) {
            ICElementDelta delta = event.getDelta();
            processDelta(delta);
        }
        // Depth-first search for a delta on the binary container; sets the
        // flag and wakes the waiter thread when found.
        private boolean processDelta(ICElementDelta delta) {
            if (delta.getElement().equals(bin)) {
                synchronized (binContainerChanged) {
                    binContainerChanged[0] = true;
                    binContainerChanged.notify();
                }
                return true;
            }
            ICElementDelta[] childDeltas = delta.getChangedChildren();
            for (ICElementDelta childDelta : childDeltas) {
                if (processDelta(childDelta)) {
                    return true;
                }
            }
            return false;
        }
    };
    CoreModel.getDefault().addElementChangedListener(elementChangedListener);
    try {
        // Helper thread that blocks until the listener signals (or 1s passes);
        // joining it below gives the delta time to arrive asynchronously.
        Thread waiter = new Thread() {
            @Override
            public void run() {
                synchronized (binContainerChanged) {
                    try {
                        binContainerChanged.wait(1000);
                    } catch (InterruptedException exc) {
                    }
                }
            }
        };
        waiter.start();
        // Give the waiter a chance to enter wait() before the import fires deltas.
        Thread.sleep(50);
        // import with folder structure
        importSourcesFromPlugin(testProject, CTestPlugin.getDefault().getBundle(), "resources/exe/x86");
        // wait for delta notification
        waiter.join(1000);
        assertTrue(binContainerChanged[0]);
        assertEquals(2, bin.getBinaries().length);
    }
    finally {
        // Always unregister, otherwise the listener leaks into later tests.
        CoreModel.getDefault().removeElementChangedListener(elementChangedListener);
        // Best-effort cleanup; a failure to delete must not fail the test.
        try {
            testProject.getProject().delete(true, true, monitor);
        }
        catch (CoreException e) {}
    }
}
// same as CprojectHelper.importSourcesFromPlugin(), but preserving folder structure
/**
 * Imports the files found under {@code sources} inside the given bundle into
 * the project, keeping the folder structure intact.
 *
 * <p>Fix: if the import is interrupted, the thread's interrupt status is now
 * restored before the failure is rethrown, so callers can still observe the
 * interruption (the original cause is preserved in the {@link CoreException}
 * either way).</p>
 *
 * @param project target project receiving the imported files
 * @param bundle  bundle containing the source resources
 * @param sources bundle-relative path of the directory to import
 * @throws CoreException wrapping any failure during lookup or import
 */
private static void importSourcesFromPlugin(ICProject project, Bundle bundle, String sources) throws CoreException {
    try {
        String baseDir= FileLocator.toFileURL(FileLocator.find(bundle, new Path(sources), null)).getFile();
        ImportOperation importOp = new ImportOperation(project.getProject().getFullPath(),
                new File(baseDir), FileSystemStructureProvider.INSTANCE, new IOverwriteQuery() {
            @Override
            public String queryOverwrite(String file) {
                // Unconditionally overwrite existing resources.
                return ALL;
            }});
        importOp.setCreateContainerStructure(true);
        importOp.run(new NullProgressMonitor());
    }
    catch (Exception e) {
        if (e instanceof InterruptedException) {
            // Re-assert the interrupt flag swallowed by catching Exception.
            Thread.currentThread().interrupt();
        }
        throw new CoreException(new Status(IStatus.ERROR, CTestPlugin.PLUGIN_ID, 0, "Import Interrupted", e));
    }
}
/**
 * Verifies that deleting a binary after the project has been closed and
 * reopened still produces an element-changed delta on the binary container
 * (bug 349564).
 *
 * <p>Fix: the element-changed listener registered with {@link CoreModel} was
 * never removed, so it leaked into every subsequently executed test. The
 * listener (and the test project) are now cleaned up in a {@code finally}
 * block.</p>
 */
public void testBinaryContainerDeltaAfterCloseProjDeleteBin_349564() throws Exception {
    ICProject testProject;
    testProject = CProjectHelper.createCProject("bug349564", "none", IPDOMManager.ID_NO_INDEXER);
    if (testProject == null) {
        fail("Unable to create project");
    }
    CProjectHelper.addDefaultBinaryParser(testProject.getProject());
    final IBinaryContainer bin = testProject.getBinaryContainer();
    assertEquals(0, bin.getBinaries().length);
    // import with folder structure
    importSourcesFromPlugin(testProject, CTestPlugin.getDefault().getBundle(), "resources/exe/x86/o");
    assertEquals(1, bin.getBinaries().length);
    IResource resource = bin.getBinaries()[0].getResource();
    // One-element array used as a mutable flag + monitor shared with the listener.
    final boolean binContainerChanged[] = { false };
    IElementChangedListener elementChangedListener = new IElementChangedListener() {
        @Override
        public void elementChanged(ElementChangedEvent event) {
            ICElementDelta delta = event.getDelta();
            processDelta(delta);
        }
        // Depth-first search for a delta on any binary container; sets the
        // flag and wakes the waiter thread when found.
        private boolean processDelta(ICElementDelta delta) {
            if (delta.getElement() instanceof IBinaryContainer) {
                synchronized (binContainerChanged) {
                    binContainerChanged[0] = true;
                    binContainerChanged.notify();
                }
                return true;
            }
            ICElementDelta[] childDeltas = delta.getChangedChildren();
            for (ICElementDelta childDelta : childDeltas) {
                if (processDelta(childDelta)) {
                    return true;
                }
            }
            return false;
        }
    };
    // Helper thread that blocks until the listener signals (or 1s passes);
    // joining it below gives the delta time to arrive asynchronously.
    Thread waiter = new Thread() {
        @Override
        public void run() {
            synchronized (binContainerChanged) {
                try {
                    binContainerChanged.wait(1000);
                } catch (InterruptedException exc) {
                }
            }
        }
    };
    CoreModel.getDefault().addElementChangedListener(elementChangedListener);
    try {
        // Close and reopen the project before deleting the binary; this is
        // the exact scenario of bug 349564.
        testProject.close();
        testProject.open(monitor);
        waiter.start();
        // Give the waiter a chance to enter wait() before the delete fires deltas.
        Thread.sleep(50);
        workspace.delete(new IResource[] { resource }, false, monitor);
        // wait for delta notification
        waiter.join(1000);
        assertEquals(0, testProject.getBinaryContainer().getBinaries().length);
        assertTrue(binContainerChanged[0]);
    }
    finally {
        // Always unregister, otherwise the listener leaks into later tests.
        CoreModel.getDefault().removeElementChangedListener(elementChangedListener);
        // Best-effort cleanup; a failure to delete must not fail the test.
        try {
            testProject.getProject().delete(true, true, monitor);
        }
        catch (CoreException e) {}
    }
}
}
| |
package io.digdag.cli;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.joran.JoranConfigurator;
import ch.qos.logback.core.joran.spi.JoranException;
import com.beust.jcommander.JCommander;
import com.beust.jcommander.MissingCommandException;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import io.digdag.cli.client.Archive;
import io.digdag.cli.client.Backfill;
import io.digdag.cli.client.Delete;
import io.digdag.cli.client.Kill;
import io.digdag.cli.client.Push;
import io.digdag.cli.client.Reschedule;
import io.digdag.cli.client.Retry;
import io.digdag.cli.client.ShowAttempt;
import io.digdag.cli.client.ShowAttempts;
import io.digdag.cli.client.ShowLog;
import io.digdag.cli.client.ShowSchedule;
import io.digdag.cli.client.ShowSession;
import io.digdag.cli.client.ShowTask;
import io.digdag.cli.client.ShowWorkflow;
import io.digdag.cli.client.Start;
import io.digdag.cli.client.Upload;
import io.digdag.cli.client.Version;
import org.slf4j.LoggerFactory;
import java.io.PrintStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import static io.digdag.cli.SystemExitException.systemExit;
import static io.digdag.cli.ConfigUtil.defaultConfigPath;
import static io.digdag.core.agent.OperatorManager.formatExceptionMessage;
import static io.digdag.core.Version.buildVersion;
/**
 * Entry point of the digdag command-line interface.
 *
 * <p>Parses global options, registers all subcommands with JCommander,
 * dispatches to the selected {@link Command}, and maps failures to process
 * exit codes (0 = success, non-zero = error or command-specific code).</p>
 */
public class Main
{
    private static final String PROGRAM_NAME = "digdag";

    // Version shown by `--version` and in the startup banner.
    private final io.digdag.core.Version version;
    // Streams are injected so tests can capture output.
    private final PrintStream out;
    private final PrintStream err;

    public Main(io.digdag.core.Version version, PrintStream out, PrintStream err)
    {
        this.version = version;
        this.out = out;
        this.err = err;
    }

    /** Global options recognized before any subcommand. */
    public static class MainOptions
    {
        @Parameter(names = {"-help", "--help"}, help = true, hidden = true)
        boolean help;
    }

    public static void main(String... args)
    {
        int code = new Main(buildVersion(), System.out, System.err).cli(args);
        if (code != 0) {
            System.exit(code);
        }
    }

    /**
     * Runs the CLI for the given arguments and returns the process exit code.
     *
     * @return 0 on success; 1 on parameter or unexpected errors; otherwise the
     *         code carried by a {@link SystemExitException}
     */
    public int cli(String... args)
    {
        // A lone `--version` prints the bare version on stdout and exits.
        if (args.length == 1 && args[0].equals("--version")) {
            out.println(version.version());
            return 0;
        }
        // Banner goes to stderr so stdout stays clean for command output.
        err.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z").format(new Date()) + ": Digdag v" + version);

        boolean verbose = false;

        MainOptions mainOpts = new MainOptions();
        JCommander jc = new JCommander(mainOpts);
        jc.setProgramName(PROGRAM_NAME);

        // Subcommand registration; a third string argument is a command alias.
        jc.addCommand("init", new Init(out, err), "new");
        jc.addCommand("run", new Run(out, err), "r");
        jc.addCommand("check", new Check(out, err), "c");
        jc.addCommand("scheduler", new Sched(version, out, err), "sched");

        jc.addCommand("server", new Server(version, out, err));

        jc.addCommand("push", new Push(version, out, err));
        jc.addCommand("archive", new Archive(out, err));
        jc.addCommand("upload", new Upload(version, out, err));

        jc.addCommand("workflow", new ShowWorkflow(version, out, err), "workflows");
        jc.addCommand("start", new Start(version, out, err));
        jc.addCommand("retry", new Retry(version, out, err));
        jc.addCommand("session", new ShowSession(version, out, err), "sessions");
        jc.addCommand("attempts", new ShowAttempts(version, out, err));
        jc.addCommand("attempt", new ShowAttempt(version, out, err));
        jc.addCommand("reschedule", new Reschedule(version, out, err));
        jc.addCommand("backfill", new Backfill(version, out, err));
        jc.addCommand("log", new ShowLog(version, out, err), "logs");
        jc.addCommand("kill", new Kill(version, out, err));
        jc.addCommand("task", new ShowTask(version, out, err), "tasks");
        jc.addCommand("schedule", new ShowSchedule(version, out, err), "schedules");
        jc.addCommand("delete", new Delete(version, out, err));
        jc.addCommand("version", new Version(version, out, err), "version");

        jc.addCommand("selfupdate", new SelfUpdate(out, err));

        try {
            try {
                jc.parse(args);
            }
            catch (MissingCommandException ex) {
                throw usage(err, "available commands are: "+jc.getCommands().keySet());
            }
            catch (ParameterException ex) {
                if (getParsedCommand(jc) == null) {
                    // go to Run.asImplicit section
                    // NOTE(review): no implicit-run fallback is visible here;
                    // execution falls through to the usage checks below.
                }
                else {
                    // The command itself rejected its parameters; report it.
                    throw ex;
                }
            }
            if (mainOpts.help) {
                throw usage(err, null);
            }

            Command command = getParsedCommand(jc);
            if (command == null) {
                throw usage(err, null);
            }

            // Applies -L/-l/-X options; verbose controls stack-trace printing below.
            verbose = processCommonOptions(err, command);

            command.main();
            return 0;
        }
        catch (ParameterException ex) {
            err.println("error: " + ex.getMessage());
            return 1;
        }
        catch (SystemExitException ex) {
            if (ex.getMessage() != null) {
                err.println("error: " + ex.getMessage());
            }
            return ex.getCode();
        }
        catch (Exception ex) {
            String message = formatExceptionMessage(ex);
            if (message.trim().isEmpty()) {
                // prevent silent crash
                ex.printStackTrace(err);
            }
            else {
                err.println("error: " + message);
                if (verbose) {
                    ex.printStackTrace(err);
                }
            }
            return 1;
        }
    }

    /**
     * Returns the {@link Command} instance JCommander selected, or null when
     * no subcommand was parsed.
     */
    private static Command getParsedCommand(JCommander jc)
    {
        String commandName = jc.getParsedCommand();
        if (commandName == null) {
            return null;
        }
        return (Command) jc.getCommands().get(commandName).getObjects().get(0);
    }

    /**
     * Handles options shared by all commands (help, log level/path, -X system
     * properties) and returns whether verbose (debug/trace) logging is active.
     *
     * @throws SystemExitException for --help or an unknown log level
     */
    private static boolean processCommonOptions(PrintStream err, Command command)
            throws SystemExitException
    {
        if (command.help) {
            throw command.usage(null);
        }

        boolean verbose;

        switch (command.logLevel) {
        case "error":
        case "warn":
        case "info":
            verbose = false;
            break;
        case "debug":
        case "trace":
            verbose = true;
            break;
        default:
            throw usage(err, "Unknown log level '"+command.logLevel+"'");
        }

        configureLogging(command.logLevel, command.logPath);

        // -X KEY=VALUE pairs become JVM system properties.
        for (Map.Entry<String, String> pair : command.systemProperties.entrySet()) {
            System.setProperty(pair.getKey(), pair.getValue());
        }

        return verbose;
    }

    /**
     * Reconfigures logback based on the requested level and log path
     * ("-" means console; a color layout is picked when attached to a TTY).
     */
    private static void configureLogging(String level, String logPath)
    {
        LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
        JoranConfigurator configurator = new JoranConfigurator();
        configurator.setContext(context);
        context.reset();

        // logback uses system property to embed variables in XML file
        Level lv = Level.toLevel(level.toUpperCase(), Level.DEBUG);
        System.setProperty("digdag.log.level", lv.toString());

        String name;
        if (logPath.equals("-")) {
            if (System.console() != null) {
                name = "/digdag/cli/logback-color.xml";
            } else {
                name = "/digdag/cli/logback-console.xml";
            }
        } else {
            System.setProperty("digdag.log.path", logPath);
            name = "/digdag/cli/logback-file.xml";
        }
        try {
            configurator.doConfigure(Main.class.getResource(name));
        } catch (JoranException ex) {
            throw new RuntimeException(ex);
        }
    }

    // called also by Run
    /**
     * Prints the full usage text to {@code err} and returns a
     * {@link SystemExitException} for the caller to throw; {@code error} may
     * be null for plain help output.
     */
    static SystemExitException usage(PrintStream err, String error)
    {
        err.println("Usage: digdag <command> [options...]");
        err.println(" Local-mode commands:");
        err.println(" new <path> create a new workflow project");
        err.println(" r[un] <workflow.dig> run a workflow");
        err.println(" c[heck] show workflow definitions");
        err.println(" sched[uler] run a scheduler server");
        err.println(" selfupdate update digdag to the latest version");
        err.println("");
        err.println(" Server-mode commands:");
        err.println(" server start digdag server");
        err.println("");
        err.println(" Client-mode commands:");
        err.println(" push <project-name> create and upload a new revision");
        err.println(" start <project-name> <name> start a new session attempt of a workflow");
        err.println(" retry <attempt-id> retry a session");
        err.println(" kill <attempt-id> kill a running session attempt");
        err.println(" backfill <project-name> <name> start sessions of a schedule for past times");
        err.println(" reschedule skip sessions of a schedule to a future time");
        err.println(" log <attempt-id> show logs of a session attempt");
        err.println(" workflows [project-name] [name] show registered workflow definitions");
        err.println(" schedules show registered schedules");
        err.println(" sessions show sessions for all workflows");
        err.println(" sessions <project-name> show sessions for all workflows in a project");
        err.println(" sessions <project-name> <name> show sessions for a workflow");
        err.println(" session <session-id> show a single session");
        err.println(" attempts show attempts for all sessions");
        err.println(" attempts <session-id> show attempts for a session");
        err.println(" attempt <attempt-id> show a single attempt");
        err.println(" tasks <attempt-id> show tasks of a session attempt");
        err.println(" delete <project-name> delete a project");
        err.println(" version show client and server version");
        err.println("");
        err.println(" Options:");
        showCommonOptions(err);
        if (error == null) {
            err.println("Use `<command> --help` to see detailed usage of a command.");
            return systemExit(null);
        }
        else {
            return systemExit(error);
        }
    }

    /** Prints the option lines shared by the global and per-command usage. */
    public static void showCommonOptions(PrintStream err)
    {
        err.println(" -L, --log PATH output log messages to a file (default: -)");
        err.println(" -l, --log-level LEVEL log level (error, warn, info, debug or trace)");
        err.println(" -X KEY=VALUE add a performance system config");
        err.println(" -c, --config PATH.properties Configuration file (default: " + defaultConfigPath() + ")");
        err.println("");
    }
}
| |
package com.intellij.openapi.externalSystem.service.project.manage;
import com.intellij.ide.highlighter.ArchiveFileType;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.externalSystem.model.DataNode;
import com.intellij.openapi.externalSystem.model.Key;
import com.intellij.openapi.externalSystem.model.ProjectKeys;
import com.intellij.openapi.externalSystem.model.project.LibraryData;
import com.intellij.openapi.externalSystem.model.project.LibraryPathType;
import com.intellij.openapi.externalSystem.model.project.ProjectData;
import com.intellij.openapi.externalSystem.service.project.ExternalLibraryPathTypeMapper;
import com.intellij.openapi.externalSystem.service.project.IdeModifiableModelsProvider;
import com.intellij.openapi.externalSystem.service.project.IdeUIModifiableModelsProvider;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.externalSystem.util.ExternalSystemConstants;
import com.intellij.openapi.externalSystem.util.ExternalSystemUtil;
import com.intellij.openapi.externalSystem.util.Order;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.LibraryOrderEntry;
import com.intellij.openapi.roots.OrderEntry;
import com.intellij.openapi.roots.OrderRootType;
import com.intellij.openapi.roots.RootPolicy;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtil;
import com.intellij.util.NotNullFunction;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.ContainerUtilRt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.*;
/**
 * Imports external-system {@link LibraryData} nodes into IDE project
 * libraries: creates missing libraries, synchronizes the roots of existing
 * ones, and removes orphaned project libraries during post-processing.
 *
 * <p>Fix: {@code syncPaths} intended to return early when there is nothing to
 * add or remove, but it checked {@code Map.isEmpty()} on maps that always
 * receive one (possibly empty) entry per {@link LibraryPathType}, so the
 * early return never fired and the modifiable library model was acquired even
 * when no roots changed. The check now inspects the contained sets.</p>
 *
 * @author Denis Zhdanov
 * @since 2/15/12 11:32 AM
 */
@Order(ExternalSystemConstants.BUILTIN_LIBRARY_DATA_SERVICE_ORDER)
public class LibraryDataService extends AbstractProjectDataService<LibraryData, Library> {

  private static final Logger LOG = Logger.getInstance("#" + LibraryDataService.class.getName());

  /** Maps a path string to the corresponding {@link File}. */
  @NotNull public static final NotNullFunction<String, File> PATH_TO_FILE = path -> new File(path);

  // Translates external-system path types (binary/source/doc) to IDE root types.
  @NotNull private final ExternalLibraryPathTypeMapper myLibraryPathTypeMapper;

  public LibraryDataService(@NotNull ExternalLibraryPathTypeMapper mapper) {
    myLibraryPathTypeMapper = mapper;
  }

  @NotNull
  @Override
  public Key<LibraryData> getTargetDataKey() {
    return ProjectKeys.LIBRARY;
  }

  @Override
  public void importData(@NotNull final Collection<DataNode<LibraryData>> toImport,
                         @Nullable final ProjectData projectData,
                         @NotNull final Project project,
                         @NotNull final IdeModifiableModelsProvider modelsProvider) {
    for (DataNode<LibraryData> dataNode : toImport) {
      importLibrary(dataNode.getData(), modelsProvider);
    }
  }

  /**
   * Imports a single library: if a library with the same internal name already
   * exists its roots are synchronized, otherwise a new library is created and
   * its roots registered.
   */
  private void importLibrary(@NotNull final LibraryData toImport, @NotNull final IdeModifiableModelsProvider modelsProvider) {
    Map<OrderRootType, Collection<File>> libraryFiles = prepareLibraryFiles(toImport);

    final String libraryName = toImport.getInternalName();
    Library library = modelsProvider.getLibraryByName(libraryName);
    if (library != null) {
      syncPaths(toImport, library, modelsProvider);
      return;
    }
    library = modelsProvider.createLibrary(libraryName);
    final Library.ModifiableModel libraryModel = modelsProvider.getModifiableLibraryModel(library);
    registerPaths(toImport.isUnresolved(), libraryFiles, libraryModel, libraryName);
  }

  /**
   * Groups the library's paths by IDE root type, skipping path types with no
   * entries.
   */
  @NotNull
  public Map<OrderRootType, Collection<File>> prepareLibraryFiles(@NotNull LibraryData data) {
    Map<OrderRootType, Collection<File>> result = ContainerUtilRt.newHashMap();
    for (LibraryPathType pathType : LibraryPathType.values()) {
      Set<String> paths = data.getPaths(pathType);
      if (paths.isEmpty()) {
        continue;
      }
      result.put(myLibraryPathTypeMapper.map(pathType), ContainerUtil.map(paths, PATH_TO_FILE));
    }
    return result;
  }

  /**
   * Adds the given files as roots to the library model. Missing files (or all
   * files, when {@code unresolved}) are registered by URL; directories are
   * added as-is; archive files are added via their JAR root.
   */
  static void registerPaths(boolean unresolved,
                            @NotNull Map<OrderRootType, Collection<File>> libraryFiles,
                            @NotNull Library.ModifiableModel model,
                            @NotNull String libraryName) {
    for (Map.Entry<OrderRootType, Collection<File>> entry : libraryFiles.entrySet()) {
      for (File file : entry.getValue()) {
        VirtualFile virtualFile = unresolved ? null : ExternalSystemUtil.refreshAndFindFileByIoFile(file);
        if (virtualFile == null) {
          if (!unresolved && ExternalSystemConstants.VERBOSE_PROCESSING && entry.getKey() == OrderRootType.CLASSES) {
            LOG.warn(
              String.format("Can't find %s of the library '%s' at path '%s'", entry.getKey(), libraryName, file.getAbsolutePath())
            );
          }
          // Register by URL so the root resolves once the file appears on disk.
          String url = VfsUtil.getUrlForLibraryRoot(file);

          String[] urls = model.getUrls(entry.getKey());
          if (!ArrayUtil.contains(url, urls)) {
            model.addRoot(url, entry.getKey());
          }
          continue;
        }
        if (virtualFile.isDirectory()) {
          VirtualFile[] files = model.getFiles(entry.getKey());
          if (!ArrayUtil.contains(virtualFile, files)) {
            model.addRoot(virtualFile, entry.getKey());
          }
        }
        else {
          VirtualFile root = virtualFile;
          if (virtualFile.getFileType() instanceof ArchiveFileType) {
            root = JarFileSystem.getInstance().getJarRootForLocalFile(virtualFile);
            if (root == null) {
              LOG.warn(String.format(
                "Can't parse contents of the JAR file at path '%s' for the library '%s''", file.getAbsolutePath(), libraryName
              ));
              continue;
            }
          }
          VirtualFile[] files = model.getFiles(entry.getKey());
          if (!ArrayUtil.contains(root, files)) {
            model.addRoot(root, entry.getKey());
          }
        }
      }
    }
  }

  /**
   * Remove orphan project libraries during postprocess phase (after execution of LibraryDependencyDataService#import)
   * in order to use LibraryDataService.isOrphanProjectLibrary method properly
   */
  @Override
  public void postProcess(@NotNull Collection<DataNode<LibraryData>> toImport,
                          @Nullable ProjectData projectData,
                          @NotNull Project project,
                          @NotNull IdeModifiableModelsProvider modelsProvider) {
    if (projectData == null) return;

    // do not cleanup orphan project libraries if import runs from Project Structure Dialog
    // since libraries order entries cannot be imported for modules in that case
    // and hence #isOrphanProjectLibrary() method will work incorrectly
    if (modelsProvider instanceof IdeUIModifiableModelsProvider) return;

    final List<Library> orphanIdeLibraries = ContainerUtil.newSmartList();
    final LibraryTable.ModifiableModel librariesModel = modelsProvider.getModifiableProjectLibrariesModel();

    for (Library library : librariesModel.getLibraries()) {
      // Only touch libraries owned by this external system.
      if (!ExternalSystemApiUtil.isExternalSystemLibrary(library, projectData.getOwner())) continue;
      if (isOrphanProjectLibrary(library, modelsProvider)) {
        orphanIdeLibraries.add(library);
      }
    }

    for (Library library : orphanIdeLibraries) {
      String libraryName = library.getName();
      if (libraryName != null) {
        Library libraryToRemove = librariesModel.getLibraryByName(libraryName);
        if (libraryToRemove != null) {
          librariesModel.removeLibrary(libraryToRemove);
        }
      }
    }
  }

  /**
   * Synchronizes the roots of an existing IDE library with the external
   * library's paths: roots missing on the IDE side are added, roots no longer
   * present externally are removed. No-op for unresolved libraries or when
   * nothing changed.
   */
  private void syncPaths(@NotNull final LibraryData externalLibrary,
                         @NotNull final Library ideLibrary,
                         @NotNull final IdeModifiableModelsProvider modelsProvider) {
    if (externalLibrary.isUnresolved()) {
      return;
    }
    final Map<OrderRootType, Set<String>> toRemove = ContainerUtilRt.newHashMap();
    final Map<OrderRootType, Set<String>> toAdd = ContainerUtilRt.newHashMap();
    for (LibraryPathType pathType : LibraryPathType.values()) {
      OrderRootType ideType = myLibraryPathTypeMapper.map(pathType);
      HashSet<String> toAddPerType = ContainerUtilRt.newHashSet(externalLibrary.getPaths(pathType));
      toAdd.put(ideType, toAddPerType);

      HashSet<String> toRemovePerType = ContainerUtilRt.newHashSet();
      toRemove.put(ideType, toRemovePerType);

      for (VirtualFile ideFile : ideLibrary.getFiles(ideType)) {
        String idePath = ExternalSystemApiUtil.getLocalFileSystemPath(ideFile);
        if (!toAddPerType.remove(idePath)) {
          toRemovePerType.add(ideFile.getUrl());
        }
      }
    }
    // The maps above always hold one entry per path type, so test the sets they
    // contain (a plain Map.isEmpty() check would never detect "no changes").
    boolean unchanged = true;
    for (Set<String> urls : toRemove.values()) {
      if (!urls.isEmpty()) {
        unchanged = false;
        break;
      }
    }
    if (unchanged) {
      for (Set<String> paths : toAdd.values()) {
        if (!paths.isEmpty()) {
          unchanged = false;
          break;
        }
      }
    }
    if (unchanged) {
      // Avoid acquiring the modifiable model (and marking the library dirty)
      // when the roots are already in sync.
      return;
    }

    final Library.ModifiableModel libraryModel = modelsProvider.getModifiableLibraryModel(ideLibrary);
    for (Map.Entry<OrderRootType, Set<String>> entry : toRemove.entrySet()) {
      for (String path : entry.getValue()) {
        libraryModel.removeRoot(path, entry.getKey());
      }
    }

    for (Map.Entry<OrderRootType, Set<String>> entry : toAdd.entrySet()) {
      Map<OrderRootType, Collection<File>> roots = ContainerUtilRt.newHashMap();
      roots.put(entry.getKey(), ContainerUtil.map(entry.getValue(), PATH_TO_FILE));
      registerPaths(externalLibrary.isUnresolved(), roots, libraryModel, externalLibrary.getInternalName());
    }
  }

  /**
   * Returns true when no module order entry in the project references the
   * library (neither by identity nor, for dangling entries, by name).
   */
  private static boolean isOrphanProjectLibrary(@NotNull final Library library,
                                                @NotNull final IdeModifiableModelsProvider modelsProvider) {
    RootPolicy<Boolean> visitor = new RootPolicy<Boolean>() {
      @Override
      public Boolean visitLibraryOrderEntry(LibraryOrderEntry ideDependency, Boolean value) {
        return !ideDependency.isModuleLevel() &&
               (library == ideDependency.getLibrary() ||
                (ideDependency.getLibrary() == null && StringUtil.equals(library.getName(), ideDependency.getLibraryName())));
      }
    };
    for (Module module : modelsProvider.getModules()) {
      for (OrderEntry entry : modelsProvider.getOrderEntries(module)) {
        if (entry.accept(visitor, false)) return false;
      }
    }
    return true;
  }
}
| |
package com.example.lahirudhananjaya.library;
import android.content.Intent;
import android.content.IntentSender;
import android.os.Bundle;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.GooglePlayServicesClient;
import com.google.android.gms.common.Scopes;
import com.google.android.gms.plus.PlusClient;
/**
* A base class to wrap communication with the Google Play Services PlusClient.
*/
public abstract class PlusBaseActivity extends ActionBarActivity
implements GooglePlayServicesClient.ConnectionCallbacks,
GooglePlayServicesClient.OnConnectionFailedListener {
private static final String TAG = PlusBaseActivity.class.getSimpleName();
// A magic number we will use to know that our sign-in error resolution activity has completed
private static final int OUR_REQUEST_CODE = 49404;
// A flag to stop multiple dialogues appearing for the user
private boolean mAutoResolveOnFail;
// A flag to track when a connection is already in progress
public boolean mPlusClientIsConnecting = false;
// This is the helper object that connects to Google Play Services.
private PlusClient mPlusClient;
// The saved result from {@link #onConnectionFailed(ConnectionResult)}. If a connection
// attempt has been made, this is non-null.
// If this IS null, then the connect method is still running.
private ConnectionResult mConnectionResult;
/**
* Called when the {@link PlusClient} revokes access to this app.
*/
protected abstract void onPlusClientRevokeAccess();
/**
* Called when the PlusClient is successfully connected.
*/
protected abstract void onPlusClientSignIn();
/**
* Called when the {@link PlusClient} is disconnected.
*/
protected abstract void onPlusClientSignOut();
/**
* Called when the {@link PlusClient} is blocking the UI. If you have a progress bar widget,
* this tells you when to show or hide it.
*/
protected abstract void onPlusClientBlockingUI(boolean show);
/**
* Called when there is a change in connection state. If you have "Sign in"/ "Connect",
* "Sign out"/ "Disconnect", or "Revoke access" buttons, this lets you know when their states
* need to be updated.
*/
protected abstract void updateConnectButtonState();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Initialize the PlusClient connection.
// Scopes indicate the information about the user your application will be able to access.
mPlusClient =
new PlusClient.Builder(this, this, this).setScopes(Scopes.PLUS_LOGIN,
Scopes.PLUS_ME).build();
}
/**
* Try to sign in the user.
*/
public void signIn() {
if (!mPlusClient.isConnected()) {
// Show the dialog as we are now signing in.
setProgressBarVisible(true);
// Make sure that we will start the resolution (e.g. fire the intent and pop up a
// dialog for the user) for any errors that come in.
mAutoResolveOnFail = true;
// We should always have a connection result ready to resolve,
// so we can start that process.
if (mConnectionResult != null) {
startResolution();
} else {
// If we don't have one though, we can start connect in
// order to retrieve one.
initiatePlusClientConnect();
}
}
updateConnectButtonState();
}
/**
* Connect the {@link PlusClient} only if a connection isn't already in progress. This will
* call back to {@link #onConnected(android.os.Bundle)} or
* {@link #onConnectionFailed(com.google.android.gms.common.ConnectionResult)}.
*/
private void initiatePlusClientConnect() {
if (!mPlusClient.isConnected() && !mPlusClient.isConnecting()) {
mPlusClient.connect();
}
}
/**
* Disconnect the {@link PlusClient} only if it is connected (otherwise, it can throw an error.)
* This will call back to {@link #onDisconnected()}.
*/
private void initiatePlusClientDisconnect() {
if (mPlusClient.isConnected()) {
mPlusClient.disconnect();
}
}
/**
* Sign out the user (so they can switch to another account).
*/
public void signOut() {
// We only want to sign out if we're connected.
if (mPlusClient.isConnected()) {
// Clear the default account in order to allow the user to potentially choose a
// different account from the account chooser.
mPlusClient.clearDefaultAccount();
// Disconnect from Google Play Services, then reconnect in order to restart the
// process from scratch.
initiatePlusClientDisconnect();
Log.v(TAG, "Sign out successful!");
}
updateConnectButtonState();
}
/**
* Revoke Google+ authorization completely.
*/
public void revokeAccess() {
if (mPlusClient.isConnected()) {
// Clear the default account as in the Sign Out.
mPlusClient.clearDefaultAccount();
// Revoke access to this entire application. This will call back to
// onAccessRevoked when it is complete, as it needs to reach the Google
// authentication servers to revoke all tokens.
mPlusClient.revokeAccessAndDisconnect(new PlusClient.OnAccessRevokedListener() {
public void onAccessRevoked(ConnectionResult result) {
updateConnectButtonState();
onPlusClientRevokeAccess();
}
});
}
}
@Override
protected void onStart() {
super.onStart();
initiatePlusClientConnect();
}
@Override
protected void onStop() {
super.onStop();
initiatePlusClientDisconnect();
}
public boolean isPlusClientConnecting() {
return mPlusClientIsConnecting;
}
private void setProgressBarVisible(boolean flag) {
mPlusClientIsConnecting = flag;
onPlusClientBlockingUI(flag);
}
/**
* A helper method to flip the mResolveOnFail flag and start the resolution
* of the ConnectionResult from the failed connect() call.
*/
private void startResolution() {
try {
// Don't start another resolution now until we have a result from the activity we're
// about to start.
mAutoResolveOnFail = false;
// If we can resolve the error, then call start resolution and pass it an integer tag
// we can use to track.
// This means that when we get the onActivityResult callback we'll know it's from
// being started here.
mConnectionResult.startResolutionForResult(this, OUR_REQUEST_CODE);
} catch (IntentSender.SendIntentException e) {
// Any problems, just try to connect() again so we get a new ConnectionResult.
mConnectionResult = null;
initiatePlusClientConnect();
}
}
/**
 * An earlier connection failed, and we're now receiving the result of the resolution attempt
 * by PlusClient.
 *
 * @see #onConnectionFailed(ConnectionResult)
 */
@Override
protected void onActivityResult(int requestCode, int responseCode, Intent intent) {
    updateConnectButtonState();
    if (requestCode != OUR_REQUEST_CODE) {
        return;
    }
    if (responseCode == RESULT_OK) {
        // Resolution succeeded: re-enable auto-resolution and call connect()
        // again. Remaining errors arrive via onConnectionFailed; success via
        // onConnected.
        mAutoResolveOnFail = true;
        initiatePlusClientConnect();
    } else {
        // An error we cannot resolve: sign-in is over, so stop the spinner.
        setProgressBarVisible(false);
    }
}
/**
 * Successfully connected (called by PlusClient)
 */
@Override
public void onConnected(Bundle connectionHint) {
    updateConnectButtonState();
    // Sign-in finished; hide any blocking progress UI before notifying.
    setProgressBarVisible(false);
    onPlusClientSignIn();
}
/**
 * Successfully disconnected (called by PlusClient)
 */
@Override
public void onDisconnected() {
    updateConnectButtonState();
    // Let subclasses react to the completed sign-out.
    onPlusClientSignOut();
}
/**
 * Connection failed for some reason (called by PlusClient)
 * Try and resolve the result. Failure here is usually not an indication of a serious error,
 * just that the user's input is needed.
 *
 * @see #onActivityResult(int, int, Intent)
 */
@Override
public void onConnectionFailed(ConnectionResult result) {
    updateConnectButtonState();
    if (!result.hasResolution()) {
        return;
    }
    // Most failures are user-resolvable; keep the result so the sign-in
    // button can resume the flow later.
    mConnectionResult = result;
    if (mAutoResolveOnFail) {
        // Kick off resolution right away (may show an account chooser or
        // similar UI).
        startResolution();
    }
}
/**
 * @return the {@code PlusClient} instance managed by this activity.
 */
public PlusClient getPlusClient() {
    return mPlusClient;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.server.quorum;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Field;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import javax.security.sasl.SaslException;
import org.apache.jute.BinaryOutputArchive;
import org.apache.zookeeper.PortAssignment;
import org.apache.zookeeper.ZKTestCase;
import org.apache.zookeeper.server.ServerCnxn;
import org.apache.zookeeper.server.ServerCnxnFactory;
import org.apache.zookeeper.server.ZKDatabase;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.persistence.FileTxnSnapLog;
import org.apache.zookeeper.server.quorum.QuorumPeer.QuorumServer;
import org.apache.zookeeper.server.quorum.auth.NullQuorumAuthLearner;
import org.apache.zookeeper.server.quorum.auth.NullQuorumAuthServer;
import org.apache.zookeeper.server.quorum.auth.QuorumAuth;
import org.apache.zookeeper.server.quorum.auth.QuorumAuthLearner;
import org.apache.zookeeper.server.quorum.auth.QuorumAuthServer;
import org.apache.zookeeper.server.quorum.auth.SaslQuorumAuthLearner;
import org.apache.zookeeper.server.quorum.auth.SaslQuorumAuthServer;
import org.apache.zookeeper.server.quorum.flexible.QuorumMaj;
import org.apache.zookeeper.test.ClientBase;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
public class QuorumCnxManagerTest extends ZKTestCase {
private static final Logger LOG = LoggerFactory.getLogger(QuorumCnxManagerTest.class);
// Number of peers in the simulated ensemble (initialized in setUp()).
private int count;
// sid -> QuorumServer view shared by every QuorumCnxManager under test.
private HashMap<Long,QuorumServer> peers;
private int peerQuorumPort[];
private int peerClientPort[];
// Runs each manager's listener; shut down in tearDown().
private ThreadPoolExecutor executor;
/**
 * The maximum number of threads to allow in the connectionExecutors thread
 * pool which will be used to initiate quorum server connections. Defaulting to 20.
 * TODO: Need to tune this param.
 */
private final int quorumCnxnThreadsSize = 20;
// Hostnames the SASL quorum auth server will accept connections from.
private Set<String> authzHosts;
// Temporary JAAS config written by setupSasl(), removed in cleanupSasl().
private static File saslConfigFile = null;
/**
 * Writes a temporary JAAS config with three login contexts — a server
 * context, a learner with valid credentials, and "QuorumLearnerInvalid"
 * with a wrong password for negative tests — and points
 * java.security.auth.login.config at it.
 */
@BeforeClass
public static void setupSasl() throws Exception {
    // Dropped the redundant `new String(...)` wrapper around the literal.
    String jaasEntries = ""
            + "QuorumServer {\n"
            + "       org.apache.zookeeper.server.auth.DigestLoginModule required\n"
            + "       user_test=\"mypassword\";\n"
            + "};\n"
            + "QuorumLearner {\n"
            + "       org.apache.zookeeper.server.auth.DigestLoginModule required\n"
            + "       username=\"test\"\n"
            + "       password=\"mypassword\";\n"
            + "};\n"
            + "QuorumLearnerInvalid {\n"
            + "       org.apache.zookeeper.server.auth.DigestLoginModule required\n"
            + "       username=\"test\"\n"
            + "       password=\"invalid\";\n"
            + "};\n";
    saslConfigFile = File.createTempFile("jaas.", ".conf");
    // try-with-resources closes the writer even if write() throws; the
    // original leaked the FileWriter on that path.
    try (FileWriter fwriter = new FileWriter(saslConfigFile)) {
        fwriter.write(jaasEntries);
    }
    System.setProperty("java.security.auth.login.config",
            saslConfigFile.getAbsolutePath());
}
@AfterClass
public static void cleanupSasl() throws Exception {
    // Best-effort removal of the temporary JAAS file from setupSasl().
    if (saslConfigFile == null) {
        return;
    }
    saslConfigFile.delete();
}
@Before
public void setUp() throws Exception {
    // Build a three-peer view; every peer gets unique quorum, client and
    // election ports so concurrently running tests never collide.
    this.count = 3;
    this.peers = new HashMap<Long,QuorumServer>(count);
    peerQuorumPort = new int[count];
    peerClientPort = new int[count];
    authzHosts = new HashSet<String>();
    for (int sid = 0; sid < count; sid++) {
        peerQuorumPort[sid] = PortAssignment.unique();
        peerClientPort[sid] = PortAssignment.unique();
        QuorumServer server = new QuorumServer(sid, "0.0.0.0",
                peerQuorumPort[sid], PortAssignment.unique(), null);
        peers.put(Long.valueOf(sid), server);
        authzHosts.add(server.hostname);
    }
    // Shared pool that runs each manager's listener thread.
    executor = new ThreadPoolExecutor(3, 10, 60, TimeUnit.SECONDS,
            new SynchronousQueue<Runnable>());
}
@After
public void tearDown() throws Exception {
    // Stop any listener threads still running from the test body.
    if (executor == null) {
        return;
    }
    executor.shutdownNow();
}
@Test(timeout = 30000)
public void testNoAuthConnection() throws Exception {
    // Neither side uses SASL, so a plain connection must succeed.
    QuorumCnxManager peer0 = createAndStartManager(0);
    createAndStartManager(1);
    peer0.connectOne(1);
    assertEventuallyConnected(peer0, 1);
}
@Test(timeout = 30000)
public void testAuthConnection() throws Exception {
    // Both peers require SASL on server and learner side; the handshake
    // succeeds with valid "QuorumLearner" credentials.
    QuorumCnxManager peer0 = createAndStartManager(0, "QuorumServer",
            "QuorumLearner", true, true);
    createAndStartManager(1, "QuorumServer", "QuorumLearner", true, true);
    peer0.connectOne(1);
    assertEventuallyConnected(peer0, 1);
}
/**
 * Peer0 has no auth configured, Peer1 has auth configured.
 * Peer1 connects to peer0, because null auth server sees an auth packet and connection succeeds.
 * Peer0 connects to peer1, but connection isn't initiated because
 * peer0's sid is lower than peer1's
 */
@Test(timeout = 30000)
public void testClientAuthAgainstNoAuthServerWithLowerSid()
        throws Exception {
    // peer0: no SASL at all; peer1: SASL configured but not required.
    QuorumCnxManager peer0 = createAndStartManager(0);
    QuorumCnxManager peer1 = createAndStartManager(1, "QuorumServer",
            "QuorumLearner", false, false);
    peer1.connectOne(0);
    peer0.connectOne(1);
    assertEventuallyConnected(peer0, 1);
}
/**
 * Peer0 has auth configured, Peer1 has no auth configured.
 * Peer0 connects to peer1, but is disconnected, because peer1's sid is
 * higher than peer0.
 * Peer1 connects to peer0, but is disconnected, because peer1 cannot
 * handle auth.
 */
@Test(timeout = 30000)
public void testClientAuthAgainstNoAuthServerWithHigherSid()
        throws Exception {
    // peer0 sends auth (not required on its server side); peer1 has none.
    QuorumCnxManager peer0 = createAndStartManager(0, "QuorumServer",
            "QuorumLearner", false, false);
    QuorumCnxManager peer1 = createAndStartManager(1);
    peer0.connectOne(1);
    peer1.connectOne(0);
    assertEventuallyConnected(peer0, 1);
}
/**
 * No auth learner connects to a server that requires auth, when the
 * auth-required server has a lower sid (peer0).
 * The connection should fail in both directions.
 */
@Test(timeout = 30000)
public void testNoAuthLearnerConnectToAuthRequiredServerWithLowerSid()
        throws Exception {
    // peer0 requires SASL on both sides; peer1 has no auth at all.
    QuorumCnxManager peer0 = createAndStartManager(0, "QuorumServer",
            "QuorumLearner", true, true);
    QuorumCnxManager peer1 = createAndStartManager(1);
    peer0.connectOne(1);
    peer1.connectOne(0);
    assertEventuallyNotConnected(peer0, 1);
}
/**
 * No auth learner connects to a server that requires auth, when the server
 * has a higher sid.
 * The connection should fail in both directions.
 *
 * peer0 should attempt to connect to peer1, but disconnect as its sid is lower
 * peer1 should attempt to connect to peer0, peer0 will accept and add an entry to
 * the senderWorkerMap but peer1 will disconnect because peer1 will start speaking SASL
 * and peer0 will consider this invalid.
 *
 * Due to the unique behavior of peer0 creating an entry
 * in senderWorkerMap for peer1 and then deleting it we use mockito spies to track
 * this behavior.
 */
@Test(timeout = 30000)
public void testNoAuthLearnerConnectToAuthRequiredServerWithHigherSid()
        throws Exception {
    // Spy on both maps so transient put/remove activity is observable.
    ConcurrentHashMap<Long, QuorumCnxManager.SendWorker> senderWorkerMap0 =
            spy(new ConcurrentHashMap<Long, QuorumCnxManager.SendWorker>());
    ConcurrentHashMap<Long, QuorumCnxManager.SendWorker> senderWorkerMap1 =
            spy(new ConcurrentHashMap<Long, QuorumCnxManager.SendWorker>());
    QuorumCnxManager peer0 = createAndStartManager(0, senderWorkerMap0);
    QuorumCnxManager peer1 = createAndStartManager(1, "QuorumServer", "QuorumLearner",
            true, true, senderWorkerMap1);
    peer0.connectOne(1);
    peer1.connectOne(0);
    // peer0 briefly registers peer1, then removes it once SASL chatter
    // arrives; peer1 must never register peer0.
    verify(senderWorkerMap0, timeout(10000)).put(eq(1L), any(QuorumCnxManager.SendWorker.class));
    verify(senderWorkerMap0, timeout(10000)).remove(eq(1L), any(QuorumCnxManager.SendWorker.class));
    verify(senderWorkerMap1, never()).put(anyLong(), any(QuorumCnxManager.SendWorker.class));
}
/**
 * An auth learner connects to a auth server, but the credentials are bad.
 * The peer with the higher sid has the bad credentials.
 * The connection will be denied.
 */
@Test(timeout = 30000)
public void testAuthLearnerBadCredToAuthRequiredServerWithLowerSid()
        throws Exception {
    QuorumCnxManager peer0 = createAndStartManager(0, "QuorumServer",
            "QuorumLearner", true, true);
    // peer1 (higher sid) authenticates with a wrong password.
    QuorumCnxManager peer1 = createAndStartManager(1, "QuorumServer",
            "QuorumLearnerInvalid", true, true);
    peer0.connectOne(1);
    peer1.connectOne(0);
    assertEventuallyNotConnected(peer0, 1);
}
/**
 * An auth learner connects to a auth server, but the credentials are bad.
 * The peer with the lower sid has the bad credentials.
 * The connection will work, because peer1 is connecting to peer0.
 */
@Test(timeout = 30000)
public void testAuthLearnerBadCredToAuthRequiredServerWithHigherSid()
        throws Exception {
    // peer0 (lower sid) has invalid learner credentials, but peer1 is the
    // side that actually initiates, using valid credentials.
    QuorumCnxManager peer0 = createAndStartManager(0, "QuorumServer",
            "QuorumLearnerInvalid", true, true);
    QuorumCnxManager peer1 = createAndStartManager(1, "QuorumServer",
            "QuorumLearner", true, true);
    peer0.connectOne(1);
    peer1.connectOne(0);
    assertEventuallyConnected(peer0, 1);
    assertEventuallyConnected(peer1, 0);
}
/**
 * An auth learner connects to a server that does not require auth, but the
 * credentials are bad. The connection (peer1 -> peer0) should be denied.
 */
@Test(timeout = 30000)
public void testAuthLearnerBadCredToNoAuthServerWithHigherSid() throws Exception {
    QuorumCnxManager peer0 = createAndStartManager(0, "QuorumServer",
            "QuorumLearner", false, false);
    // peer1 requires auth and presents invalid credentials.
    QuorumCnxManager peer1 = createAndStartManager(1, "QuorumServer",
            "QuorumLearnerInvalid", true, true);
    peer1.connectOne(0);
    assertEventuallyNotConnected(peer1, 0);
}
/**
 * An auth learner connects to a auth server, but the credentials are bad.
 * The peer with the lower sid has the bad credentials.
 * The connection will work, because peer0 is connecting to peer1 and peer1
 * server doesn't require sasl
 */
@Test(timeout = 30000)
public void testAuthLearnerBadCredToNoAuthServerWithLowerSid() throws Exception {
    // peer0 (lower sid) has invalid credentials, but peer1's server side
    // does not require SASL, so the connection still goes through.
    QuorumCnxManager peer0 = createAndStartManager(0, "QuorumServer",
            "QuorumLearnerInvalid", true, true);
    QuorumCnxManager peer1 = createAndStartManager(1, "QuorumServer",
            "QuorumLearner", false, true);
    peer0.connectOne(1);
    assertEventuallyConnected(peer0, 1);
    assertEventuallyConnected(peer1, 0);
}
/**
 * Test verifies that the LearnerHandler should authenticate the connecting
 * quorumpeer. Here its simulating authentication failure and it should throw
 * SaslException
 */
@Test(timeout = 30000)
public void testLearnerHandlerAuthFailed() throws Exception {
    File testData = ClientBase.createTmpDir();
    Socket leaderSocket = getSocketPair();
    File tmpDir = File.createTempFile("test", ".dir", testData);
    tmpDir.delete();
    tmpDir.mkdir();
    Leader leader = null;
    // Leader requires server-side SASL auth.
    QuorumPeer peer = createQuorumPeer(tmpDir, true, false, true,
            "QuorumLearner", "QuorumServer",
            QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL_DEFAULT_VALUE);
    leader = createLeader(tmpDir, peer);
    peer.leader = leader;
    // authentication failed as qpserver didn't get auth packet from qpclient.
    try {
        new LearnerHandler(leaderSocket,
                new BufferedInputStream(leaderSocket.getInputStream()), leader);
        Assert.fail("Must throw exception as there is an authentication failure");
    } catch (SaslException e){
        // The failed learner must not appear in the leader's learner list.
        Assert.assertEquals("Mistakely added to learners", 0,
                leader.getLearners().size());
    }
    ClientBase.recursiveDelete(testData);
}
/**
 * Test verifies that the Leader should authenticate the connecting learner
 * quorumpeer. After the successful authentication it should add this
 * learner to the learnerHandler list.
 */
@Test(timeout = 30000)
public void testAuthLearnerConnectsToServerWithAuthRequired()
        throws Exception {
    // Learner side: SASL required, valid credentials.
    File testDataLearner = ClientBase.createTmpDir();
    File tmpDir = File.createTempFile("test", ".dir", testDataLearner);
    tmpDir.delete();
    FileTxnSnapLog ftsl = new FileTxnSnapLog(tmpDir, tmpDir);
    QuorumPeer learnerPeer = createQuorumPeer(tmpDir, true, true, true,
            "QuorumLearner", "QuorumServer",
            QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL_DEFAULT_VALUE);
    SimpleLearner sl = new SimpleLearner(ftsl, learnerPeer);
    // Leader side: SASL required as well.
    File testDataLeader = ClientBase.createTmpDir();
    tmpDir = File.createTempFile("test", ".dir", testDataLeader);
    tmpDir.delete();
    tmpDir.mkdir();
    Leader leader = null;
    QuorumPeer peer = createQuorumPeer(tmpDir, true, true, true, "QuorumLearner",
            "QuorumServer",
            QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL_DEFAULT_VALUE);
    CountDownLatch learnerLatch = new CountDownLatch(1);
    leader = createSimpleLeader(tmpDir, peer, learnerLatch);
    peer.leader = leader;
    startLearnerCnxAcceptorThread(leader);
    LOG.info("Start establishing a connection with the Leader");
    String hostname = getLeaderHostname(peer);
    sl.connectToLeader(peer.getQuorumAddress(), hostname);
    // wait till leader socket soTimeout period
    Assert.assertTrue("Leader should accept the auth learner connection",
            learnerLatch.await(leader.self.tickTime * leader.self.initLimit + 1000,
                    TimeUnit.MILLISECONDS));
    Assert.assertEquals("Failed to added the learner", 1,
            leader.getLearners().size());
    ClientBase.recursiveDelete(testDataLearner);
    ClientBase.recursiveDelete(testDataLeader);
}
/**
 * Looks up this peer's own hostname inside its quorum view; fails the test
 * if the peer's own sid is not present in the view.
 */
private String getLeaderHostname(QuorumPeer peer) {
    String hostname = null;
    long myId = peer.getId();
    for (QuorumServer server : peer.getView().values()) {
        if (server.id != myId) {
            continue;
        }
        hostname = server.hostname;
        break;
    }
    Assert.assertNotNull("Didn't find leader", hostname);
    return hostname;
}
/**
 * Test verifies that an auth-enabled learner can connect to a Leader that
 * does NOT require auth; the learner should still be authenticated and
 * added to the learnerHandler list.
 */
@Test(timeout = 30000)
public void testAuthLearnerConnectsToServerWithAuthNotRequired()
        throws Exception {
    // Learner side: SASL required, valid credentials.
    File testDataLearner = ClientBase.createTmpDir();
    File tmpDir = File.createTempFile("test", ".dir", testDataLearner);
    tmpDir.delete();
    FileTxnSnapLog ftsl = new FileTxnSnapLog(tmpDir, tmpDir);
    QuorumPeer learnerPeer = createQuorumPeer(tmpDir, true, true, true,
            "QuorumLearner", "QuorumServer",
            QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL_DEFAULT_VALUE);
    SimpleLearner sl = new SimpleLearner(ftsl, learnerPeer);
    // Leader side: SASL configured but server auth NOT required.
    File testDataLeader = ClientBase.createTmpDir();
    tmpDir = File.createTempFile("test", ".dir", testDataLeader);
    tmpDir.delete();
    tmpDir.mkdir();
    Leader leader = null;
    QuorumPeer peer = createQuorumPeer(tmpDir, true, true, false, "QuorumLearner",
            "QuorumServer",
            QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL_DEFAULT_VALUE);
    CountDownLatch learnerLatch = new CountDownLatch(1);
    leader = createSimpleLeader(tmpDir, peer, learnerLatch);
    peer.leader = leader;
    startLearnerCnxAcceptorThread(leader);
    LOG.info("Start establishing a connection with the Leader");
    String hostname = getLeaderHostname(peer);
    sl.connectToLeader(peer.getQuorumAddress(), hostname);
    // wait till leader socket soTimeout period
    Assert.assertTrue("Leader should accept the auth learner connection",
            learnerLatch.await(leader.self.tickTime * leader.self.initLimit + 1000,
                    TimeUnit.MILLISECONDS));
    Assert.assertEquals("Failed to added the learner", 1,
            leader.getLearners().size());
    ClientBase.recursiveDelete(testDataLearner);
    ClientBase.recursiveDelete(testDataLeader);
}
/**
 * Starts the leader's LearnerCnxAcceptor and blocks (up to 15s) until the
 * acceptor thread has actually begun running, so a learner connecting
 * immediately afterwards cannot race the accept loop.
 */
private void startLearnerCnxAcceptorThread(Leader leader)
        throws InterruptedException {
    final CountDownLatch cnxAcceptorWatcher = new CountDownLatch(1);
    leader.cnxAcceptor = leader.new LearnerCnxAcceptor(){
        @Override
        public void run() {
            // Signal before entering the accept loop.
            cnxAcceptorWatcher.countDown();
            super.run();
        }
    };
    leader.cnxAcceptor.start();
    // waiting to start the thread
    Assert.assertTrue("Failed to start leader.cnxAcceptor thread!",
            cnxAcceptorWatcher.await(15, TimeUnit.SECONDS));
    LOG.info("Started leader.cnxAcceptor:{} thread, state:{}",
            leader.cnxAcceptor.getName(), leader.cnxAcceptor.getState());
}
/**
 * Test verifies that the Auth enabled Learner is connecting to a Null Auth
 * Leader server. Learner is failing to get an auth response from Null Auth
 * Leader and fails the connection establishment.
 */
@Test(timeout = 30000)
public void testAuthLearnerConnectsToNullAuthServer()
        throws Exception {
    // Learner side: SASL required.
    File testDataLearner = ClientBase.createTmpDir();
    File tmpDir = File.createTempFile("test", ".dir", testDataLearner);
    tmpDir.delete();
    FileTxnSnapLog ftsl = new FileTxnSnapLog(tmpDir, tmpDir);
    QuorumPeer learnerPeer = createQuorumPeer(tmpDir, true, true, true,
            "QuorumLearner", "QuorumServer",
            QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL_DEFAULT_VALUE);
    SimpleLearner sl = new SimpleLearner(ftsl, learnerPeer);
    // Leader side: auth entirely disabled (null auth server).
    File testDataLeader = ClientBase.createTmpDir();
    tmpDir = File.createTempFile("test", ".dir", testDataLeader);
    tmpDir.delete();
    tmpDir.mkdir();
    Leader leader = null;
    QuorumPeer peer = createQuorumPeer(tmpDir, false, false, false,
            "QuorumLearner", "QuorumServer",
            QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL_DEFAULT_VALUE);
    CountDownLatch learnerLatch = new CountDownLatch(1);
    leader = createSimpleLeader(tmpDir, peer, learnerLatch);
    peer.leader = leader;
    startLearnerCnxAcceptorThread(leader);
    LOG.info("Start establishing a connection with the Leader");
    try {
        String hostname = getLeaderHostname(peer);
        sl.connectToLeader(peer.getQuorumAddress(), hostname);
        Assert.fail("Must throw exception as server doesn't supports authentication");
    } catch (IOException e) {
        // expected
        Assert.assertTrue("Leader should accept the auth learner connection",
                learnerLatch.await(leader.self.tickTime * leader.self.initLimit + 500,
                        TimeUnit.MILLISECONDS));
    }
    ClientBase.recursiveDelete(testDataLearner);
    ClientBase.recursiveDelete(testDataLeader);
}
/**
 * Test verifies that the No Auth enabled Learner is connecting to a No Auth
 * Leader server. Learner should be able to establish a connection with
 * Leader as auth is not required.
 */
@Test(timeout = 30000)
public void testNoAuthLearnerConnectsToServerWithAuthNotRequired()
        throws Exception {
    // Learner side: SASL enabled but not required, empty service principal.
    File testDataLearner = ClientBase.createTmpDir();
    File tmpDir = File.createTempFile("test", ".dir", testDataLearner);
    tmpDir.delete();
    FileTxnSnapLog ftsl = new FileTxnSnapLog(tmpDir, tmpDir);
    QuorumPeer learnerPeer = createQuorumPeer(tmpDir, true, false, false,
            "QuorumLearner", "QuorumServer", "");
    SimpleLearner sl = new SimpleLearner(ftsl, learnerPeer);
    // Leader side: SASL enabled but not required either.
    File testDataLeader = ClientBase.createTmpDir();
    tmpDir = File.createTempFile("test", ".dir", testDataLeader);
    tmpDir.delete();
    tmpDir.mkdir();
    Leader leader = null;
    QuorumPeer peer = createQuorumPeer(tmpDir, true, false, false, "QuorumLearner",
            "QuorumServer",
            QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL_DEFAULT_VALUE);
    CountDownLatch learnerLatch = new CountDownLatch(1);
    leader = createSimpleLeader(tmpDir, peer, learnerLatch);
    peer.leader = leader;
    startLearnerCnxAcceptorThread(leader);
    LOG.info("Start establishing a connection with the Leader");
    String hostname = getLeaderHostname(peer);
    sl.connectToLeader(peer.getQuorumAddress(), hostname);
    Assert.assertTrue("Leader should accept no auth learner connection",
            learnerLatch.await(leader.self.tickTime * leader.self.initLimit + 1000,
                    TimeUnit.MILLISECONDS));
    ClientBase.recursiveDelete(testDataLearner);
    ClientBase.recursiveDelete(testDataLeader);
}
/**
 * Test verifies that a no-auth Learner connecting to a Leader that
 * requires auth is rejected: the learner is never added to the
 * learnerHandler list.
 */
@Test(timeout = 30000)
public void testNoAuthLearnerConnectsToServerWithAuthRequired()
        throws Exception {
    // Learner side: SASL enabled but not required, empty service principal.
    File testDataLearner = ClientBase.createTmpDir();
    File tmpDir = File.createTempFile("test", ".dir", testDataLearner);
    tmpDir.delete();
    FileTxnSnapLog ftsl = new FileTxnSnapLog(tmpDir, tmpDir);
    QuorumPeer learnerPeer = createQuorumPeer(tmpDir, true, false, false,
            "QuorumLearner", "QuorumServer", "");
    SimpleLearner sl = new SimpleLearner(ftsl, learnerPeer);
    // Leader side: SASL strictly required.
    File testDataLeader = ClientBase.createTmpDir();
    tmpDir = File.createTempFile("test", ".dir", testDataLeader);
    tmpDir.delete();
    tmpDir.mkdir();
    Leader leader = null;
    QuorumPeer peer = createQuorumPeer(tmpDir, true, true, true, "QuorumLearner",
            "QuorumServer",
            QuorumAuth.QUORUM_KERBEROS_SERVICE_PRINCIPAL_DEFAULT_VALUE);
    CountDownLatch learnerLatch = new CountDownLatch(1);
    leader = createSimpleLeader(tmpDir, peer, learnerLatch);
    peer.leader = leader;
    startLearnerCnxAcceptorThread(leader);
    LOG.info("Start establishing a connection with the Leader");
    String hostname = getLeaderHostname(peer);
    sl.connectToLeader(peer.getQuorumAddress(), hostname);
    // The latch must NOT trip: the learner is rejected by the leader.
    Assert.assertFalse("Leader shouldn't accept no auth learner connection",
            learnerLatch.await(leader.self.tickTime * leader.self.initLimit + 1000,
                    TimeUnit.MILLISECONDS));
    ClientBase.recursiveDelete(testDataLearner);
    ClientBase.recursiveDelete(testDataLeader);
}
/**
 * Test verifies that the No Auth enabled Learner is connecting to a No Auth
 * Leader server. Learner should be able to establish a connection with
 * Leader as auth is not required.
 */
@Test(timeout = 30000)
public void testNoAuthLearnerConnectsToNullAuthServer()
        throws Exception {
    // Learner side: SASL enabled but not required.
    File testDataLearner = ClientBase.createTmpDir();
    File tmpDir = File.createTempFile("test", ".dir", testDataLearner);
    tmpDir.delete();
    FileTxnSnapLog ftsl = new FileTxnSnapLog(tmpDir, tmpDir);
    QuorumPeer learnerPeer = createQuorumPeer(tmpDir, true, false, false,
            "QuorumLearner", "QuorumServer", "");
    SimpleLearner sl = new SimpleLearner(ftsl, learnerPeer);
    // Leader side: auth entirely disabled (null auth server).
    File testDataLeader = ClientBase.createTmpDir();
    tmpDir = File.createTempFile("test", ".dir", testDataLeader);
    tmpDir.delete();
    tmpDir.mkdir();
    Leader leader = null;
    QuorumPeer peer = createQuorumPeer(tmpDir, false, false, false, "", "",
            "");
    CountDownLatch learnerLatch = new CountDownLatch(1);
    leader = createSimpleLeader(tmpDir, peer, learnerLatch);
    peer.leader = leader;
    startLearnerCnxAcceptorThread(leader);
    LOG.info("Start establishing a connection with the Leader");
    String hostname = getLeaderHostname(peer);
    sl.connectToLeader(peer.getQuorumAddress(), hostname);
    Assert.assertTrue("Leader should accept no auth learner connection",
            learnerLatch.await(leader.self.tickTime * leader.self.initLimit + 1000,
                    TimeUnit.MILLISECONDS));
    ClientBase.recursiveDelete(testDataLearner);
    ClientBase.recursiveDelete(testDataLeader);
}
/**
 * SaslQuorumAuthServer throws exception on receiving an invalid quorum
 * auth packet.
 */
@Test(timeout = 30000)
public void testSaslQuorumAuthServerWithInvalidQuorumAuthPacket()
        throws Exception {
    Socket socket = getSocketPair();
    DataOutputStream dout = new DataOutputStream(socket.getOutputStream());
    BufferedOutputStream bufferedOutput = new BufferedOutputStream(dout);
    BinaryOutputArchive boa = BinaryOutputArchive
            .getArchive(bufferedOutput);
    QuorumAuthPacket authPacket = QuorumAuth
            .createPacket(QuorumAuth.Status.IN_PROGRESS, null);
    authPacket.setMagic(Long.MAX_VALUE); // invalid magic number
    boa.writeRecord(authPacket, null);
    bufferedOutput.flush();
    // Server requires auth; the corrupted magic must be rejected.
    QuorumAuthServer authServer = new SaslQuorumAuthServer(true,
            "QuorumServer", authzHosts);
    BufferedInputStream is = new BufferedInputStream(
            socket.getInputStream());
    try {
        authServer.authenticate(socket, new DataInputStream(is));
        Assert.fail("Must throw exception as QuorumAuthPacket is invalid");
    } catch (SaslException e) {
        // expected
    }
}
/**
 * NullQuorumAuthServer should return true when no auth quorum packet
 * received and timed out.
 */
@Test(timeout = 30000)
public void testNullQuorumAuthServerShouldReturnTrue()
        throws Exception {
    Socket socket = getSocketPair();
    QuorumAuthServer authServer = new NullQuorumAuthServer();
    BufferedInputStream is = new BufferedInputStream(
            socket.getInputStream());
    // It will throw exception and fail the
    // test if any unexpected error. Not adding any extra assertion.
    authServer.authenticate(socket, new DataInputStream(is));
}
/**
 * NullQuorumAuthServer should return true on receiving a valid quorum auth
 * packet.
 */
@Test(timeout = 30000)
public void testNullQuorumAuthServerWithValidQuorumAuthPacket()
        throws Exception {
    Socket socket = getSocketPair();
    DataOutputStream dout = new DataOutputStream(socket.getOutputStream());
    BufferedOutputStream bufferedOutput = new BufferedOutputStream(dout);
    BinaryOutputArchive boa = BinaryOutputArchive
            .getArchive(bufferedOutput);
    // Well-formed IN_PROGRESS packet with the default (valid) magic.
    QuorumAuthPacket authPacket = QuorumAuth
            .createPacket(QuorumAuth.Status.IN_PROGRESS, null);
    boa.writeRecord(authPacket, null);
    bufferedOutput.flush();
    QuorumAuthServer authServer = new NullQuorumAuthServer();
    BufferedInputStream is = new BufferedInputStream(
            socket.getInputStream());
    // It will throw exception and fail the
    // test if any unexpected error. Not adding any extra assertion.
    authServer.authenticate(socket, new DataInputStream(is));
}
// Convenience overload: no-op auth and a fresh sender-worker map.
private QuorumCnxManager createAndStartManager(long sid) {
    return createAndStartManager(sid, new ConcurrentHashMap<Long, QuorumCnxManager.SendWorker>());
}
// Builds a manager with null (no-op) auth, submits its listener to the
// shared executor and blocks until the election port is actually bound.
private QuorumCnxManager createAndStartManager(long sid, ConcurrentHashMap<Long, QuorumCnxManager.SendWorker> senderWorkerMap) {
    QuorumCnxManager peer = new QuorumCnxManager(sid, peers,
            new NullQuorumAuthServer(), new NullQuorumAuthLearner(), 10000,
            false, quorumCnxnThreadsSize, false,
            senderWorkerMap);
    executor.submit(peer.listener);
    InetSocketAddress electionAddr = peer.view.get(sid).electionAddr;
    waitForElectionAddrBinding(electionAddr, 15);
    return peer;
}
// SASL overload with a fresh sender-worker map.
private QuorumCnxManager createAndStartManager(long sid,
                                               String serverLoginContext,
                                               String learnerLoginContext,
                                               boolean serverRequireSasl,
                                               boolean learnerRequireSasl) throws Exception {
    return createAndStartManager(sid, serverLoginContext, learnerLoginContext, serverRequireSasl, learnerRequireSasl, new ConcurrentHashMap<Long, QuorumCnxManager.SendWorker>());
}
// Builds a manager with SASL auth using the given JAAS login contexts,
// submits its listener and blocks until the election port is bound.
private QuorumCnxManager createAndStartManager(long sid,
                                               String serverLoginContext,
                                               String learnerLoginContext,
                                               boolean serverRequireSasl,
                                               boolean learnerRequireSasl,
                                               ConcurrentHashMap<Long, QuorumCnxManager.SendWorker> senderWorkerMap)
        throws Exception {
    QuorumAuthLearner authClient = new SaslQuorumAuthLearner(learnerRequireSasl,
            "NOT_USING_KRB_PRINCIPAL", learnerLoginContext);
    QuorumAuthServer authServer = new SaslQuorumAuthServer(serverRequireSasl,
            serverLoginContext, authzHosts);
    QuorumCnxManager peer = new QuorumCnxManager(sid, peers,
            authServer, authClient, 10000, false, quorumCnxnThreadsSize, true, senderWorkerMap);
    executor.submit(peer.listener);
    InetSocketAddress electionAddr = peer.view.get(sid).electionAddr;
    waitForElectionAddrBinding(electionAddr, 15);
    return peer;
}
/**
 * Repeatedly probes the election address until a TCP connection succeeds,
 * failing the test if it never does within {@code retries} attempts.
 */
private void waitForElectionAddrBinding(InetSocketAddress electionAddr,
        int retries) {
    boolean success = false;
    // Stop as soon as a probe succeeds; the original kept looping (and
    // sleeping 500ms per iteration) through every remaining retry even
    // after the port was known to be bound.
    while (retries > 0 && !success) {
        Socket sock = new Socket();
        try {
            sock.setTcpNoDelay(true);
            sock.setSoTimeout(5000);
            sock.connect(electionAddr, 5000);
            success = true;
        } catch (IOException e) {
            LOG.error("IOException while checking election addr", e);
        } finally {
            cleanup(sock);
        }
        if (!success) {
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers can observe it.
                Thread.currentThread().interrupt();
            }
        }
        retries--;
    }
    Assert.assertTrue("Did not connect to election port", success);
}
// Closes the probe socket, logging (but otherwise ignoring) any failure.
private void cleanup(Socket sock) {
    try {
        sock.close();
    } catch (IOException closeError) {
        LOG.error("Exception while closing socket", closeError);
    }
}
// Polls (up to ~20s) until the manager reports a connection to sid, then
// asserts the connection exists.
private void assertEventuallyConnected(QuorumCnxManager peer, long sid)
        throws Exception {
    int attempts = 20;
    while (attempts-- > 0 && !peer.connectedToPeer(sid)) {
        Thread.sleep(1000);
    }
    Assert.assertTrue("Not connected to peer", peer.connectedToPeer(sid));
}
// Gives the connection ~3s to (wrongly) appear, then asserts it never did.
private void assertEventuallyNotConnected(QuorumCnxManager peer, long sid)
        throws Exception {
    int attempts = 3;
    while (attempts-- > 0 && !peer.connectedToPeer(sid)) {
        Thread.sleep(1000);
    }
    Assert.assertFalse("Connected to peer (shouldn't be)",
            peer.connectedToPeer(sid));
}
/**
 * Builds a two-server QuorumPeer for testing: short sync/init limits, a
 * no-op connection factory, optional SASL auth objects, and zeroed epoch
 * files under {@code tmpDir/version-2}.
 *
 * @param isQuorumAuthEnabled       create SASL auth server/learner objects
 * @param isQuorumLearnerAuthRequired learner side must authenticate
 * @param isQuorumServerAuthRequired  server side must authenticate
 */
private QuorumPeer createQuorumPeer(File tmpDir,
        boolean isQuorumAuthEnabled, boolean isQuorumLearnerAuthRequired,
        boolean isQuorumServerAuthRequired, String quorumLearnerLoginContext,
        String quorumServerLoginContext, String quorumServicePrincipal)
        throws IOException, FileNotFoundException {
    QuorumPeer peer = QuorumPeer.testingQuorumPeer();
    peer.syncLimit = 2;
    peer.initLimit = 2;
    peer.tickTime = 2000;
    peer.quorumPeers = new HashMap<Long, QuorumServer>();
    peer.quorumPeers.put(0L,
            new QuorumServer(0, "0.0.0.0", PortAssignment.unique(), null, null));
    peer.quorumPeers.put(1L,
            new QuorumServer(1, "0.0.0.0", PortAssignment.unique(), null, null));
    peer.setQuorumVerifier(new QuorumMaj(3));
    peer.setCnxnFactory(new NullServerCnxnFactory());
    // auth
    if (isQuorumAuthEnabled) {
        peer.authServer = new SaslQuorumAuthServer(
                isQuorumServerAuthRequired, quorumServerLoginContext, authzHosts);
        peer.authLearner = new SaslQuorumAuthLearner(
                isQuorumLearnerAuthRequired, quorumServicePrincipal,
                quorumLearnerLoginContext);
    }
    File version2 = new File(tmpDir, "version-2");
    version2.mkdir();
    writeZeroEpochFile(version2, "currentEpoch");
    writeZeroEpochFile(version2, "acceptedEpoch");
    return peer;
}

// Writes a "0\n" epoch marker file; try-with-resources closes the stream
// even when write() throws (the original leaked the FileOutputStream then).
private static void writeZeroEpochFile(File dir, String name)
        throws IOException {
    try (FileOutputStream fos = new FileOutputStream(new File(dir, name))) {
        fos.write("0\n".getBytes());
    }
}
/**
 * A ServerCnxnFactory stub whose operations are all no-ops, letting a
 * QuorumPeer be constructed in tests without opening client ports.
 */
private static final class NullServerCnxnFactory extends ServerCnxnFactory {
    public void startup(ZooKeeperServer zkServer)
            throws IOException, InterruptedException {
    }
    public void start() {
    }
    public void shutdown() {
    }
    public void setMaxClientCnxnsPerHost(int max) {
    }
    public void join() throws InterruptedException {
    }
    public int getMaxClientCnxnsPerHost() {
        return 0;
    }
    public int getLocalPort() {
        return 0;
    }
    public InetSocketAddress getLocalAddress() {
        return null;
    }
    public Iterable<ServerCnxn> getConnections() {
        return null;
    }
    public void configure(InetSocketAddress addr, int maxClientCnxns)
            throws IOException {
    }
    public void closeSession(long sessionId) {
    }
    public void closeAll() {
    }
    @Override
    public int getNumAliveConnections() {
        return 0;
    }
}
    /**
     * Opens an ephemeral listening socket and returns a client {@link Socket}
     * connected to it, with a 5-second read timeout.
     *
     * NOTE(review): the {@link ServerSocket} is never accepted from or closed
     * here, so the connection sits in the accept backlog and the listener
     * leaks for the life of the test JVM — presumably acceptable for these
     * short-lived tests, but confirm nothing reads from the server side.
     */
    private static Socket getSocketPair() throws IOException {
        ServerSocket ss = new ServerSocket();
        ss.bind(null); // bind to an ephemeral port on the wildcard address
        InetSocketAddress endPoint = (InetSocketAddress) ss
                .getLocalSocketAddress();
        Socket s = new Socket(endPoint.getAddress(), endPoint.getPort());
        s.setSoTimeout(5000);
        return s;
    }
    /** Creates a {@link Leader} backed by a freshly prepared {@link LeaderZooKeeperServer} (see {@code prepareLeader}). */
    private Leader createLeader(File tmpDir, QuorumPeer peer) throws IOException,
            NoSuchFieldException, IllegalAccessException {
        LeaderZooKeeperServer zk = prepareLeader(tmpDir, peer);
        return new Leader(peer, zk);
    }
    /**
     * Creates a {@link SimpleLeader} that counts down {@code learnerLatch}
     * every time a learner handler registers, so tests can wait for learners.
     */
    private Leader createSimpleLeader(File tmpDir, QuorumPeer peer,
            CountDownLatch learnerLatch) throws IOException,
            NoSuchFieldException, IllegalAccessException {
        LeaderZooKeeperServer zk = prepareLeader(tmpDir, peer);
        return new SimpleLeader(peer, zk, learnerLatch);
    }
    /**
     * A {@link Leader} that signals {@code learnerLatch} each time a
     * {@link LearnerHandler} is registered, letting tests block until a
     * learner has connected.
     */
    class SimpleLeader extends Leader {
        // Counted down once per registered learner handler.
        final CountDownLatch learnerLatch;

        SimpleLeader(QuorumPeer self, LeaderZooKeeperServer zk,
                CountDownLatch latch) throws IOException {
            super(self, zk);
            this.learnerLatch = latch;
        }

        @Override
        void addLearnerHandler(LearnerHandler learner) {
            super.addLearnerHandler(learner);
            learnerLatch.countDown();
        }
    }
    /**
     * Wires a {@link LeaderZooKeeperServer} to {@code peer}: installs a
     * {@link FileTxnSnapLog} rooted at {@code tmpDir} and, via reflection on
     * the private {@code myQuorumAddr} field, assigns the peer a unique local
     * quorum port.
     */
    private LeaderZooKeeperServer prepareLeader(File tmpDir, QuorumPeer peer)
            throws IOException, NoSuchFieldException, IllegalAccessException {
        FileTxnSnapLog logFactory = new FileTxnSnapLog(tmpDir, tmpDir);
        peer.setTxnFactory(logFactory);
        // myQuorumAddr is private and has no setter in this version of the
        // class, hence the reflective write.
        Field addrField = peer.getClass().getDeclaredField("myQuorumAddr");
        addrField.setAccessible(true);
        addrField.set(peer, new InetSocketAddress(PortAssignment.unique()));
        ZKDatabase zkDb = new ZKDatabase(logFactory);
        LeaderZooKeeperServer zk = new LeaderZooKeeperServer(logFactory, peer,
                new ZooKeeperServer.BasicDataTreeBuilder(), zkDb);
        return zk;
    }
    /**
     * A {@link LearnerZooKeeperServer} for tests: startup is a no-op (only
     * recorded via {@code startupCalled}) and the associated {@link Learner}
     * is injected directly by {@link SimpleLearner}.
     */
    class SimpleLearnerZooKeeperServer extends LearnerZooKeeperServer {
        // Set to true when startup() is invoked; never reset.
        boolean startupCalled;

        public SimpleLearnerZooKeeperServer(FileTxnSnapLog ftsl,
                QuorumPeer self) throws IOException {
            super(ftsl, 2000, 2000, 2000, null, new ZKDatabase(ftsl), self);
        }

        // Assigned externally (see SimpleLearner's constructor).
        Learner learner;

        @Override
        public Learner getLearner() {
            return learner;
        }

        @Override
        public void startup() {
            // Deliberately do not start the real server machinery in tests.
            startupCalled = true;
        }
    }
    /**
     * A minimal {@link Learner} wired to a {@link SimpleLearnerZooKeeperServer};
     * the server's {@code learner} back-reference is set to this instance so
     * {@code getLearner()} resolves correctly.
     */
    class SimpleLearner extends Learner {
        SimpleLearner(FileTxnSnapLog ftsl, QuorumPeer learner)
                throws IOException {
            self = learner;
            zk = new SimpleLearnerZooKeeperServer(ftsl, self);
            ((SimpleLearnerZooKeeperServer) zk).learner = this;
        }
    }
}
| |
/*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.util.concurrent;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Supplier;
import com.google.common.base.Throwables;
import com.google.common.collect.Lists;
import com.google.common.collect.Queues;
import com.google.common.util.concurrent.ForwardingListenableFuture.SimpleForwardingListenableFuture;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.Delayed;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.annotation.concurrent.GuardedBy;
/**
* Factory and utility methods for {@link java.util.concurrent.Executor}, {@link
* ExecutorService}, and {@link ThreadFactory}.
*
* @author Eric Fellheimer
* @author Kyle Littlefield
* @author Justin Mahoney
* @since 3.0
*/
@GwtCompatible(emulated = true)
public final class MoreExecutors {
  private MoreExecutors() {} // static utility class; not instantiable
/**
* Converts the given ThreadPoolExecutor into an ExecutorService that exits
* when the application is complete. It does so by using daemon threads and
* adding a shutdown hook to wait for their completion.
*
* <p>This is mainly for fixed thread pools.
* See {@link Executors#newFixedThreadPool(int)}.
*
* @param executor the executor to modify to make sure it exits when the
* application is finished
* @param terminationTimeout how long to wait for the executor to
* finish before terminating the JVM
* @param timeUnit unit of time for the time parameter
* @return an unmodifiable version of the input which will not hang the JVM
*/
@Beta
@GwtIncompatible("TODO")
public static ExecutorService getExitingExecutorService(
ThreadPoolExecutor executor, long terminationTimeout, TimeUnit timeUnit) {
return new Application()
.getExitingExecutorService(executor, terminationTimeout, timeUnit);
}
/**
* Converts the given ScheduledThreadPoolExecutor into a
* ScheduledExecutorService that exits when the application is complete. It
* does so by using daemon threads and adding a shutdown hook to wait for
* their completion.
*
* <p>This is mainly for fixed thread pools.
* See {@link Executors#newScheduledThreadPool(int)}.
*
* @param executor the executor to modify to make sure it exits when the
* application is finished
* @param terminationTimeout how long to wait for the executor to
* finish before terminating the JVM
* @param timeUnit unit of time for the time parameter
* @return an unmodifiable version of the input which will not hang the JVM
*/
@Beta
@GwtIncompatible("TODO")
public static ScheduledExecutorService getExitingScheduledExecutorService(
ScheduledThreadPoolExecutor executor, long terminationTimeout, TimeUnit timeUnit) {
return new Application()
.getExitingScheduledExecutorService(executor, terminationTimeout, timeUnit);
}
/**
* Add a shutdown hook to wait for thread completion in the given
* {@link ExecutorService service}. This is useful if the given service uses
* daemon threads, and we want to keep the JVM from exiting immediately on
* shutdown, instead giving these daemon threads a chance to terminate
* normally.
* @param service ExecutorService which uses daemon threads
* @param terminationTimeout how long to wait for the executor to finish
* before terminating the JVM
* @param timeUnit unit of time for the time parameter
*/
@Beta
@GwtIncompatible("TODO")
public static void addDelayedShutdownHook(
ExecutorService service, long terminationTimeout, TimeUnit timeUnit) {
new Application()
.addDelayedShutdownHook(service, terminationTimeout, timeUnit);
}
/**
* Converts the given ThreadPoolExecutor into an ExecutorService that exits
* when the application is complete. It does so by using daemon threads and
* adding a shutdown hook to wait for their completion.
*
* <p>This method waits 120 seconds before continuing with JVM termination,
* even if the executor has not finished its work.
*
* <p>This is mainly for fixed thread pools.
* See {@link Executors#newFixedThreadPool(int)}.
*
* @param executor the executor to modify to make sure it exits when the
* application is finished
* @return an unmodifiable version of the input which will not hang the JVM
*/
@Beta
@GwtIncompatible("concurrency")
public static ExecutorService getExitingExecutorService(ThreadPoolExecutor executor) {
return new Application().getExitingExecutorService(executor);
}
/**
* Converts the given ThreadPoolExecutor into a ScheduledExecutorService that
* exits when the application is complete. It does so by using daemon threads
* and adding a shutdown hook to wait for their completion.
*
* <p>This method waits 120 seconds before continuing with JVM termination,
* even if the executor has not finished its work.
*
* <p>This is mainly for fixed thread pools.
* See {@link Executors#newScheduledThreadPool(int)}.
*
* @param executor the executor to modify to make sure it exits when the
* application is finished
* @return an unmodifiable version of the input which will not hang the JVM
*/
@Beta
@GwtIncompatible("TODO")
public static ScheduledExecutorService getExitingScheduledExecutorService(
ScheduledThreadPoolExecutor executor) {
return new Application().getExitingScheduledExecutorService(executor);
}
  /** Represents the current application to register shutdown hooks. */
  @GwtIncompatible("TODO")
  @VisibleForTesting
  static class Application {

    final ExecutorService getExitingExecutorService(
        ThreadPoolExecutor executor, long terminationTimeout, TimeUnit timeUnit) {
      // Daemon threads stop the pool from blocking JVM exit; the shutdown hook
      // still gives queued work a bounded chance to complete first.
      useDaemonThreadFactory(executor);
      ExecutorService service = Executors.unconfigurableExecutorService(executor);
      addDelayedShutdownHook(service, terminationTimeout, timeUnit);
      return service;
    }

    final ScheduledExecutorService getExitingScheduledExecutorService(
        ScheduledThreadPoolExecutor executor, long terminationTimeout, TimeUnit timeUnit) {
      useDaemonThreadFactory(executor);
      ScheduledExecutorService service = Executors.unconfigurableScheduledExecutorService(executor);
      addDelayedShutdownHook(service, terminationTimeout, timeUnit);
      return service;
    }

    // Registers a hook that shuts the service down and waits (bounded) for it.
    final void addDelayedShutdownHook(
        final ExecutorService service, final long terminationTimeout, final TimeUnit timeUnit) {
      checkNotNull(service);
      checkNotNull(timeUnit);
      addShutdownHook(MoreExecutors.newThread("DelayedShutdownHook-for-" + service, new Runnable() {
        @Override
        public void run() {
          try {
            // We'd like to log progress and failures that may arise in the
            // following code, but unfortunately the behavior of logging
            // is undefined in shutdown hooks.
            // This is because the logging code installs a shutdown hook of its
            // own. See Cleaner class inside {@link LogManager}.
            service.shutdown();
            service.awaitTermination(terminationTimeout, timeUnit);
          } catch (InterruptedException ignored) {
            // We're shutting down anyway, so just ignore.
          }
        }
      }));
    }

    // The single-argument overloads default the termination timeout to 120s.
    final ExecutorService getExitingExecutorService(ThreadPoolExecutor executor) {
      return getExitingExecutorService(executor, 120, TimeUnit.SECONDS);
    }

    final ScheduledExecutorService getExitingScheduledExecutorService(
        ScheduledThreadPoolExecutor executor) {
      return getExitingScheduledExecutorService(executor, 120, TimeUnit.SECONDS);
    }

    // Overridable in tests so hooks are not actually registered with the JVM.
    @VisibleForTesting void addShutdownHook(Thread hook) {
      Runtime.getRuntime().addShutdownHook(hook);
    }
  }
@GwtIncompatible("TODO")
private static void useDaemonThreadFactory(ThreadPoolExecutor executor) {
executor.setThreadFactory(new ThreadFactoryBuilder()
.setDaemon(true)
.setThreadFactory(executor.getThreadFactory())
.build());
}
/**
* Creates an executor service that runs each task in the thread
* that invokes {@code execute/submit}, as in {@link CallerRunsPolicy}. This
* applies both to individually submitted tasks and to collections of tasks
* submitted via {@code invokeAll} or {@code invokeAny}. In the latter case,
* tasks will run serially on the calling thread. Tasks are run to
* completion before a {@code Future} is returned to the caller (unless the
* executor has been shutdown).
*
* <p>Although all tasks are immediately executed in the thread that
* submitted the task, this {@code ExecutorService} imposes a small
* locking overhead on each task submission in order to implement shutdown
* and termination behavior.
*
* <p>The implementation deviates from the {@code ExecutorService}
* specification with regards to the {@code shutdownNow} method. First,
* "best-effort" with regards to canceling running tasks is implemented
* as "no-effort". No interrupts or other attempts are made to stop
* threads executing tasks. Second, the returned list will always be empty,
* as any submitted task is considered to have started execution.
* This applies also to tasks given to {@code invokeAll} or {@code invokeAny}
* which are pending serial execution, even the subset of the tasks that
* have not yet started execution. It is unclear from the
* {@code ExecutorService} specification if these should be included, and
* it's much easier to implement the interpretation that they not be.
* Finally, a call to {@code shutdown} or {@code shutdownNow} may result
* in concurrent calls to {@code invokeAll/invokeAny} throwing
* RejectedExecutionException, although a subset of the tasks may already
* have been executed.
*
* @since 10.0 (<a href="https://github.com/google/guava/wiki/Compatibility"
* >mostly source-compatible</a> since 3.0)
* @deprecated Use {@link #directExecutor()} if you only require an {@link Executor} and
* {@link #newDirectExecutorService()} if you need a {@link ListeningExecutorService}. This
* method will be removed in August 2016.
*/
  @Deprecated
  @GwtIncompatible("TODO")
  public static ListeningExecutorService sameThreadExecutor() {
    // Identical implementation to newDirectExecutorService(); retained only
    // for source compatibility until removal.
    return new DirectExecutorService();
  }
  // See sameThreadExecutor javadoc for behavioral notes.
  @GwtIncompatible("TODO")
  private static class DirectExecutorService
      extends AbstractListeningExecutorService {
    /**
     * Lock used whenever accessing the state variables
     * (runningTasks, shutdown) of the executor
     */
    private final Object lock = new Object();

    /*
     * Conceptually, these two variables describe the executor being in
     * one of three states:
     *   - Active: shutdown == false
     *   - Shutdown: runningTasks > 0 and shutdown == true
     *   - Terminated: runningTasks == 0 and shutdown == true
     */
    @GuardedBy("lock") private int runningTasks = 0;
    @GuardedBy("lock") private boolean shutdown = false;

    @Override
    public void execute(Runnable command) {
      // Count the task before running it so awaitTermination()/isTerminated()
      // cannot observe a terminated state while a task is still in flight.
      startTask();
      try {
        command.run();
      } finally {
        endTask();
      }
    }

    @Override
    public boolean isShutdown() {
      synchronized (lock) {
        return shutdown;
      }
    }

    @Override
    public void shutdown() {
      synchronized (lock) {
        shutdown = true;
        if (runningTasks == 0) {
          // Already terminated: wake any threads blocked in awaitTermination().
          lock.notifyAll();
        }
      }
    }

    // See sameThreadExecutor javadoc for unusual behavior of this method.
    @Override
    public List<Runnable> shutdownNow() {
      // "No-effort" cancellation: running tasks are not interrupted, and the
      // returned list is always empty since every submitted task is
      // considered to have started execution.
      shutdown();
      return Collections.emptyList();
    }

    @Override
    public boolean isTerminated() {
      synchronized (lock) {
        return shutdown && runningTasks == 0;
      }
    }

    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit)
        throws InterruptedException {
      long nanos = unit.toNanos(timeout);
      synchronized (lock) {
        for (;;) {
          if (shutdown && runningTasks == 0) {
            return true;
          } else if (nanos <= 0) {
            return false;
          } else {
            long now = System.nanoTime();
            TimeUnit.NANOSECONDS.timedWait(lock, nanos);
            nanos -= System.nanoTime() - now; // subtract the actual time we waited
          }
        }
      }
    }

    /**
     * Checks if the executor has been shut down and increments the running
     * task count.
     *
     * @throws RejectedExecutionException if the executor has been previously
     *         shutdown
     */
    private void startTask() {
      synchronized (lock) {
        if (shutdown) {
          throw new RejectedExecutionException("Executor already shutdown");
        }
        runningTasks++;
      }
    }

    /**
     * Decrements the running task count.
     */
    private void endTask() {
      synchronized (lock) {
        int numRunning = --runningTasks;
        if (numRunning == 0) {
          // Last running task finished; if shutdown was requested, we are now
          // terminated — wake any waiters.
          lock.notifyAll();
        }
      }
    }
  }
/**
* Creates an executor service that runs each task in the thread
* that invokes {@code execute/submit}, as in {@link CallerRunsPolicy} This
* applies both to individually submitted tasks and to collections of tasks
* submitted via {@code invokeAll} or {@code invokeAny}. In the latter case,
* tasks will run serially on the calling thread. Tasks are run to
* completion before a {@code Future} is returned to the caller (unless the
* executor has been shutdown).
*
* <p>Although all tasks are immediately executed in the thread that
* submitted the task, this {@code ExecutorService} imposes a small
* locking overhead on each task submission in order to implement shutdown
* and termination behavior.
*
* <p>The implementation deviates from the {@code ExecutorService}
* specification with regards to the {@code shutdownNow} method. First,
* "best-effort" with regards to canceling running tasks is implemented
* as "no-effort". No interrupts or other attempts are made to stop
* threads executing tasks. Second, the returned list will always be empty,
* as any submitted task is considered to have started execution.
* This applies also to tasks given to {@code invokeAll} or {@code invokeAny}
* which are pending serial execution, even the subset of the tasks that
* have not yet started execution. It is unclear from the
* {@code ExecutorService} specification if these should be included, and
* it's much easier to implement the interpretation that they not be.
* Finally, a call to {@code shutdown} or {@code shutdownNow} may result
* in concurrent calls to {@code invokeAll/invokeAny} throwing
* RejectedExecutionException, although a subset of the tasks may already
* have been executed.
*
* @since 18.0 (present as MoreExecutors.sameThreadExecutor() since 10.0)
*/
  @GwtIncompatible("TODO")
  public static ListeningExecutorService newDirectExecutorService() {
    // A fresh instance per call, so shutdown state is per-service.
    return new DirectExecutorService();
  }
  /**
   * Returns an {@link Executor} that runs each task in the thread that invokes
   * {@link Executor#execute execute}, as in {@link CallerRunsPolicy}.
   *
   * <p>This instance is equivalent to: <pre>   {@code
   *   final class DirectExecutor implements Executor {
   *     public void execute(Runnable r) {
   *       r.run();
   *     }
   *   }}</pre>
   *
   * <p>This should be preferred to {@link #newDirectExecutorService()} because
   * implementing the {@link ExecutorService} subinterface necessitates
   * significant performance overhead.
   *
   * @since 18.0
   */
  public static Executor directExecutor() {
    return DirectExecutor.INSTANCE;
  }
  /** See {@link #directExecutor} for behavioral notes. */
  private enum DirectExecutor implements Executor {
    INSTANCE; // enum singleton

    @Override public void execute(Runnable command) {
      // Run synchronously on the calling thread.
      command.run();
    }

    @Override public String toString() {
      return "MoreExecutors.directExecutor()";
    }
  }
/**
* Creates an {@link ExecutorService} whose {@code submit} and {@code
* invokeAll} methods submit {@link ListenableFutureTask} instances to the
* given delegate executor. Those methods, as well as {@code execute} and
* {@code invokeAny}, are implemented in terms of calls to {@code
* delegate.execute}. All other methods are forwarded unchanged to the
* delegate. This implies that the returned {@code ListeningExecutorService}
* never calls the delegate's {@code submit}, {@code invokeAll}, and {@code
* invokeAny} methods, so any special handling of tasks must be implemented in
* the delegate's {@code execute} method or by wrapping the returned {@code
* ListeningExecutorService}.
*
* <p>If the delegate executor was already an instance of {@code
* ListeningExecutorService}, it is returned untouched, and the rest of this
* documentation does not apply.
*
* @since 10.0
*/
@GwtIncompatible("TODO")
public static ListeningExecutorService listeningDecorator(
ExecutorService delegate) {
return (delegate instanceof ListeningExecutorService)
? (ListeningExecutorService) delegate
: (delegate instanceof ScheduledExecutorService)
? new ScheduledListeningDecorator((ScheduledExecutorService) delegate)
: new ListeningDecorator(delegate);
}
/**
* Creates a {@link ScheduledExecutorService} whose {@code submit} and {@code
* invokeAll} methods submit {@link ListenableFutureTask} instances to the
* given delegate executor. Those methods, as well as {@code execute} and
* {@code invokeAny}, are implemented in terms of calls to {@code
* delegate.execute}. All other methods are forwarded unchanged to the
* delegate. This implies that the returned {@code
* ListeningScheduledExecutorService} never calls the delegate's {@code
* submit}, {@code invokeAll}, and {@code invokeAny} methods, so any special
* handling of tasks must be implemented in the delegate's {@code execute}
* method or by wrapping the returned {@code
* ListeningScheduledExecutorService}.
*
* <p>If the delegate executor was already an instance of {@code
* ListeningScheduledExecutorService}, it is returned untouched, and the rest
* of this documentation does not apply.
*
* @since 10.0
*/
@GwtIncompatible("TODO")
public static ListeningScheduledExecutorService listeningDecorator(
ScheduledExecutorService delegate) {
return (delegate instanceof ListeningScheduledExecutorService)
? (ListeningScheduledExecutorService) delegate
: new ScheduledListeningDecorator(delegate);
}
  /**
   * Wraps a plain {@link ExecutorService} as a listening one. Lifecycle and
   * execution methods are forwarded verbatim to the delegate; the listening
   * {@code submit}/{@code invokeAll}/{@code invokeAny} behavior comes from
   * {@link AbstractListeningExecutorService}, which routes everything through
   * {@link #execute}.
   */
  @GwtIncompatible("TODO")
  private static class ListeningDecorator
      extends AbstractListeningExecutorService {
    private final ExecutorService delegate;

    ListeningDecorator(ExecutorService delegate) {
      this.delegate = checkNotNull(delegate);
    }

    @Override
    public boolean awaitTermination(long timeout, TimeUnit unit)
        throws InterruptedException {
      return delegate.awaitTermination(timeout, unit);
    }

    @Override
    public boolean isShutdown() {
      return delegate.isShutdown();
    }

    @Override
    public boolean isTerminated() {
      return delegate.isTerminated();
    }

    @Override
    public void shutdown() {
      delegate.shutdown();
    }

    @Override
    public List<Runnable> shutdownNow() {
      return delegate.shutdownNow();
    }

    @Override
    public void execute(Runnable command) {
      delegate.execute(command);
    }
  }
  /**
   * Extends {@link ListeningDecorator} with scheduling support: each schedule
   * call wraps the task in a listenable future and pairs it with the
   * delegate's {@link ScheduledFuture} via {@link ListenableScheduledTask}.
   */
  @GwtIncompatible("TODO")
  private static class ScheduledListeningDecorator
      extends ListeningDecorator implements ListeningScheduledExecutorService {
    // Re-declared at the scheduled type so schedule* methods can be forwarded.
    @SuppressWarnings("hiding")
    final ScheduledExecutorService delegate;

    ScheduledListeningDecorator(ScheduledExecutorService delegate) {
      super(delegate);
      this.delegate = checkNotNull(delegate);
    }

    @Override
    public ListenableScheduledFuture<?> schedule(
        Runnable command, long delay, TimeUnit unit) {
      TrustedListenableFutureTask<Void> task =
          TrustedListenableFutureTask.create(command, null);
      ScheduledFuture<?> scheduled = delegate.schedule(task, delay, unit);
      return new ListenableScheduledTask<Void>(task, scheduled);
    }

    @Override
    public <V> ListenableScheduledFuture<V> schedule(
        Callable<V> callable, long delay, TimeUnit unit) {
      TrustedListenableFutureTask<V> task = TrustedListenableFutureTask.create(callable);
      ScheduledFuture<?> scheduled = delegate.schedule(task, delay, unit);
      return new ListenableScheduledTask<V>(task, scheduled);
    }

    @Override
    public ListenableScheduledFuture<?> scheduleAtFixedRate(
        Runnable command, long initialDelay, long period, TimeUnit unit) {
      // Periodic tasks never complete normally, hence the "never successful"
      // future, which only completes if the command throws.
      NeverSuccessfulListenableFutureTask task =
          new NeverSuccessfulListenableFutureTask(command);
      ScheduledFuture<?> scheduled =
          delegate.scheduleAtFixedRate(task, initialDelay, period, unit);
      return new ListenableScheduledTask<Void>(task, scheduled);
    }

    @Override
    public ListenableScheduledFuture<?> scheduleWithFixedDelay(
        Runnable command, long initialDelay, long delay, TimeUnit unit) {
      NeverSuccessfulListenableFutureTask task =
          new NeverSuccessfulListenableFutureTask(command);
      ScheduledFuture<?> scheduled =
          delegate.scheduleWithFixedDelay(task, initialDelay, delay, unit);
      return new ListenableScheduledTask<Void>(task, scheduled);
    }

    /**
     * Pairs a listenable future (completion/listeners) with the delegate's
     * scheduled future (delay/ordering), cancelling both together.
     */
    private static final class ListenableScheduledTask<V>
        extends SimpleForwardingListenableFuture<V>
        implements ListenableScheduledFuture<V> {

      private final ScheduledFuture<?> scheduledDelegate;

      public ListenableScheduledTask(
          ListenableFuture<V> listenableDelegate,
          ScheduledFuture<?> scheduledDelegate) {
        super(listenableDelegate);
        this.scheduledDelegate = scheduledDelegate;
      }

      @Override
      public boolean cancel(boolean mayInterruptIfRunning) {
        boolean cancelled = super.cancel(mayInterruptIfRunning);
        if (cancelled) {
          // Unless it is cancelled, the delegate may continue being scheduled
          scheduledDelegate.cancel(mayInterruptIfRunning);

          // TODO(user): Cancel "this" if "scheduledDelegate" is cancelled.
        }
        return cancelled;
      }

      @Override
      public long getDelay(TimeUnit unit) {
        return scheduledDelegate.getDelay(unit);
      }

      @Override
      public int compareTo(Delayed other) {
        return scheduledDelegate.compareTo(other);
      }
    }

    /**
     * A future for periodic tasks: it never completes successfully and only
     * transitions to failed (propagating the throwable) if the wrapped
     * runnable throws.
     */
    @GwtIncompatible("TODO")
    private static final class NeverSuccessfulListenableFutureTask
        extends AbstractFuture<Void>
        implements Runnable {
      private final Runnable delegate;

      public NeverSuccessfulListenableFutureTask(Runnable delegate) {
        this.delegate = checkNotNull(delegate);
      }

      @Override public void run() {
        try {
          delegate.run();
        } catch (Throwable t) {
          // Record the failure on the future AND rethrow so the scheduler
          // stops the periodic task.
          setException(t);
          throw Throwables.propagate(t);
        }
      }
    }
  }
/*
* This following method is a modified version of one found in
* http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/test/tck/AbstractExecutorServiceTest.java?revision=1.30
* which contained the following notice:
*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
* Other contributors include Andrew Wright, Jeffrey Hayes,
* Pat Fisher, Mike Judd.
*/
  /**
   * An implementation of {@link ExecutorService#invokeAny} for
   * {@link ListeningExecutorService} implementations: returns the result of
   * the first task to complete successfully, cancelling the rest.
   *
   * @param timed whether {@code nanos} bounds the total wait
   * @param nanos timeout budget in nanoseconds (ignored unless {@code timed})
   * @throws TimeoutException if {@code timed} and no task completes in time
   * @throws ExecutionException if every task fails (last failure is thrown)
   */
  static <T> T invokeAnyImpl(ListeningExecutorService executorService,
      Collection<? extends Callable<T>> tasks, boolean timed, long nanos)
          throws InterruptedException, ExecutionException, TimeoutException {
    checkNotNull(executorService);
    int ntasks = tasks.size();
    checkArgument(ntasks > 0);
    List<Future<T>> futures = Lists.newArrayListWithCapacity(ntasks);
    BlockingQueue<Future<T>> futureQueue = Queues.newLinkedBlockingQueue();

    // For efficiency, especially in executors with limited
    // parallelism, check to see if previously submitted tasks are
    // done before submitting more of them. This interleaving
    // plus the exception mechanics account for messiness of main
    // loop.

    try {
      // Record exceptions so that if we fail to obtain any
      // result, we can throw the last exception we got.
      ExecutionException ee = null;
      long lastTime = timed ? System.nanoTime() : 0;
      Iterator<? extends Callable<T>> it = tasks.iterator();

      // Prime the pump: submit one task before entering the polling loop.
      futures.add(submitAndAddQueueListener(executorService, it.next(), futureQueue));
      --ntasks;
      int active = 1;

      for (;;) {
        Future<T> f = futureQueue.poll();
        if (f == null) {
          if (ntasks > 0) {
            // Nothing finished yet but tasks remain: submit another.
            --ntasks;
            futures.add(submitAndAddQueueListener(executorService, it.next(), futureQueue));
            ++active;
          } else if (active == 0) {
            // All tasks submitted and all have failed; fall through to throw.
            break;
          } else if (timed) {
            f = futureQueue.poll(nanos, TimeUnit.NANOSECONDS);
            if (f == null) {
              throw new TimeoutException();
            }
            // Charge the time actually spent blocking against the budget.
            long now = System.nanoTime();
            nanos -= now - lastTime;
            lastTime = now;
          } else {
            f = futureQueue.take();
          }
        }
        if (f != null) {
          --active;
          try {
            return f.get();
          } catch (ExecutionException eex) {
            ee = eex;
          } catch (RuntimeException rex) {
            ee = new ExecutionException(rex);
          }
        }
      }

      if (ee == null) {
        ee = new ExecutionException(null);
      }
      throw ee;
    } finally {
      // Whichever way we exit, cancel everything still outstanding.
      for (Future<T> f : futures) {
        f.cancel(true);
      }
    }
  }
  /**
   * Submits the task and adds a listener that adds the future to {@code queue} when it completes.
   * The listener runs on {@link #directExecutor()}, i.e. on whichever thread
   * completes the future.
   */
  @GwtIncompatible("TODO")
  private static <T> ListenableFuture<T> submitAndAddQueueListener(
      ListeningExecutorService executorService, Callable<T> task,
      final BlockingQueue<Future<T>> queue) {
    final ListenableFuture<T> future = executorService.submit(task);
    future.addListener(new Runnable() {
      @Override public void run() {
        queue.add(future);
      }
    }, directExecutor());
    return future;
  }
  /**
   * Returns a default thread factory used to create new threads.
   *
   * <p>On AppEngine, returns {@code ThreadManager.currentRequestThreadFactory()}.
   * Otherwise, returns {@link Executors#defaultThreadFactory()}.
   *
   * @since 14.0
   */
  @Beta
  @GwtIncompatible("concurrency")
  public static ThreadFactory platformThreadFactory() {
    if (!isAppEngine()) {
      return Executors.defaultThreadFactory();
    }
    try {
      // ThreadManager is accessed reflectively so this class has no
      // compile-time dependency on the AppEngine SDK.
      return (ThreadFactory) Class.forName("com.google.appengine.api.ThreadManager")
          .getMethod("currentRequestThreadFactory")
          .invoke(null);
    // Separate catch blocks rather than multi-catch — presumably for
    // pre-Java-7 source compatibility; confirm before collapsing.
    } catch (IllegalAccessException e) {
      throw new RuntimeException("Couldn't invoke ThreadManager.currentRequestThreadFactory", e);
    } catch (ClassNotFoundException e) {
      throw new RuntimeException("Couldn't invoke ThreadManager.currentRequestThreadFactory", e);
    } catch (NoSuchMethodException e) {
      throw new RuntimeException("Couldn't invoke ThreadManager.currentRequestThreadFactory", e);
    } catch (InvocationTargetException e) {
      // Rethrow the factory's own failure rather than the reflective wrapper.
      throw Throwables.propagate(e.getCause());
    }
  }
  /**
   * Best-effort detection of a live AppEngine request environment: requires
   * both the runtime system property and a non-null ApiProxy environment.
   * Any reflective failure is treated as "not AppEngine".
   */
  @GwtIncompatible("TODO")
  private static boolean isAppEngine() {
    // Cheap check first: the property is only set inside the AppEngine runtime.
    if (System.getProperty("com.google.appengine.runtime.environment") == null) {
      return false;
    }
    try {
      // If the current environment is null, we're not inside AppEngine.
      return Class.forName("com.google.apphosting.api.ApiProxy")
          .getMethod("getCurrentEnvironment")
          .invoke(null) != null;
    } catch (ClassNotFoundException e) {
      // If ApiProxy doesn't exist, we're not on AppEngine at all.
      return false;
    } catch (InvocationTargetException e) {
      // If ApiProxy throws an exception, we're not in a proper AppEngine environment.
      return false;
    } catch (IllegalAccessException e) {
      // If the method isn't accessible, we're not on a supported version of AppEngine;
      return false;
    } catch (NoSuchMethodException e) {
      // If the method doesn't exist, we're not on a supported version of AppEngine;
      return false;
    }
  }
  /**
   * Creates a thread using {@link #platformThreadFactory}, and sets its name to {@code name}
   * unless changing the name is forbidden by the security manager.
   */
  @GwtIncompatible("concurrency")
  static Thread newThread(String name, Runnable runnable) {
    checkNotNull(name);
    checkNotNull(runnable);
    Thread result = platformThreadFactory().newThread(runnable);
    try {
      result.setName(name);
    } catch (SecurityException e) {
      // OK if we can't set the name in this environment.
    }
    return result;
  }
  // TODO(lukes): provide overloads for ListeningExecutorService? ListeningScheduledExecutorService?
  // TODO(lukes): provide overloads that take constant strings? Function<Runnable, String>s to
  // calculate names?

  /**
   * Creates an {@link Executor} that renames the {@link Thread threads} that its tasks run in.
   *
   * <p>The names are retrieved from the {@code nameSupplier} on the thread that is being renamed
   * right before each task is run. The renaming is best effort, if a {@link SecurityManager}
   * prevents the renaming then it will be skipped but the tasks will still execute.
   *
   * @param executor The executor to decorate
   * @param nameSupplier The source of names for each task
   */
  @GwtIncompatible("concurrency")
  static Executor renamingDecorator(final Executor executor, final Supplier<String> nameSupplier) {
    checkNotNull(executor);
    checkNotNull(nameSupplier);
    if (isAppEngine()) {
      // AppEngine doesn't support thread renaming, so don't even try
      return executor;
    }
    return new Executor() {
      @Override public void execute(Runnable command) {
        // threadRenaming restores the original name after the task finishes.
        executor.execute(Callables.threadRenaming(command, nameSupplier));
      }
    };
  }
  /**
   * Creates an {@link ExecutorService} that renames the {@link Thread threads} that its tasks run
   * in.
   *
   * <p>The names are retrieved from the {@code nameSupplier} on the thread that is being renamed
   * right before each task is run. The renaming is best effort, if a {@link SecurityManager}
   * prevents the renaming then it will be skipped but the tasks will still execute.
   *
   * @param service The executor to decorate
   * @param nameSupplier The source of names for each task
   */
  @GwtIncompatible("concurrency")
  static ExecutorService renamingDecorator(final ExecutorService service,
      final Supplier<String> nameSupplier) {
    checkNotNull(service);
    checkNotNull(nameSupplier);
    if (isAppEngine()) {
      // AppEngine doesn't support thread renaming, so don't even try.
      return service;
    }
    // WrappingExecutorService funnels every submitted task through wrapTask.
    return new WrappingExecutorService(service) {
      @Override protected <T> Callable<T> wrapTask(Callable<T> callable) {
        return Callables.threadRenaming(callable, nameSupplier);
      }
      @Override protected Runnable wrapTask(Runnable command) {
        return Callables.threadRenaming(command, nameSupplier);
      }
    };
  }
/**
* Creates a {@link ScheduledExecutorService} that renames the {@link Thread threads} that its
* tasks run in.
*
* <p>The names are retrieved from the {@code nameSupplier} on the thread that is being renamed
* right before each task is run. The renaming is best effort, if a {@link SecurityManager}
* prevents the renaming then it will be skipped but the tasks will still execute.
*
*
* @param service The executor to decorate
* @param nameSupplier The source of names for each task
*/
@GwtIncompatible("concurrency")
static ScheduledExecutorService renamingDecorator(final ScheduledExecutorService service,
final Supplier<String> nameSupplier) {
checkNotNull(service);
checkNotNull(nameSupplier);
if (isAppEngine()) {
// AppEngine doesn't support thread renaming, so don't even try.
return service;
}
return new WrappingScheduledExecutorService(service) {
@Override protected <T> Callable<T> wrapTask(Callable<T> callable) {
return Callables.threadRenaming(callable, nameSupplier);
}
@Override protected Runnable wrapTask(Runnable command) {
return Callables.threadRenaming(command, nameSupplier);
}
};
}
/**
* Shuts down the given executor gradually, first disabling new submissions and later cancelling
* existing tasks.
*
* <p>The method takes the following steps:
* <ol>
* <li>calls {@link ExecutorService#shutdown()}, disabling acceptance of new submitted tasks.
* <li>waits for half of the specified timeout.
* <li>if the timeout expires, it calls {@link ExecutorService#shutdownNow()}, cancelling
* pending tasks and interrupting running tasks.
* <li>waits for the other half of the specified timeout.
* </ol>
*
* <p>If, at any step of the process, the calling thread is interrupted, the method calls {@link
* ExecutorService#shutdownNow()} and returns.
*
* @param service the {@code ExecutorService} to shut down
* @param timeout the maximum time to wait for the {@code ExecutorService} to terminate
* @param unit the time unit of the timeout argument
* @return {@code true} if the {@code ExecutorService} was terminated successfully, {@code false}
* the call timed out or was interrupted
* @since 17.0
*/
@Beta
@GwtIncompatible("concurrency")
public static boolean shutdownAndAwaitTermination(
ExecutorService service, long timeout, TimeUnit unit) {
checkNotNull(unit);
// Disable new tasks from being submitted
service.shutdown();
try {
long halfTimeoutNanos = TimeUnit.NANOSECONDS.convert(timeout, unit) / 2;
// Wait for half the duration of the timeout for existing tasks to terminate
if (!service.awaitTermination(halfTimeoutNanos, TimeUnit.NANOSECONDS)) {
// Cancel currently executing tasks
service.shutdownNow();
// Wait the other half of the timeout for tasks to respond to being cancelled
service.awaitTermination(halfTimeoutNanos, TimeUnit.NANOSECONDS);
}
} catch (InterruptedException ie) {
// Preserve interrupt status
Thread.currentThread().interrupt();
// (Re-)Cancel if current thread also interrupted
service.shutdownNow();
}
return service.isTerminated();
}
}
| |
package net.lecousin.framework.core.test.io;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import net.lecousin.framework.concurrent.async.Async;
import net.lecousin.framework.concurrent.threads.Task;
import net.lecousin.framework.exception.NoException;
import net.lecousin.framework.io.IO;
import net.lecousin.framework.io.data.Chars;
import net.lecousin.framework.io.text.ICharacterStream;
import net.lecousin.framework.text.CharArrayStringBuffer;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
/**
 * Base test class validating {@code ICharacterStream.Readable.Buffered} implementations against
 * the generated test files: synchronous and asynchronous char-by-char reads, buffer-at-a-time
 * reads, readUntil variants, {@code back()} behavior, and IOException propagation when the
 * underlying IO always fails.
 */
public abstract class TestCharacterStreamReadableBuffered extends TestIO.UsingGeneratedTestFiles {

	protected TestCharacterStreamReadableBuffered(File testFile, byte[] testBuf, int nbBuf) {
		super(testFile, testBuf, nbBuf);
	}

	/** Opens the buffered character stream implementation under test on top of the given IO. */
	protected abstract ICharacterStream.Readable.Buffered openStream(IO.Readable io) throws Exception;

	@Override
	protected IO getIOForCommonTests() {
		// Skip the (slow) common tests on very large generated files.
		Assume.assumeTrue(nbBuf < 5000);
		return openFile();
	}

	/** Every read flavor must surface an IOException when the underlying IO always errors. */
	@Test
	public void testIOError() throws Exception {
		ICharacterStream.Readable.Buffered s = openStream(new TestIOError.ReadableAlwaysError());
		try {
			s.read();
			throw new AssertionError();
		} catch (IOException e) {
			// ok
		}
		try {
			// NOTE(review): -2 presumably means "no data buffered yet" — wait, then retry once.
			if (s.readAsync() == -2) {
				s.canStartReading().block(10000);
				s.readAsync();
			}
			throw new AssertionError();
		} catch (IOException e) {
			// ok
		}
		try {
			s.readNextBufferAsync().blockResult(10000);
			throw new AssertionError();
		} catch (IOException e) {
			// ok
		}
		try {
			s.readNextBuffer();
			throw new AssertionError();
		} catch (IOException e) {
			// ok
		}
		try {
			s.readUntil('m', new CharArrayStringBuffer());
			throw new AssertionError();
		} catch (IOException e) {
			// ok
		}
		try {
			s.readUntilAsync('m', new CharArrayStringBuffer()).blockResult(10000);
			throw new AssertionError();
		} catch (IOException e) {
			// ok
		}
		s.close();
	}

	/**
	 * Reads the whole file one character at a time, then checks EOF behavior and that
	 * {@code back()} makes the pushed-back character the next one returned by read/readSync.
	 */
	@Test
	public void testCharByChar() throws Exception {
		ICharacterStream.Readable.Buffered s = openStream(openFile());
		for (int i = 0; i < nbBuf; ++i) {
			for (int j = 0; j < testBuf.length; ++j) {
				char c;
				try { c = s.read(); }
				catch (IOException e) {
					throw new Exception("Read character error at " + (i*testBuf.length+j) + " in file " + testFile.getAbsolutePath(), e);
				}
				if (c != (char)(testBuf[j]&0xFF))
					throw new Exception("Invalid character "+c+" at "+(i*testBuf.length+j));
			}
		}
		// After the full content, read() must fail with EOFException.
		try {
			s.read();
			throw new AssertionError("Can read after the end of stream");
		} catch (EOFException e) {}
		// back() must work even at end of stream: the pushed-back char is returned next.
		s.back('w');
		Assert.assertEquals('w', s.read());
		s.back('z');
		char[] buf = new char[20];
		// A zero-length readSync must not consume anything.
		Assert.assertTrue(s.readSync(buf, 0, 0) <= 0);
		Assert.assertEquals(1, s.readSync(buf, 0, 10));
		Assert.assertEquals('z', buf[0]);
		Assert.assertTrue(s.readSync(buf, 0, 0) <= 0);
		s.back('a');
		Assert.assertEquals(1, s.readSync(buf, 0, 1));
		Assert.assertEquals('a', buf[0]);
		s.close();
	}

	/** Same as testCharByChar but using the asynchronous single-character API. */
	@Test
	public void testCharByCharAsync() throws Exception {
		ICharacterStream.Readable.Buffered s = openStream(openFile());
		Async<Exception> sp = new Async<>();
		continueReadAsync(s, 0, 0, sp);
		sp.blockThrow(0);
		s.back('z');
		char[] buf = new char[20];
		Assert.assertEquals(1, s.readAsync(buf, 0, 20).blockResult(0).intValue());
		Assert.assertEquals('z', buf[0]);
		s.close();
	}

	/**
	 * Reads characters with readAsync until the expected end; when readAsync reports no data
	 * is buffered (-2), re-schedules itself once reading can start again, so the whole file is
	 * eventually consumed without blocking. Unblocks {@code sp} on success, or sets its error.
	 */
	private void continueReadAsync(ICharacterStream.Readable.Buffered s, int iBuf, int iChar, Async<Exception> sp) throws Exception {
		while (iBuf < nbBuf) {
			// Periodically exercise back() + readAsync interplay.
			if ((iBuf + iChar) % 13 == 3) {
				s.back('b');
				Assert.assertEquals('b', s.readAsync());
			}
			int c = s.readAsync();
			if (c == -1)
				throw new Exception("Unexpected end at " + (iBuf * testBuf.length + iChar));
			if (c == -2) {
				// No data currently buffered: continue from the same position in a new task
				// once the stream signals data can be read again.
				int i = iBuf;
				int j = iChar;
				s.canStartReading().thenStart("readAsync", null, (Task<Void, NoException> t) -> {
					try {
						continueReadAsync(s, i, j, sp);
					} catch (Exception e) {
						sp.error(e);
					}
					return null;
				}, true);
				return;
			}
			if (c != (char)(testBuf[iChar]&0xFF))
				throw new Exception("Invalid character " + c + " at "+ (iBuf * testBuf.length + iChar));
			if (++iChar >= testBuf.length) {
				iBuf++;
				iChar = 0;
			}
		}
		sp.unblock();
	}

	/**
	 * Consumes the file buffer-by-buffer with readNextBufferAsync, then checks null at EOF and
	 * that a pushed-back character is delivered as a one-character buffer.
	 */
	@Test
	public void testNextBufferAsync() throws Exception {
		ICharacterStream.Readable.Buffered s = openStream(openFile());
		int iBuf = 0;
		int iChar = 0;
		while (iBuf < nbBuf) {
			Chars.Readable str = s.readNextBufferAsync().blockResult(0);
			Assert.assertNotNull(str);
			while (str.hasRemaining()) {
				Assert.assertEquals(testBuf[iChar] & 0xFF, str.get());
				if (++iChar == testBuf.length) {
					iChar = 0;
					iBuf++;
				}
			}
		}
		// The content must end exactly on the expected boundary.
		Assert.assertEquals(nbBuf, iBuf);
		Assert.assertEquals(0, iChar);
		Assert.assertNull(s.readNextBufferAsync().blockResult(0));
		s.back('z');
		Chars.Readable str = s.readNextBufferAsync().blockResult(0);
		Assert.assertNotNull(str);
		Assert.assertEquals(1, str.remaining());
		Assert.assertEquals('z', str.get());
		Assert.assertNull(s.readNextBufferAsync().blockResult(0));
		s.close();
	}

	/** Synchronous counterpart of testNextBufferAsync. */
	@Test
	public void testNextBuffer() throws Exception {
		ICharacterStream.Readable.Buffered s = openStream(openFile());
		int iBuf = 0;
		int iChar = 0;
		while (iBuf < nbBuf) {
			Chars.Readable str = s.readNextBuffer();
			Assert.assertNotNull(str);
			while (str.hasRemaining()) {
				Assert.assertEquals(testBuf[iChar] & 0xFF, str.get());
				if (++iChar == testBuf.length) {
					iChar = 0;
					iBuf++;
				}
			}
		}
		Assert.assertEquals(nbBuf, iBuf);
		Assert.assertEquals(0, iChar);
		Assert.assertNull(s.readNextBuffer());
		s.back('z');
		Chars.Readable str = s.readNextBuffer();
		Assert.assertNotNull(str);
		Assert.assertEquals(1, str.remaining());
		Assert.assertEquals('z', str.get());
		Assert.assertNull(s.readNextBuffer());
		s.close();
	}

	/**
	 * Checks readUntil: characters up to (excluding) the end character are accumulated into the
	 * string, the end character itself is consumed, and false is returned at end of stream.
	 */
	@Test
	public void testReadUntil() throws Exception {
		ICharacterStream.Readable.Buffered s = openStream(openFile());
		int iBuf = 0;
		int iChar = 0;
		while (iBuf < nbBuf) {
			CharArrayStringBuffer str = new CharArrayStringBuffer();
			// Pick a varying end character that exists somewhere in the test pattern.
			char endChar = (char)testBuf[(iBuf + 17 + iChar) % testBuf.length];
			boolean found = s.readUntil(endChar, str);
			// Walk the expected content in parallel and compare with what was accumulated.
			int i = 0;
			boolean foundExpected = false;
			do {
				if (testBuf[iChar] == endChar) {
					Assert.assertEquals(i, str.length());
					if (++iChar == testBuf.length) {
						iChar = 0;
						iBuf++;
					}
					foundExpected = true;
					break;
				}
				Assert.assertTrue(str.length() > i);
				Assert.assertEquals(testBuf[iChar], str.charAt(i++));
				if (++iChar == testBuf.length) {
					iChar = 0;
					iBuf++;
				}
			} while (iBuf < nbBuf);
			Assert.assertTrue(foundExpected == found);
		}
		// At end of stream, readUntil must return false with nothing accumulated.
		CharArrayStringBuffer str = new CharArrayStringBuffer();
		boolean found = s.readUntil('m', str);
		Assert.assertFalse(found);
		Assert.assertEquals(0, str.length());
		s.close();
		// '$' never occurs in the content: the whole file must be accumulated (small files only).
		if (nbBuf <= 100) {
			s = openStream(openFile());
			str = new CharArrayStringBuffer();
			found = s.readUntil('$', str);
			s.close();
			Assert.assertFalse(found);
			Assert.assertEquals(nbBuf * testBuf.length, str.length());
		}
	}

	/** Asynchronous counterpart of testReadUntil. */
	@Test
	public void testReadUntilAsync() throws Exception {
		ICharacterStream.Readable.Buffered s = openStream(openFile());
		int iBuf = 0;
		int iChar = 0;
		while (iBuf < nbBuf) {
			CharArrayStringBuffer str = new CharArrayStringBuffer();
			char endChar = (char)testBuf[(iBuf + (testBuf.length * 2 / 3 - 1) + iChar) % testBuf.length];
			boolean found = s.readUntilAsync(endChar, str).blockResult(10000).booleanValue();
			int i = 0;
			boolean foundExpected = false;
			do {
				if (testBuf[iChar] == endChar) {
					Assert.assertEquals(i, str.length());
					if (++iChar == testBuf.length) {
						iChar = 0;
						iBuf++;
					}
					foundExpected = true;
					break;
				}
				Assert.assertTrue(str.length() > i);
				Assert.assertEquals(testBuf[iChar], str.charAt(i++));
				if (++iChar == testBuf.length) {
					iChar = 0;
					iBuf++;
				}
			} while (iBuf < nbBuf);
			Assert.assertTrue(foundExpected == found);
		}
		CharArrayStringBuffer str = new CharArrayStringBuffer();
		boolean found = s.readUntilAsync('m', str).blockResult(10000).booleanValue();
		Assert.assertFalse(found);
		Assert.assertEquals(0, str.length());
		s.close();
		if (nbBuf <= 100) {
			s = openStream(openFile());
			str = new CharArrayStringBuffer();
			found = s.readUntilAsync('$', str).blockResult(10000).booleanValue();
			s.close();
			Assert.assertFalse(found);
			Assert.assertEquals(nbBuf * testBuf.length, str.length());
		}
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.io.debezium;
import static org.apache.commons.lang.StringUtils.isBlank;
import io.debezium.annotation.ThreadSafe;
import io.debezium.config.Configuration;
import io.debezium.config.Field;
import io.debezium.document.DocumentReader;
import io.debezium.relational.history.AbstractDatabaseHistory;
import io.debezium.relational.history.DatabaseHistory;
import io.debezium.relational.history.DatabaseHistoryException;
import io.debezium.relational.history.DatabaseHistoryListener;
import io.debezium.relational.history.HistoryRecord;
import io.debezium.relational.history.HistoryRecordComparator;
import java.io.IOException;
import java.util.UUID;
import java.util.function.Consumer;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigDef.Width;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.api.Reader;
import org.apache.pulsar.client.api.Schema;
/**
 * A {@link DatabaseHistory} implementation that records schema changes as normal pulsar messages
 * on the specified topic, and that recovers the history by creating a Pulsar {@link Reader} on
 * that topic and re-processing all of its messages.
 */
@Slf4j
@ThreadSafe
public final class PulsarDatabaseHistory extends AbstractDatabaseHistory {

    /** Required configuration: the Pulsar topic where schema history records are stored. */
    public static final Field TOPIC = Field.create(CONFIGURATION_FIELD_PREFIX_STRING + "pulsar.topic")
        .withDisplayName("Database history topic name")
        .withType(Type.STRING)
        .withWidth(Width.LONG)
        .withImportance(Importance.HIGH)
        .withDescription("The name of the topic for the database schema history")
        .withValidation(Field::isRequired);

    /** Required configuration: the Pulsar service URL used to build the client. */
    public static final Field SERVICE_URL = Field.create(CONFIGURATION_FIELD_PREFIX_STRING + "pulsar.service.url")
        .withDisplayName("Pulsar broker addresses")
        .withType(Type.STRING)
        .withWidth(Width.LONG)
        .withImportance(Importance.HIGH)
        .withDescription("Pulsar service url")
        .withValidation(Field::isRequired);

    // Kept non-final for binary compatibility with existing users; treat as a constant.
    public static Field.Set ALL_FIELDS = Field.setOf(
        TOPIC,
        SERVICE_URL,
        DatabaseHistory.NAME);

    private final DocumentReader reader = DocumentReader.defaultReader();
    private String topicName;
    private String serviceUrl;
    private String dbHistoryName;
    // Lazily created; volatile because lifecycle methods may run on different threads.
    private volatile PulsarClient pulsarClient;
    private volatile Producer<String> producer;

    /**
     * Reads and validates the configuration ({@link #TOPIC}, {@link #SERVICE_URL} and the
     * optional history name, defaulting to a random UUID).
     *
     * @throws IllegalArgumentException if the configuration is invalid
     */
    @Override
    public void configure(Configuration config, HistoryRecordComparator comparator, DatabaseHistoryListener listener) {
        super.configure(config, comparator, listener);
        if (!config.validateAndRecord(ALL_FIELDS, logger::error)) {
            throw new IllegalArgumentException("Error configuring an instance of "
                + getClass().getSimpleName() + "; check the logs for details");
        }
        this.topicName = config.getString(TOPIC);
        this.serviceUrl = config.getString(SERVICE_URL);

        // Copy the relevant portions of the configuration and add useful defaults ...
        this.dbHistoryName = config.getString(DatabaseHistory.NAME, UUID.randomUUID().toString());

        log.info("Configure to store the debezium database history {} to pulsar topic {} at {}",
            dbHistoryName, topicName, serviceUrl);
    }

    @Override
    public void initializeStorage() {
        super.initializeStorage();

        // initializeStorage() may be invoked before start(); create the client on demand to
        // avoid a NullPointerException on the not-yet-initialized pulsarClient field.
        setupClientIfNeeded();

        // try simple to publish an empty string to create topic
        try (Producer<String> p = pulsarClient.newProducer(Schema.STRING).topic(topicName).create()) {
            p.send("");
        } catch (PulsarClientException pce) {
            log.error("Failed to initialize storage", pce);
            throw new RuntimeException("Failed to initialize storage", pce);
        }
    }

    /** Lazily creates the shared {@link PulsarClient} if it does not exist yet. */
    void setupClientIfNeeded() {
        if (null == this.pulsarClient) {
            try {
                pulsarClient = PulsarClient.builder()
                    .serviceUrl(serviceUrl)
                    .build();
            } catch (PulsarClientException e) {
                throw new RuntimeException("Failed to create pulsar client to pulsar cluster at "
                    + serviceUrl, e);
            }
        }
    }

    /** Lazily creates the history {@link Producer} (and the client) if they do not exist yet. */
    void setupProducerIfNeeded() {
        setupClientIfNeeded();
        if (null == this.producer) {
            try {
                this.producer = pulsarClient.newProducer(Schema.STRING)
                    .topic(topicName)
                    .producerName(dbHistoryName)
                    .blockIfQueueFull(true)
                    .create();
            } catch (PulsarClientException e) {
                log.error("Failed to create pulsar producer to topic '{}' at cluster '{}'", topicName, serviceUrl);
                throw new RuntimeException("Failed to create pulsar producer to topic '"
                    + topicName + "' at cluster '" + serviceUrl + "'", e);
            }
        }
    }

    @Override
    public void start() {
        super.start();
        setupProducerIfNeeded();
    }

    /**
     * Publishes a history record to the history topic.
     *
     * @throws IllegalStateException if {@link #start()} has not been called yet
     * @throws DatabaseHistoryException if publishing fails
     */
    @Override
    protected void storeRecord(HistoryRecord record) throws DatabaseHistoryException {
        if (this.producer == null) {
            throw new IllegalStateException("No producer is available. Ensure that 'start()'" +
                " is called before storing database history records.");
        }
        if (log.isTraceEnabled()) {
            log.trace("Storing record into database history: {}", record);
        }
        try {
            producer.send(record.toString());
        } catch (PulsarClientException e) {
            throw new DatabaseHistoryException(e);
        }
    }

    @Override
    public void stop() {
        // Close the producer first, then the client; each failure is logged but must not
        // prevent the other resource from being released (the original code leaked the
        // client whenever producer.close() threw).
        try {
            if (this.producer != null) {
                try {
                    producer.flush();
                } catch (PulsarClientException pce) {
                    // ignore the error to ensure the producer is eventually closed
                } finally {
                    this.producer.close();
                }
                this.producer = null;
            }
        } catch (PulsarClientException pe) {
            log.warn("Failed to close pulsar producer", pe);
        } finally {
            try {
                if (this.pulsarClient != null) {
                    pulsarClient.close();
                    this.pulsarClient = null;
                }
            } catch (PulsarClientException pe) {
                log.warn("Failed to close pulsar client", pe);
            }
        }
    }

    /**
     * Replays all records stored in the history topic, in publish order, into {@code records}.
     * Blank messages (published only to create the topic) and invalid records are skipped; a
     * single undecodable record is logged and skipped rather than aborting the recovery.
     */
    @Override
    protected void recoverRecords(Consumer<HistoryRecord> records) {
        setupClientIfNeeded();
        try (Reader<String> historyReader = pulsarClient.newReader(Schema.STRING)
            .topic(topicName)
            .startMessageId(MessageId.earliest)
            .create()
        ) {
            log.info("Scanning the database history topic '{}'", topicName);

            // Read all messages in the topic ...
            MessageId lastProcessedMessageId = null;

            // read the topic until the end
            while (historyReader.hasMessageAvailable()) {
                Message<String> msg = historyReader.readNext();
                try {
                    // Skip anything at or before the last processed position.
                    if (null == lastProcessedMessageId || lastProcessedMessageId.compareTo(msg.getMessageId()) < 0) {
                        if (!isBlank(msg.getValue())) {
                            HistoryRecord recordObj = new HistoryRecord(reader.read(msg.getValue()));
                            if (log.isTraceEnabled()) {
                                log.trace("Recovering database history: {}", recordObj);
                            }
                            if (recordObj == null || !recordObj.isValid()) {
                                log.warn("Skipping invalid database history record '{}'. " +
                                    "This is often not an issue, but if it happens repeatedly please check the '{}' topic.",
                                    recordObj, topicName);
                            } else {
                                records.accept(recordObj);
                                log.trace("Recovered database history: {}", recordObj);
                            }
                        }
                        lastProcessedMessageId = msg.getMessageId();
                    }
                } catch (IOException ioe) {
                    log.error("Error while deserializing history record '{}'", msg.getValue(), ioe);
                }
                // (removed a no-op `catch (Exception e) { throw e; }` that only re-threw)
            }
            log.info("Successfully completed scanning the database history topic '{}'", topicName);
        } catch (IOException ioe) {
            log.error("Encountered issues on recovering history records", ioe);
            throw new RuntimeException("Encountered issues on recovering history records", ioe);
        }
    }

    /** Returns {@code true} if the history topic already contains at least one message. */
    @Override
    public boolean exists() {
        setupClientIfNeeded();
        try (Reader<String> historyReader = pulsarClient.newReader(Schema.STRING)
            .topic(topicName)
            .startMessageId(MessageId.earliest)
            .create()
        ) {
            return historyReader.hasMessageAvailable();
        } catch (IOException e) {
            log.error("Encountered issues on checking existence of database history", e);
            throw new RuntimeException("Encountered issues on checking existence of database history", e);
        }
    }

    @Override
    public String toString() {
        if (topicName != null) {
            return "Pulsar topic (" + topicName + ") at " + serviceUrl;
        }
        return "Pulsar topic";
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lucene.search.suggest.analyzing;
import com.carrotsearch.hppc.ObjectIntHashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.TokenStreamToAutomaton;
import org.apache.lucene.search.suggest.InputIterator;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.DataInput;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.InputStreamDataInput;
import org.apache.lucene.store.OutputStreamDataOutput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.OfflineSorter;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.LimitedFiniteStringsIterator;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.Transition;
import org.apache.lucene.util.fst.Builder;
import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.FST.BytesReader;
import org.apache.lucene.util.fst.PairOutputs;
import org.apache.lucene.util.fst.PairOutputs.Pair;
import org.apache.lucene.util.fst.PositiveIntOutputs;
import org.apache.lucene.util.fst.Util;
import org.apache.lucene.util.fst.Util.Result;
import org.apache.lucene.util.fst.Util.TopResults;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.common.io.PathUtils;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
* Suggester that first analyzes the surface form, adds the
* analyzed form to a weighted FST, and then does the same
* thing at lookup time. This means lookup is based on the
* analyzed form while suggestions are still the surface
* form(s).
*
* <p>
* This can result in powerful suggester functionality. For
* example, if you use an analyzer removing stop words,
* then the partial text "ghost chr..." could see the
* suggestion "The Ghost of Christmas Past". Note that
* position increments MUST NOT be preserved for this example
* to work, so you should call the constructor with
* <code>preservePositionIncrements</code> parameter set to
* false
*
* <p>
* If SynonymFilter is used to map wifi and wireless network to
* hotspot then the partial text "wirele..." could suggest
* "wifi router". Token normalization like stemmers, accent
* removal, etc., would allow suggestions to ignore such
* variations.
*
* <p>
* When two matching suggestions have the same weight, they
* are tie-broken by the analyzed form. If their analyzed
* form is the same then the order is undefined.
*
* <p>
* There are some limitations:
* <ul>
*
* <li> A lookup from a query like "net" in English won't
* be any different than "net " (ie, user added a
* trailing space) because analyzers don't reflect
* when they've seen a token separator and when they
* haven't.
*
* <li> If you're using {@code StopFilter}, and the user will
* type "fast apple", but so far all they've typed is
* "fast a", again because the analyzer doesn't convey whether
* it's seen a token separator after the "a",
* {@code StopFilter} will remove that "a" causing
* far more matches than you'd expect.
*
* <li> Lookups with the empty string return no results
* instead of all results.
* </ul>
*/
public class XAnalyzingSuggester extends Lookup {
    /**
     * FST<Weight,Surface>:
     * input is the analyzed form, with a null byte between terms
     * weights are encoded as costs: (Integer.MAX_VALUE-weight)
     * surface is the original, unanalyzed form.
     */
    private FST<Pair<Long,BytesRef>> fst = null;

    /**
     * Analyzer that will be used for analyzing suggestions at
     * index time.
     */
    private final Analyzer indexAnalyzer;

    /**
     * Analyzer that will be used for analyzing suggestions at
     * query time.
     */
    private final Analyzer queryAnalyzer;

    /**
     * True if exact match suggestions should always be returned first.
     */
    private final boolean exactFirst;

    /**
     * True if separator between tokens should be preserved.
     */
    private final boolean preserveSep;

    /** Include this flag in the options parameter to {@code
     * #XAnalyzingSuggester(Analyzer,Analyzer,int,int,int,boolean,FST,boolean,int,int,int,int,int)} to always
     * return the exact match first, regardless of score. This
     * has no performance impact but could result in
     * low-quality suggestions. */
    public static final int EXACT_FIRST = 1;

    /** Include this flag in the options parameter to {@code
     * #XAnalyzingSuggester(Analyzer,Analyzer,int,int,int,boolean,FST,boolean,int,int,int,int,int)} to preserve
     * token separators when matching. */
    public static final int PRESERVE_SEP = 2;

    /** Represents the separation between tokens, if
     * PRESERVE_SEP was specified */
    public static final int SEP_LABEL = '\u001F';

    /** Marks end of the analyzed input and start of dedup
     * byte. */
    public static final int END_BYTE = 0x0;

    /** Maximum number of dup surface forms (different surface
     * forms for the same analyzed form). */
    private final int maxSurfaceFormsPerAnalyzedForm;

    /** Maximum graph paths to index for a single analyzed
     * surface form. This only matters if your analyzer
     * makes lots of alternate paths (e.g. contains
     * SynonymFilter). */
    private final int maxGraphExpansions;

    /** Highest number of analyzed paths we saw for any single
     * input surface form. For analyzers that never create
     * graphs this will always be 1. */
    private int maxAnalyzedPathsForOneInput;

    /** Whether the stored entries carry payloads; set by the constructor. */
    private boolean hasPayloads;

    // Configurable label values; the constructors pass the matching constants
    // (SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER) as defaults.
    private final int sepLabel;
    private final int payloadSep;
    private final int endByte;
    private final int holeCharacter;

    /** Default value used for the {@code payloadSep} label. */
    public static final int PAYLOAD_SEP = '\u001F';
    /** Default value used for the {@code holeCharacter} label. */
    public static final int HOLE_CHARACTER = '\u001E';

    /** Optional automaton prepended to the analyzed query (context dependent suggestions). */
    private final Automaton queryPrefix;

    /** Whether position holes should appear in the automaton. */
    private boolean preservePositionIncrements;

    /** Number of entries the lookup was built with */
    private long count = 0;
    /**
     * Calls {@code #XAnalyzingSuggester(Analyzer,Analyzer,int,int,int,boolean,FST,boolean,int,int,int,int,int)
     * AnalyzingSuggester(analyzer, analyzer, EXACT_FIRST |
     * PRESERVE_SEP, 256, -1)}
     *
     * <p>The same analyzer is used at index and query time, with the default label constants.
     *
     * @param analyzer Analyzer that will be used for analyzing suggestions while building the index.
     */
    public XAnalyzingSuggester(Analyzer analyzer) {
        this(analyzer, null, analyzer, EXACT_FIRST | PRESERVE_SEP, 256, -1, true, null, false, 0,
            SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER);
    }
    /**
     * Calls {@code #XAnalyzingSuggester(Analyzer,Analyzer,int,int,int,boolean,FST,boolean,int,int,int,int,int)
     * AnalyzingSuggester(indexAnalyzer, queryAnalyzer, EXACT_FIRST |
     * PRESERVE_SEP, 256, -1)}
     *
     * <p>Separate analyzers for index and query time, with the default label constants.
     *
     * @param indexAnalyzer Analyzer that will be used for analyzing suggestions while building the index.
     * @param queryAnalyzer Analyzer that will be used for analyzing query text during lookup
     */
    public XAnalyzingSuggester(Analyzer indexAnalyzer, Analyzer queryAnalyzer) {
        this(indexAnalyzer, null, queryAnalyzer, EXACT_FIRST | PRESERVE_SEP, 256, -1, true, null, false, 0,
            SEP_LABEL, PAYLOAD_SEP, END_BYTE, HOLE_CHARACTER);
    }
/**
* Creates a new suggester.
*
* @param indexAnalyzer Analyzer that will be used for
* analyzing suggestions while building the index.
* @param queryAnalyzer Analyzer that will be used for
* analyzing query text during lookup
* @param options see {@link #EXACT_FIRST}, {@link #PRESERVE_SEP}
* @param maxSurfaceFormsPerAnalyzedForm Maximum number of
* surface forms to keep for a single analyzed form.
* When there are too many surface forms we discard the
* lowest weighted ones.
* @param maxGraphExpansions Maximum number of graph paths
* to expand from the analyzed form. Set this to -1 for
* no limit.
*/
public XAnalyzingSuggester(Analyzer indexAnalyzer, Automaton queryPrefix, Analyzer queryAnalyzer,
int options, int maxSurfaceFormsPerAnalyzedForm, int maxGraphExpansions,
boolean preservePositionIncrements, FST<Pair<Long, BytesRef>> fst,
boolean hasPayloads, int maxAnalyzedPathsForOneInput,
int sepLabel, int payloadSep, int endByte, int holeCharacter) {
// SIMON EDIT: I added fst, hasPayloads and maxAnalyzedPathsForOneInput
this.indexAnalyzer = indexAnalyzer;
this.queryAnalyzer = queryAnalyzer;
this.fst = fst;
this.hasPayloads = hasPayloads;
if ((options & ~(EXACT_FIRST | PRESERVE_SEP)) != 0) {
throw new IllegalArgumentException("options should only contain EXACT_FIRST and PRESERVE_SEP; got " + options);
}
this.exactFirst = (options & EXACT_FIRST) != 0;
this.preserveSep = (options & PRESERVE_SEP) != 0;
// FLORIAN EDIT: I added <code>queryPrefix</code> for context dependent suggestions
this.queryPrefix = queryPrefix;
// NOTE: this is just an implementation limitation; if
// somehow this is a problem we could fix it by using
// more than one byte to disambiguate ... but 256 seems
// like it should be way more then enough.
if (maxSurfaceFormsPerAnalyzedForm <= 0 || maxSurfaceFormsPerAnalyzedForm > 256) {
throw new IllegalArgumentException(
"maxSurfaceFormsPerAnalyzedForm must be > 0 and < 256 (got: " + maxSurfaceFormsPerAnalyzedForm + ")");
}
this.maxSurfaceFormsPerAnalyzedForm = maxSurfaceFormsPerAnalyzedForm;
if (maxGraphExpansions < 1 && maxGraphExpansions != -1) {
throw new IllegalArgumentException(
"maxGraphExpansions must -1 (no limit) or > 0 (got: " + maxGraphExpansions + ")");
}
this.maxGraphExpansions = maxGraphExpansions;
this.maxAnalyzedPathsForOneInput = maxAnalyzedPathsForOneInput;
this.preservePositionIncrements = preservePositionIncrements;
this.sepLabel = sepLabel;
this.payloadSep = payloadSep;
this.endByte = endByte;
this.holeCharacter = holeCharacter;
}
/** Returns byte size of the underlying FST. */
@Override
public long ramBytesUsed() {
return fst == null ? 0 : fst.ramBytesUsed();
}
    /** Returns the highest number of analyzed paths seen for any single input surface form. */
    public int getMaxAnalyzedPathsForOneInput() {
        return maxAnalyzedPathsForOneInput;
    }
    // Replaces SEP with epsilon or remaps them if
    // we were asked to preserve them:
    private Automaton replaceSep(Automaton a) {

        Automaton result = new Automaton();

        // Copy all states over
        int numStates = a.getNumStates();
        for(int s=0;s<numStates;s++) {
            result.createState();
            result.setAccept(s, a.isAccept(s));
        }

        // Go in reverse topo sort so we know we only have to
        // make one pass:
        Transition t = new Transition();
        int[] topoSortStates = topoSortStates(a);
        for(int i=0;i<topoSortStates.length;i++) {
            int state = topoSortStates[topoSortStates.length-1-i];
            int count = a.initTransition(state, t);
            for(int j=0;j<count;j++) {
                a.getNextTransition(t);
                if (t.min == TokenStreamToAutomaton.POS_SEP) {
                    assert t.max == TokenStreamToAutomaton.POS_SEP;
                    if (preserveSep) {
                        // Remap to SEP_LABEL:
                        // NOTE(review): uses the static SEP_LABEL constant rather than the
                        // configurable sepLabel field — confirm this is intended when a custom
                        // sepLabel is supplied to the constructor.
                        result.addTransition(state, t.dest, SEP_LABEL);
                    } else {
                        result.addEpsilon(state, t.dest);
                    }
                } else if (t.min == TokenStreamToAutomaton.HOLE) {
                    assert t.max == TokenStreamToAutomaton.HOLE;

                    // Just remove the hole: there will then be two
                    // SEP tokens next to each other, which will only
                    // match another hole at search time. Note that
                    // it will also match an empty-string token ... if
                    // that's somehow a problem we can always map HOLE
                    // to a dedicated byte (and escape it in the
                    // input).
                    result.addEpsilon(state, t.dest);
                } else {
                    // Ordinary transition: copy it unchanged.
                    result.addTransition(state, t.dest, t.min, t.max);
                }
            }
        }

        result.finishState();

        return result;
    }
/**
 * Hook applied to every automaton before it is intersected with the FST.
 * When a query prefix is configured, the prefix automaton is prepended and
 * the result re-determinized; otherwise the automaton is returned untouched.
 */
protected Automaton convertAutomaton(Automaton a) {
    if (queryPrefix == null) {
        return a;
    }
    Automaton prefixed = Operations.concatenate(Arrays.asList(queryPrefix, a));
    // This automaton should not blow up during determinize:
    return Operations.determinize(prefixed, Integer.MAX_VALUE);
}
/**
 * Returns the states of {@code a} reachable from the start state 0, in the
 * order they are discovered.
 *
 * NOTE(review): despite the name this is a breadth-first traversal, not a
 * strict topological sort; it is only a valid (reverse-)topological order
 * for the layered acyclic automata produced by TokenStreamToAutomaton —
 * confirm before reusing on arbitrary automata. States unreachable from
 * state 0 are never visited, leaving trailing zeros in the returned array.
 */
private int[] topoSortStates(Automaton a) {
    int[] states = new int[a.getNumStates()];
    final Set<Integer> visited = new HashSet<>();
    final LinkedList<Integer> worklist = new LinkedList<>();
    worklist.add(0);
    visited.add(0);
    int upto = 0;
    states[upto] = 0;
    upto++;
    Transition t = new Transition();
    while (worklist.size() > 0) {
        int s = worklist.removeFirst();
        int count = a.initTransition(s, t);
        for (int i=0;i<count;i++) {
            a.getNextTransition(t);
            if (!visited.contains(t.dest)) {
                visited.add(t.dest);
                worklist.add(t.dest);
                states[upto++] = t.dest;
            }
        }
    }
    return states;
}
/** Just escapes the 0xff byte (which we still use for SEP). */
private static final class EscapingTokenStreamToAutomaton extends TokenStreamToAutomaton {

    /** Reused buffer holding the escaped copy of each token. */
    final BytesRefBuilder spare = new BytesRefBuilder();
    private final char sepLabel;

    EscapingTokenStreamToAutomaton(char sepLabel) {
        this.sepLabel = sepLabel;
    }

    @Override
    protected BytesRef changeToken(BytesRef in) {
        final byte sep = (byte) sepLabel;
        int outLen = 0;
        for (int i = 0; i < in.length; i++) {
            final byte b = in.bytes[in.offset + i];
            if (b == sep) {
                // Double the separator byte so it cannot be mistaken for a
                // real token separator at search time.
                spare.grow(outLen + 2);
                spare.setByteAt(outLen++, sep);
            } else {
                spare.grow(outLen + 1);
            }
            spare.setByteAt(outLen++, b);
        }
        spare.setLength(outLen);
        return spare.get();
    }
}
/**
 * Returns the token-stream-to-automaton converter used for both indexing and
 * lookup, configured for this suggester's position-increment setting.
 */
public TokenStreamToAutomaton getTokenStreamToAutomaton() {
    // Escaping is only needed when we steal the sep byte from tokens;
    // otherwise the plain converter suffices:
    final TokenStreamToAutomaton tsta = preserveSep
            ? new EscapingTokenStreamToAutomaton((char) sepLabel)
            : new TokenStreamToAutomaton();
    tsta.setPreservePositionIncrements(preservePositionIncrements);
    return tsta;
}
/**
 * Sorts the intermediate on-disk records produced by {@code build}.
 *
 * Each record is laid out as:
 *   analyzedLength (short) | analyzed bytes | cost (int) |
 *   [surfaceLength (short), only if hasPayloads] | surface bytes | [payload bytes]
 *
 * Ordering is: analyzed form, then cost, then surface form — exactly what
 * the FST builder needs (sorted inputs) and what build()'s dedup logic
 * relies on (equal analyzed forms adjacent, lowest cost first).
 *
 * NOTE(review): the scratch fields make an instance stateful, so it is not
 * safe for concurrent use; the sorter calls it from a single thread.
 */
private static class AnalyzingComparator implements Comparator<BytesRef> {
    // Whether records carry a payload (changes the layout described above).
    private final boolean hasPayloads;
    AnalyzingComparator(boolean hasPayloads) {
        this.hasPayloads = hasPayloads;
    }
    // Reused readers/refs to avoid per-comparison allocation:
    private final ByteArrayDataInput readerA = new ByteArrayDataInput();
    private final ByteArrayDataInput readerB = new ByteArrayDataInput();
    private final BytesRef scratchA = new BytesRef();
    private final BytesRef scratchB = new BytesRef();
    @Override
    public int compare(BytesRef a, BytesRef b) {
        // First by analyzed form:
        readerA.reset(a.bytes, a.offset, a.length);
        scratchA.length = readerA.readShort();
        scratchA.bytes = a.bytes;
        scratchA.offset = readerA.getPosition();
        readerB.reset(b.bytes, b.offset, b.length);
        scratchB.bytes = b.bytes;
        scratchB.length = readerB.readShort();
        scratchB.offset = readerB.getPosition();
        int cmp = scratchA.compareTo(scratchB);
        if (cmp != 0) {
            return cmp;
        }
        readerA.skipBytes(scratchA.length);
        readerB.skipBytes(scratchB.length);
        // Next by cost:
        long aCost = readerA.readInt();
        long bCost = readerB.readInt();
        if (aCost < bCost) {
            return -1;
        } else if (aCost > bCost) {
            return 1;
        }
        // Finally by surface form:
        if (hasPayloads) {
            // Surface length is stored explicitly; the payload follows it.
            scratchA.length = readerA.readShort();
            scratchA.offset = readerA.getPosition();
            scratchB.length = readerB.readShort();
            scratchB.offset = readerB.getPosition();
        } else {
            // No payload: the surface form is simply the rest of the record.
            scratchA.offset = readerA.getPosition();
            scratchA.length = a.length - scratchA.offset;
            scratchB.offset = readerB.getPosition();
            scratchB.length = b.length - scratchB.offset;
        }
        return scratchA.compareTo(scratchB);
    }
}
/** Non-null if this suggester created a temp dir, needed only during build. */
private static FSDirectory tmpBuildDir;

/**
 * Lazily opens (once per JVM) an {@link FSDirectory} over the system temp
 * folder, used as scratch space while sorting during {@code build}.
 */
@SuppressForbidden(reason = "access temp directory for building index")
protected static synchronized FSDirectory getTempDir() {
    if (tmpBuildDir != null) {
        return tmpBuildDir;
    }
    final String tempDirPath = System.getProperty("java.io.tmpdir");
    if (tempDirPath == null) {
        throw new RuntimeException("Java has no temporary folder property (java.io.tmpdir)?");
    }
    try {
        tmpBuildDir = FSDirectory.open(PathUtils.get(tempDirPath));
    } catch (IOException ioe) {
        throw new RuntimeException(ioe);
    }
    return tmpBuildDir;
}
/**
 * Builds the suggester FST from the given {@link InputIterator}.
 *
 * Each (surfaceForm, weight[, payload]) entry is analyzed into one or more
 * byte sequences, written to a temp file, sorted with {@link AnalyzingComparator},
 * deduplicated per analyzed form, and inserted into the FST whose output is
 * (encoded cost, surface[+payloadSep+payload]).
 *
 * @throws IOException on temp-file or analysis failure
 */
@Override
public void build(InputIterator iterator) throws IOException {
    String prefix = getClass().getSimpleName();
    Directory tempDir = getTempDir();
    // BUGFIX: read hasPayloads from the iterator *before* constructing the
    // sorter. AnalyzingComparator(hasPayloads) changes how each sorted record
    // is parsed (payload entries carry an extra surface-length short), so
    // building the comparator from the stale field value silently produced a
    // wrong sort order whenever payloads were present.
    hasPayloads = iterator.hasPayloads();
    OfflineSorter sorter = new OfflineSorter(tempDir, prefix, new AnalyzingComparator(hasPayloads));
    IndexOutput tempInput = tempDir.createTempOutput(prefix, "input", IOContext.DEFAULT);
    OfflineSorter.ByteSequencesWriter writer = new OfflineSorter.ByteSequencesWriter(tempInput);
    OfflineSorter.ByteSequencesReader reader = null;
    BytesRefBuilder scratch = new BytesRefBuilder();
    TokenStreamToAutomaton ts2a = getTokenStreamToAutomaton();
    String tempSortedFileName = null;
    count = 0;
    byte[] buffer = new byte[8];
    try {
        ByteArrayDataOutput output = new ByteArrayDataOutput(buffer);
        for (BytesRef surfaceForm; (surfaceForm = iterator.next()) != null;) {
            // One input may analyze to several paths (synonyms, WDF graphs);
            // cap the expansion at maxGraphExpansions:
            LimitedFiniteStringsIterator finiteStrings =
                new LimitedFiniteStringsIterator(toAutomaton(surfaceForm, ts2a), maxGraphExpansions);
            for (IntsRef string; (string = finiteStrings.next()) != null; count++) {
                Util.toBytesRef(string, scratch);
                // length of the analyzed text (FST input)
                if (scratch.length() > Short.MAX_VALUE-2) {
                    throw new IllegalArgumentException(
                        "cannot handle analyzed forms > " + (Short.MAX_VALUE-2) + " in length (got " + scratch.length() + ")");
                }
                short analyzedLength = (short) scratch.length();
                // compute the required length:
                // analyzed sequence + weight (4) + surface + analyzedLength (short)
                int requiredLength = analyzedLength + 4 + surfaceForm.length + 2;
                BytesRef payload;
                if (hasPayloads) {
                    if (surfaceForm.length > (Short.MAX_VALUE-2)) {
                        throw new IllegalArgumentException(
                            "cannot handle surface form > " + (Short.MAX_VALUE-2) + " in length (got " + surfaceForm.length + ")");
                    }
                    payload = iterator.payload();
                    // payload + surfaceLength (short)
                    requiredLength += payload.length + 2;
                } else {
                    payload = null;
                }
                buffer = ArrayUtil.grow(buffer, requiredLength);
                output.reset(buffer);
                output.writeShort(analyzedLength);
                output.writeBytes(scratch.bytes(), 0, scratch.length());
                output.writeInt(encodeWeight(iterator.weight()));
                if (hasPayloads) {
                    // The separator byte is reserved to split surface form
                    // from payload in the FST output, so reject it here.
                    // (BUGFIX: honor surfaceForm.offset while scanning; the
                    // writes below already do.)
                    for(int i=0;i<surfaceForm.length;i++) {
                        if (surfaceForm.bytes[surfaceForm.offset + i] == payloadSep) {
                            throw new IllegalArgumentException(
                                "surface form cannot contain unit separator character U+001F; this character is reserved");
                        }
                    }
                    output.writeShort((short) surfaceForm.length);
                    output.writeBytes(surfaceForm.bytes, surfaceForm.offset, surfaceForm.length);
                    output.writeBytes(payload.bytes, payload.offset, payload.length);
                } else {
                    output.writeBytes(surfaceForm.bytes, surfaceForm.offset, surfaceForm.length);
                }
                assert output.getPosition() == requiredLength: output.getPosition() + " vs " + requiredLength;
                writer.write(buffer, 0, output.getPosition());
            }
            maxAnalyzedPathsForOneInput = Math.max(maxAnalyzedPathsForOneInput, finiteStrings.size());
        }
        writer.close();
        // Sort all input/output pairs (required by FST.Builder):
        tempSortedFileName = sorter.sort(tempInput.getName());
        // Free disk space:
        tempDir.deleteFile(tempInput.getName());
        reader = new OfflineSorter.ByteSequencesReader(
            tempDir.openChecksumInput(tempSortedFileName, IOContext.READONCE), prefix);
        PairOutputs<Long,BytesRef> outputs = new PairOutputs<>(
            PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton());
        Builder<Pair<Long,BytesRef>> builder = new Builder<>(FST.INPUT_TYPE.BYTE1, outputs);
        // Build FST:
        BytesRefBuilder previousAnalyzed = null;
        BytesRefBuilder analyzed = new BytesRefBuilder();
        BytesRef surface = new BytesRef();
        IntsRefBuilder scratchInts = new IntsRefBuilder();
        ByteArrayDataInput input = new ByteArrayDataInput();
        // Used to remove duplicate surface forms (but we
        // still index the highest-weight one). We clear
        // this when we see a new analyzed form, so it cannot
        // grow unbounded (at most 256 entries):
        Set<BytesRef> seenSurfaceForms = new HashSet<>();
        int dedup = 0;
        while (true) {
            BytesRef bytes = reader.next();
            if (bytes == null) {
                break;
            }
            input.reset(bytes.bytes, bytes.offset, bytes.length);
            short analyzedLength = input.readShort();
            analyzed.grow(analyzedLength+2);
            input.readBytes(analyzed.bytes(), 0, analyzedLength);
            analyzed.setLength(analyzedLength);
            long cost = input.readInt();
            surface.bytes = bytes.bytes;
            if (hasPayloads) {
                surface.length = input.readShort();
                surface.offset = input.getPosition();
            } else {
                surface.offset = input.getPosition();
                surface.length = bytes.length - surface.offset;
            }
            if (previousAnalyzed == null) {
                previousAnalyzed = new BytesRefBuilder();
                previousAnalyzed.copyBytes(analyzed);
                seenSurfaceForms.add(BytesRef.deepCopyOf(surface));
            } else if (analyzed.get().equals(previousAnalyzed.get())) {
                dedup++;
                if (dedup >= maxSurfaceFormsPerAnalyzedForm) {
                    // More than maxSurfaceFormsPerAnalyzedForm
                    // dups: skip the rest:
                    continue;
                }
                if (seenSurfaceForms.contains(surface)) {
                    continue;
                }
                seenSurfaceForms.add(BytesRef.deepCopyOf(surface));
            } else {
                dedup = 0;
                previousAnalyzed.copyBytes(analyzed);
                seenSurfaceForms.clear();
                seenSurfaceForms.add(BytesRef.deepCopyOf(surface));
            }
            // TODO: I think we can avoid the extra 2 bytes when
            // there is no dup (dedup==0), but we'd have to fix
            // the exactFirst logic ... which would be sort of
            // hairy because we'd need to special case the two
            // (dup/not dup)...
            // NOTE: must be byte 0 so we sort before whatever
            // is next
            analyzed.append((byte) 0);
            analyzed.append((byte) dedup);
            Util.toIntsRef(analyzed.get(), scratchInts);
            //System.out.println("ADD: " + scratchInts + " -> " + cost + ": " + surface.utf8ToString());
            if (!hasPayloads) {
                builder.add(scratchInts.get(), outputs.newPair(cost, BytesRef.deepCopyOf(surface)));
            } else {
                // Concatenate surface + payloadSep + payload as the FST output:
                int payloadOffset = input.getPosition() + surface.length;
                int payloadLength = bytes.length - payloadOffset;
                BytesRef br = new BytesRef(surface.length + 1 + payloadLength);
                System.arraycopy(surface.bytes, surface.offset, br.bytes, 0, surface.length);
                br.bytes[surface.length] = (byte) payloadSep;
                System.arraycopy(bytes.bytes, payloadOffset, br.bytes, surface.length+1, payloadLength);
                br.length = br.bytes.length;
                builder.add(scratchInts.get(), outputs.newPair(cost, br));
            }
        }
        fst = builder.finish();
        //PrintWriter pw = new PrintWriter("/tmp/out.dot");
        //Util.toDot(fst, pw, true, true);
        //pw.close();
    } finally {
        IOUtils.closeWhileHandlingException(reader, writer);
        IOUtils.deleteFilesIgnoringExceptions(tempDir, tempInput.getName(), tempSortedFileName);
    }
}
/**
 * Serializes this suggester to {@code output}.
 *
 * Format: FST bytes, then maxAnalyzedPathsForOneInput (vint), then
 * hasPayloads (one byte, 0/1). Unlike {@code store(DataOutput)} this
 * variant does not persist {@code count}.
 *
 * NOTE: the provided stream is always closed before returning, even when
 * {@code false} is returned because no FST has been built yet.
 *
 * @return true if the suggester was written; false if there is nothing to save
 */
@Override
public boolean store(OutputStream output) throws IOException {
    DataOutput dataOut = new OutputStreamDataOutput(output);
    try {
        if (fst == null) {
            return false;
        }
        fst.save(dataOut);
        dataOut.writeVInt(maxAnalyzedPathsForOneInput);
        dataOut.writeByte((byte) (hasPayloads ? 1 : 0));
    } finally {
        IOUtils.close(output);
    }
    return true;
}
/** Returns the number of analyzed paths indexed by the last {@code build} (or read back by {@code load(DataInput)}). */
@Override
public long getCount() {
    return count;
}
/**
 * Deserializes a suggester previously written by {@code store(OutputStream)}:
 * FST bytes, then maxAnalyzedPathsForOneInput (vint), then hasPayloads
 * (one byte). Note {@code count} is not part of this format.
 *
 * NOTE: the provided stream is always closed before returning.
 *
 * @return always true (failures surface as exceptions)
 */
@Override
public boolean load(InputStream input) throws IOException {
    DataInput dataIn = new InputStreamDataInput(input);
    try {
        this.fst = new FST<>(dataIn, new PairOutputs<>(
            PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton()));
        maxAnalyzedPathsForOneInput = dataIn.readVInt();
        hasPayloads = dataIn.readByte() == 1;
    } finally {
        IOUtils.close(input);
    }
    return true;
}
/**
 * Converts one FST completion (cost + stored bytes) into a {@link LookupResult}.
 *
 * With payloads, {@code output2} is "surface + payloadSep + payload" and is
 * split at the first separator byte; otherwise it is just the surface form.
 */
private LookupResult getLookupResult(Long output1, BytesRef output2, CharsRefBuilder spare) {
    LookupResult result;
    if (hasPayloads) {
        int sepIndex = -1;
        for(int i=0;i<output2.length;i++) {
            if (output2.bytes[output2.offset+i] == payloadSep) {
                sepIndex = i;
                break;
            }
        }
        // build() always writes a separator when payloads are enabled:
        assert sepIndex != -1;
        final int payloadLen = output2.length - sepIndex - 1;
        spare.copyUTF8Bytes(output2.bytes, output2.offset, sepIndex);
        BytesRef payload = new BytesRef(payloadLen);
        // NOTE(review): this copy starts at sepIndex+1 without adding
        // output2.offset, so it assumes output2.offset == 0 (which holds for
        // FST outputs built here) — confirm before reusing elsewhere.
        System.arraycopy(output2.bytes, sepIndex+1, payload.bytes, 0, payloadLen);
        payload.length = payloadLen;
        result = new LookupResult(spare.toString(), decodeWeight(output1), payload);
    } else {
        spare.copyUTF8Bytes(output2);
        result = new LookupResult(spare.toString(), decodeWeight(output1));
    }
    return result;
}
/**
 * Returns true if the FST output {@code output2} encodes exactly the surface
 * form {@code key}. With payloads, output2 is "surface + payloadSep + payload",
 * so the key must be a strict prefix followed by the separator byte.
 */
private boolean sameSurfaceForm(BytesRef key, BytesRef output2) {
    if (!hasPayloads) {
        return key.bytesEquals(output2);
    }
    // output2 has at least the PAYLOAD_SEP byte, so it is strictly longer:
    if (key.length >= output2.length) {
        return false;
    }
    int i = 0;
    while (i < key.length) {
        if (key.bytes[key.offset + i] != output2.bytes[output2.offset + i]) {
            return false;
        }
        i++;
    }
    return output2.bytes[output2.offset + key.length] == payloadSep;
}
/**
 * Returns up to {@code num} suggestions for {@code key}, highest weight first.
 * When exactFirst is set, an exact surface-form match (if present) is always
 * returned first.
 *
 * @param key query text; must not contain the reserved HOLE (U+001E) or
 *        separator (U+001F) characters
 * @param contexts ignored by this suggester
 * @param onlyMorePopular must be false
 * @param num maximum number of results; must be positive
 * @throws IllegalArgumentException if onlyMorePopular is true or key contains
 *         a reserved character
 */
@Override
public List<LookupResult> lookup(final CharSequence key, Set<BytesRef> contexts, boolean onlyMorePopular, int num) {
    assert num > 0;
    if (onlyMorePopular) {
        throw new IllegalArgumentException("this suggester only works with onlyMorePopular=false");
    }
    if (fst == null) {
        // Nothing built/loaded yet:
        return Collections.emptyList();
    }
    //System.out.println("lookup key=" + key + " num=" + num);
    for (int i = 0; i < key.length(); i++) {
        if (key.charAt(i) == holeCharacter) {
            throw new IllegalArgumentException(
                "lookup key cannot contain HOLE character U+001E; this character is reserved");
        }
        if (key.charAt(i) == sepLabel) {
            throw new IllegalArgumentException(
                "lookup key cannot contain unit separator character U+001F; this character is reserved");
        }
    }
    final BytesRef utf8Key = new BytesRef(key);
    try {
        Automaton lookupAutomaton = toLookupAutomaton(key);
        final CharsRefBuilder spare = new CharsRefBuilder();
        //System.out.println("  now intersect exactFirst=" + exactFirst);
        // Intersect automaton w/ suggest wFST and get all
        // prefix starting nodes & their outputs:
        //final PathIntersector intersector = getPathIntersector(lookupAutomaton, fst);
        //System.out.println("  prefixPaths: " + prefixPaths.size());
        BytesReader bytesReader = fst.getBytesReader();
        FST.Arc<Pair<Long,BytesRef>> scratchArc = new FST.Arc<>();
        final List<LookupResult> results = new ArrayList<>();
        List<FSTUtil.Path<Pair<Long,BytesRef>>> prefixPaths = FSTUtil.intersectPrefixPaths(convertAutomaton(lookupAutomaton), fst);
        if (exactFirst) {
            // First pass: count how many prefix nodes can terminate exactly,
            // so the top-N searcher below can be sized precisely.
            int count = 0;
            for (FSTUtil.Path<Pair<Long,BytesRef>> path : prefixPaths) {
                if (fst.findTargetArc(endByte, path.fstNode, scratchArc, bytesReader) != null) {
                    // This node has END_BYTE arc leaving, meaning it's an
                    // "exact" match:
                    count++;
                }
            }
            // Searcher just to find the single exact only
            // match, if present:
            Util.TopNSearcher<Pair<Long,BytesRef>> searcher;
            searcher = new Util.TopNSearcher<>(
                fst, count * maxSurfaceFormsPerAnalyzedForm, count * maxSurfaceFormsPerAnalyzedForm, weightComparator);
            // NOTE: we could almost get away with only using
            // the first start node.  The only catch is if
            // maxSurfaceFormsPerAnalyzedForm had kicked in and
            // pruned our exact match from one of these nodes
            // ...:
            for (FSTUtil.Path<Pair<Long,BytesRef>> path : prefixPaths) {
                if (fst.findTargetArc(endByte, path.fstNode, scratchArc, bytesReader) != null) {
                    // This node has END_BYTE arc leaving, meaning it's an
                    // "exact" match:
                    searcher.addStartPaths(scratchArc, fst.outputs.add(path.output, scratchArc.output), false, path.input);
                }
            }
            Util.TopResults<Pair<Long,BytesRef>> completions = searcher.search();
            // NOTE: this is rather inefficient: we enumerate
            // every matching "exactly the same analyzed form"
            // path, and then do linear scan to see if one of
            // these exactly matches the input.  It should be
            // possible (though hairy) to do something similar
            // to getByOutput, since the surface form is encoded
            // into the FST output, so we more efficiently hone
            // in on the exact surface-form match.  Still, I
            // suspect very little time is spent in this linear
            // search: it's bounded by how many prefix start
            // nodes we have and the
            // maxSurfaceFormsPerAnalyzedForm:
            for(Result<Pair<Long,BytesRef>> completion : completions) {
                BytesRef output2 = completion.output.output2;
                if (sameSurfaceForm(utf8Key, output2)) {
                    results.add(getLookupResult(completion.output.output1, output2, spare));
                    break;
                }
            }
            if (results.size() == num) {
                // That was quick:
                return results;
            }
        }
        // Main search over all completions of the (possibly fuzzy-expanded)
        // prefix paths, deduping identical surface forms:
        Util.TopNSearcher<Pair<Long,BytesRef>> searcher;
        searcher = new Util.TopNSearcher<Pair<Long,BytesRef>>(fst,
                                                              num - results.size(),
                                                              num * maxAnalyzedPathsForOneInput,
                                                              weightComparator) {
            private final Set<BytesRef> seen = new HashSet<>();
            @Override
            protected boolean acceptResult(IntsRef input, Pair<Long,BytesRef> output) {
                // Dedup: when the input analyzes to a graph we
                // can get duplicate surface forms:
                if (seen.contains(output.output2)) {
                    return false;
                }
                seen.add(output.output2);
                if (!exactFirst) {
                    return true;
                } else {
                    // In exactFirst mode, don't accept any paths
                    // matching the surface form since that will
                    // create duplicate results:
                    if (sameSurfaceForm(utf8Key, output.output2)) {
                        // We found exact match, which means we should
                        // have already found it in the first search:
                        assert results.size() == 1;
                        return false;
                    } else {
                        return true;
                    }
                }
            }
        };
        prefixPaths = getFullPrefixPaths(prefixPaths, lookupAutomaton, fst);
        for (FSTUtil.Path<Pair<Long,BytesRef>> path : prefixPaths) {
            searcher.addStartPaths(path.fstNode, path.output, true, path.input);
        }
        TopResults<Pair<Long,BytesRef>> completions = searcher.search();
        for(Result<Pair<Long,BytesRef>> completion : completions) {
            LookupResult result = getLookupResult(completion.output.output1, completion.output.output2, spare);
            // TODO: for fuzzy case would be nice to return
            // how many edits were required
            //System.out.println("  result=" + result);
            results.add(result);
            if (results.size() == num) {
                // In the exactFirst=true case the search may
                // produce one extra path
                break;
            }
        }
        return results;
    } catch (IOException bogus) {
        throw new RuntimeException(bogus);
    }
}
/**
 * Serializes this suggester: count (vlong), FST bytes,
 * maxAnalyzedPathsForOneInput (vint), hasPayloads (one byte). This is the
 * format read back by {@code load(DataInput)}. The output is not closed.
 *
 * @return true if the suggester was written; false if no FST has been built
 *         (note count has already been written in that case)
 */
@Override
public boolean store(DataOutput output) throws IOException {
    output.writeVLong(count);
    if (fst == null) {
        return false;
    }
    fst.save(output);
    output.writeVInt(maxAnalyzedPathsForOneInput);
    output.writeByte((byte) (hasPayloads ? 1 : 0));
    return true;
}
/**
 * Deserializes a suggester previously written by {@code store(DataOutput)}:
 * count (vlong), FST bytes, maxAnalyzedPathsForOneInput (vint), hasPayloads
 * (one byte). The input is not closed.
 *
 * @return always true (failures surface as exceptions)
 */
@Override
public boolean load(DataInput input) throws IOException {
    count = input.readVLong();
    this.fst = new FST<>(input, new PairOutputs<>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton()));
    maxAnalyzedPathsForOneInput = input.readVInt();
    hasPayloads = input.readByte() == 1;
    return true;
}
/**
 * Returns all completion paths to initialize the search. This base
 * implementation returns {@code prefixPaths} unchanged; subclasses (e.g.
 * fuzzy variants) override it to add extra start paths.
 */
protected List<FSTUtil.Path<Pair<Long,BytesRef>>> getFullPrefixPaths(List<FSTUtil.Path<Pair<Long,BytesRef>>> prefixPaths,
                                                                     Automaton lookupAutomaton,
                                                                     FST<Pair<Long,BytesRef>> fst)
    throws IOException {
    return prefixPaths;
}
/**
 * Analyzes {@code surfaceForm} with the index analyzer and converts the
 * resulting token stream into a byte-label automaton.
 */
final Automaton toAutomaton(final BytesRef surfaceForm, final TokenStreamToAutomaton ts2a) throws IOException {
    try (TokenStream ts = indexAnalyzer.tokenStream("", surfaceForm.utf8ToString())) {
        return toAutomaton(ts, ts2a);
    }
}
/**
 * Converts {@code ts} into a byte-label automaton with separators/holes
 * rewritten via {@code replaceSep} and the query-prefix hook applied via
 * {@code convertAutomaton}. Does not close the stream.
 */
final Automaton toAutomaton(TokenStream ts, final TokenStreamToAutomaton ts2a) throws IOException {
    // Create corresponding automaton: labels are bytes
    // from each analyzed token, with byte 0 used as
    // separator between tokens:
    Automaton automaton = ts2a.toAutomaton(ts);
    automaton = replaceSep(automaton);
    automaton = convertAutomaton(automaton);
    // TODO: LUCENE-5660 re-enable this once we disallow massive suggestion strings
    // assert SpecialOperations.isFinite(automaton);
    // Get all paths from the automaton (there can be
    // more than one path, eg if the analyzer created a
    // graph using SynFilter or WDF):
    return automaton;
}
// EDIT: Adrien, needed by lookup providers
// NOTE: these XForks are unmaintainable, we need to get rid of them...
/**
 * Analyzes {@code stream} and returns every distinct byte-label path through
 * the resulting automaton, bounded by maxGraphExpansions. The stream is
 * consumed and closed; the returned set is unmodifiable.
 */
public Set<IntsRef> toFiniteStrings(TokenStream stream) throws IOException {
    final TokenStreamToAutomaton ts2a = getTokenStreamToAutomaton();
    Automaton automaton;
    try (TokenStream ts = stream) {
        automaton = toAutomaton(ts, ts2a);
    }
    LimitedFiniteStringsIterator finiteStrings =
        new LimitedFiniteStringsIterator(automaton, maxGraphExpansions);
    Set<IntsRef> set = new HashSet<>();
    // Iterator reuses its scratch ref, so deep-copy each string:
    for (IntsRef string = finiteStrings.next(); string != null; string = finiteStrings.next()) {
        set.add(IntsRef.deepCopyOf(string));
    }
    return Collections.unmodifiableSet(set);
}
/**
 * Analyzes the query {@code key} with the query analyzer and returns the
 * determinized automaton used to intersect against the suggest FST.
 */
final Automaton toLookupAutomaton(final CharSequence key) throws IOException {
    // TODO: is there a Reader from a CharSequence?
    // Turn tokenstream into automaton:
    Automaton automaton = null;
    try (TokenStream ts = queryAnalyzer.tokenStream("", key.toString())) {
        automaton = getTokenStreamToAutomaton().toAutomaton(ts);
    }
    automaton = replaceSep(automaton);
    // TODO: we can optimize this somewhat by determinizing
    // while we convert
    // This automaton should not blow up during determinize:
    automaton = Operations.determinize(automaton, Integer.MAX_VALUE);
    return automaton;
}
/**
 * Returns the weight associated with an input string.
 *
 * Unsupported in this implementation: this method always throws
 * {@link UnsupportedOperationException} and never returns a value.
 *
 * @param key input string
 * @return never returns normally
 * @throws UnsupportedOperationException always
 */
public Object get(CharSequence key) {
    throw new UnsupportedOperationException();
}
/**
 * Decodes an FST cost back into the original weight
 * (inverse of {@link #encodeWeight}).
 *
 * @param encoded cost as stored in the FST
 * @return the original weight
 */
public static int decodeWeight(long encoded) {
    final long weight = Integer.MAX_VALUE - encoded;
    return (int) weight;
}
/**
 * Encodes a weight as an FST cost: higher weights map to lower costs so the
 * best suggestions sort first.
 *
 * @param value weight in [0, Integer.MAX_VALUE]
 * @return the encoded cost
 * @throws UnsupportedOperationException if the weight is out of range
 */
public static int encodeWeight(long value) {
    if (value < 0 || value > Integer.MAX_VALUE) {
        throw new UnsupportedOperationException("cannot encode value: " + value);
    }
    return (int) (Integer.MAX_VALUE - value);
}
/** Orders completion outputs by encoded cost (output1) ascending, i.e. best-weighted suggestions first. */
static final Comparator<Pair<Long,BytesRef>> weightComparator = new Comparator<Pair<Long,BytesRef>> () {
    @Override
    public int compare(Pair<Long,BytesRef> left, Pair<Long,BytesRef> right) {
        return Long.compare(left.output1, right.output1);
    }
};
/**
 * Incremental FST builder used at index time. Callers group inputs by
 * analyzed form: call {@code startTerm} with the analyzed bytes, add up to
 * maxSurfaceFormsPerAnalyzedForm candidate surface forms with
 * {@code addSurface}, then flush them into the FST with {@code finishTerm}.
 * Inputs must arrive in sorted order of analyzed form (FST requirement).
 */
public static class XBuilder {
    private Builder<Pair<Long, BytesRef>> builder;
    private int maxSurfaceFormsPerAnalyzedForm;
    private IntsRefBuilder scratchInts = new IntsRefBuilder();
    private final PairOutputs<Long, BytesRef> outputs;
    private boolean hasPayloads;
    // Analyzed form of the current term; finishTerm appends a (0, dedup)
    // suffix before each FST insertion.
    private BytesRefBuilder analyzed = new BytesRefBuilder();
    // Candidate surface forms collected for the current analyzed form.
    private final SurfaceFormAndPayload[] surfaceFormsAndPayload;
    private int count;
    // Maps a surface form to its slot in surfaceFormsAndPayload so duplicates
    // keep only the best-weighted entry.
    private ObjectIntHashMap<BytesRef> seenSurfaceForms = HppcMaps.Object.Integer.ensureNoNullKeys(256, 0.75f);
    private int payloadSep;
    public XBuilder(int maxSurfaceFormsPerAnalyzedForm, boolean hasPayloads, int payloadSep) {
        this.payloadSep = payloadSep;
        this.outputs = new PairOutputs<>(PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton());
        this.builder = new Builder<>(FST.INPUT_TYPE.BYTE1, outputs);
        this.maxSurfaceFormsPerAnalyzedForm = maxSurfaceFormsPerAnalyzedForm;
        this.hasPayloads = hasPayloads;
        surfaceFormsAndPayload = new SurfaceFormAndPayload[maxSurfaceFormsPerAnalyzedForm];
    }
    /** Begins a new analyzed form; must be called before {@code addSurface}. */
    public void startTerm(BytesRef analyzed) {
        // +2 reserves room for the (0, dedup) suffix appended in finishTerm:
        this.analyzed.grow(analyzed.length+2);
        this.analyzed.copyBytes(analyzed);
    }
    /** One candidate surface form (already combined with its payload) plus its encoded weight. */
    private static final class SurfaceFormAndPayload implements Comparable<SurfaceFormAndPayload> {
        BytesRef payload;
        long weight;
        SurfaceFormAndPayload(BytesRef payload, long cost) {
            super();
            this.payload = payload;
            this.weight = cost;
        }
        @Override
        public int compareTo(SurfaceFormAndPayload o) {
            // Ascending encoded cost: a lower cost means a higher original weight.
            int res = compare(weight, o.weight);
            if (res == 0 ){
                return payload.compareTo(o.payload);
            }
            return res;
        }
        public static int compare(long x, long y) {
            return (x < y) ? -1 : ((x == y) ? 0 : 1);
        }
    }
    /**
     * Registers a surface form for the current analyzed form. A cost of -1
     * defers weighting to {@code finishTerm}'s defaultWeight. Duplicate
     * surface forms keep only the entry with the best (lowest encoded) weight.
     */
    public void addSurface(BytesRef surface, BytesRef payload, long cost) throws IOException {
        int surfaceIndex = -1;
        long encodedWeight = cost == -1 ? cost : encodeWeight(cost);
        /*
         * we need to check if we have seen this surface form, if so only use the
         * the surface form with the highest weight and drop the rest no matter if
         * the payload differs.
         */
        if (count >= maxSurfaceFormsPerAnalyzedForm) {
            // More than maxSurfaceFormsPerAnalyzedForm
            // dups: skip the rest:
            return;
        }
        BytesRef surfaceCopy;
        final int keySlot;
        if (count > 0 && (keySlot = seenSurfaceForms.indexOf(surface)) >= 0) {
            // Already seen: only replace when the new entry is strictly better
            // (lower encoded cost == higher weight).
            surfaceIndex = seenSurfaceForms.indexGet(keySlot);
            SurfaceFormAndPayload surfaceFormAndPayload = surfaceFormsAndPayload[surfaceIndex];
            if (encodedWeight >= surfaceFormAndPayload.weight) {
                return;
            }
            surfaceCopy = BytesRef.deepCopyOf(surface);
        } else {
            surfaceIndex = count++;
            surfaceCopy = BytesRef.deepCopyOf(surface);
            seenSurfaceForms.put(surfaceCopy, surfaceIndex);
        }
        // The stored FST output is "surface" alone, or
        // "surface + payloadSep + payload" when payloads are enabled:
        BytesRef payloadRef;
        if (!hasPayloads) {
            payloadRef = surfaceCopy;
        } else {
            int len = surface.length + 1 + payload.length;
            final BytesRef br = new BytesRef(len);
            System.arraycopy(surface.bytes, surface.offset, br.bytes, 0, surface.length);
            br.bytes[surface.length] = (byte) payloadSep;
            System.arraycopy(payload.bytes, payload.offset, br.bytes, surface.length + 1, payload.length);
            br.length = len;
            payloadRef = br;
        }
        if (surfaceFormsAndPayload[surfaceIndex] == null) {
            surfaceFormsAndPayload[surfaceIndex] = new SurfaceFormAndPayload(payloadRef, encodedWeight);
        } else {
            surfaceFormsAndPayload[surfaceIndex].payload = payloadRef;
            surfaceFormsAndPayload[surfaceIndex].weight = encodedWeight;
        }
    }
    /**
     * Flushes the collected surface forms for the current analyzed form into
     * the FST, best weight first, disambiguating identical analyzed forms
     * with a trailing (0, dedup-ordinal) byte pair. Entries whose weight was
     * deferred (-1) get {@code defaultWeight}.
     */
    public void finishTerm(long defaultWeight) throws IOException {
        ArrayUtil.timSort(surfaceFormsAndPayload, 0, count);
        int deduplicator = 0;
        analyzed.append((byte) 0);
        analyzed.setLength(analyzed.length() + 1);
        analyzed.grow(analyzed.length());
        for (int i = 0; i < count; i++) {
            // Overwrite the last byte with the per-surface dedup ordinal:
            analyzed.setByteAt(analyzed.length() - 1, (byte) deduplicator++);
            Util.toIntsRef(analyzed.get(), scratchInts);
            SurfaceFormAndPayload candiate = surfaceFormsAndPayload[i];
            long cost = candiate.weight == -1 ? encodeWeight(Math.min(Integer.MAX_VALUE, defaultWeight)) : candiate.weight;
            builder.add(scratchInts.get(), outputs.newPair(cost, candiate.payload));
        }
        seenSurfaceForms.clear();
        count = 0;
    }
    /** Finalizes and returns the FST; call once after the last finishTerm. */
    public FST<Pair<Long, BytesRef>> build() throws IOException {
        return builder.finish();
    }
    /** Whether outputs carry a payload after the separator byte. */
    public boolean hasPayloads() {
        return hasPayloads;
    }
    /** Maximum number of surface forms kept per analyzed form. */
    public int maxSurfaceFormsPerAnalyzedForm() {
        return maxSurfaceFormsPerAnalyzedForm;
    }
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.wafv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/wafv2-2019-07-29/GetRegexPatternSet" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetRegexPatternSetRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The name of the set. You cannot change the name after you create the set.
* </p>
*/
private String name;
/**
* <p>
* Specifies whether this is for an Amazon CloudFront distribution or for a regional application. A regional
* application can be an Application Load Balancer (ALB), an Amazon API Gateway REST API, or an AppSync GraphQL API.
* </p>
* <p>
* To work with CloudFront, you must also specify the Region US East (N. Virginia) as follows:
* </p>
* <ul>
* <li>
* <p>
* CLI - Specify the Region when you use the CloudFront scope: <code>--scope=CLOUDFRONT --region=us-east-1</code>.
* </p>
* </li>
* <li>
* <p>
* API and SDKs - For all calls, use the Region endpoint us-east-1.
* </p>
* </li>
* </ul>
*/
private String scope;
/**
* <p>
* A unique identifier for the set. This ID is returned in the responses to create and list commands. You provide it
* to operations like update and delete.
* </p>
*/
private String id;
/**
 * <p>
 * The name of the set. You cannot change the name after you create the set.
 * </p>
 *
 * @param name
 *        The name of the set. You cannot change the name after you create the set.
 * @see #getName()
 * @see #withName(String)
 */
public void setName(String name) {
    this.name = name;
}
/**
 * <p>
 * The name of the set. You cannot change the name after you create the set.
 * </p>
 *
 * @return The name of the set. You cannot change the name after you create the set.
 * @see #setName(String)
 */
public String getName() {
    return this.name;
}
/**
 * <p>
 * The name of the set. You cannot change the name after you create the set.
 * </p>
 *
 * @param name
 *        The name of the set. You cannot change the name after you create the set.
 * @return Returns a reference to this object so that method calls can be chained together.
 * @see #setName(String)
 */
public GetRegexPatternSetRequest withName(String name) {
    setName(name);
    return this;
}
/**
 * <p>
 * Specifies whether this is for an Amazon CloudFront distribution or for a regional application. A regional
 * application can be an Application Load Balancer (ALB), an Amazon API Gateway REST API, or an AppSync GraphQL API.
 * </p>
 * <p>
 * To work with CloudFront, you must also specify the Region US East (N. Virginia) as follows:
 * </p>
 * <ul>
 * <li>
 * <p>
 * CLI - Specify the Region when you use the CloudFront scope: <code>--scope=CLOUDFRONT --region=us-east-1</code>.
 * </p>
 * </li>
 * <li>
 * <p>
 * API and SDKs - For all calls, use the Region endpoint us-east-1.
 * </p>
 * </li>
 * </ul>
 *
 * @param scope
 *        Specifies whether this is for an Amazon CloudFront distribution or for a regional application. A regional
 *        application can be an Application Load Balancer (ALB), an Amazon API Gateway REST API, or an AppSync
 *        GraphQL API. </p>
 *        <p>
 *        To work with CloudFront, you must also specify the Region US East (N. Virginia) as follows:
 *        </p>
 *        <ul>
 *        <li>
 *        <p>
 *        CLI - Specify the Region when you use the CloudFront scope:
 *        <code>--scope=CLOUDFRONT --region=us-east-1</code>.
 *        </p>
 *        </li>
 *        <li>
 *        <p>
 *        API and SDKs - For all calls, use the Region endpoint us-east-1.
 *        </p>
 *        </li>
 *        </ul>
 * @see Scope
 */
public void setScope(String scope) {
    this.scope = scope;
}
/**
* <p>
* Specifies whether this is for an Amazon CloudFront distribution or for a regional application. A regional
* application can be an Application Load Balancer (ALB), an Amazon API Gateway REST API, or an AppSync GraphQL API.
* </p>
* <p>
* To work with CloudFront, you must also specify the Region US East (N. Virginia) as follows:
* </p>
* <ul>
* <li>
* <p>
* CLI - Specify the Region when you use the CloudFront scope: <code>--scope=CLOUDFRONT --region=us-east-1</code>.
* </p>
* </li>
* <li>
* <p>
* API and SDKs - For all calls, use the Region endpoint us-east-1.
* </p>
* </li>
* </ul>
*
* @return Specifies whether this is for an Amazon CloudFront distribution or for a regional application. A regional
* application can be an Application Load Balancer (ALB), an Amazon API Gateway REST API, or an AppSync
* GraphQL API. </p>
* <p>
* To work with CloudFront, you must also specify the Region US East (N. Virginia) as follows:
* </p>
* <ul>
* <li>
* <p>
* CLI - Specify the Region when you use the CloudFront scope:
* <code>--scope=CLOUDFRONT --region=us-east-1</code>.
* </p>
* </li>
* <li>
* <p>
* API and SDKs - For all calls, use the Region endpoint us-east-1.
* </p>
* </li>
* @see Scope
*/
public String getScope() {
    // Returns the stored scope string; may be null if never set.
    return this.scope;
}
/**
* <p>
* Specifies whether this is for an Amazon CloudFront distribution or for a regional application. A regional
* application can be an Application Load Balancer (ALB), an Amazon API Gateway REST API, or an AppSync GraphQL API.
* </p>
* <p>
* To work with CloudFront, you must also specify the Region US East (N. Virginia) as follows:
* </p>
* <ul>
* <li>
* <p>
* CLI - Specify the Region when you use the CloudFront scope: <code>--scope=CLOUDFRONT --region=us-east-1</code>.
* </p>
* </li>
* <li>
* <p>
* API and SDKs - For all calls, use the Region endpoint us-east-1.
* </p>
* </li>
* </ul>
*
* @param scope
* Specifies whether this is for an Amazon CloudFront distribution or for a regional application. A regional
* application can be an Application Load Balancer (ALB), an Amazon API Gateway REST API, or an AppSync
* GraphQL API. </p>
* <p>
* To work with CloudFront, you must also specify the Region US East (N. Virginia) as follows:
* </p>
* <ul>
* <li>
* <p>
* CLI - Specify the Region when you use the CloudFront scope:
* <code>--scope=CLOUDFRONT --region=us-east-1</code>.
* </p>
* </li>
* <li>
* <p>
* API and SDKs - For all calls, use the Region endpoint us-east-1.
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
* @see Scope
*/
public GetRegexPatternSetRequest withScope(String scope) {
    // Delegate to the setter (kept as a call, not inlined, so subclass
    // overrides of setScope still take effect), then return this for chaining.
    setScope(scope);
    return this;
}
/**
* <p>
* Specifies whether this is for an Amazon CloudFront distribution or for a regional application. A regional
* application can be an Application Load Balancer (ALB), an Amazon API Gateway REST API, or an AppSync GraphQL API.
* </p>
* <p>
* To work with CloudFront, you must also specify the Region US East (N. Virginia) as follows:
* </p>
* <ul>
* <li>
* <p>
* CLI - Specify the Region when you use the CloudFront scope: <code>--scope=CLOUDFRONT --region=us-east-1</code>.
* </p>
* </li>
* <li>
* <p>
* API and SDKs - For all calls, use the Region endpoint us-east-1.
* </p>
* </li>
* </ul>
*
* @param scope
* Specifies whether this is for an Amazon CloudFront distribution or for a regional application. A regional
* application can be an Application Load Balancer (ALB), an Amazon API Gateway REST API, or an AppSync
* GraphQL API. </p>
* <p>
* To work with CloudFront, you must also specify the Region US East (N. Virginia) as follows:
* </p>
* <ul>
* <li>
* <p>
* CLI - Specify the Region when you use the CloudFront scope:
* <code>--scope=CLOUDFRONT --region=us-east-1</code>.
* </p>
* </li>
* <li>
* <p>
* API and SDKs - For all calls, use the Region endpoint us-east-1.
* </p>
* </li>
* @return Returns a reference to this object so that method calls can be chained together.
* @see Scope
*/
public GetRegexPatternSetRequest withScope(Scope scope) {
    // Store the enum's string form directly. Note: throws NullPointerException
    // when scope is null (scope.toString()); this mirrors the original behavior.
    this.scope = scope.toString();
    return this;
}
/**
* <p>
* A unique identifier for the set. This ID is returned in the responses to create and list commands. You provide it
* to operations like update and delete.
* </p>
*
* @param id
* A unique identifier for the set. This ID is returned in the responses to create and list commands. You
* provide it to operations like update and delete.
*/
public void setId(String id) {
    // Plain assignment; the identifier is stored as-is without validation.
    this.id = id;
}
/**
* <p>
* A unique identifier for the set. This ID is returned in the responses to create and list commands. You provide it
* to operations like update and delete.
* </p>
*
* @return A unique identifier for the set. This ID is returned in the responses to create and list commands. You
* provide it to operations like update and delete.
*/
public String getId() {
    // Returns the stored identifier; may be null if never set.
    return this.id;
}
/**
* <p>
* A unique identifier for the set. This ID is returned in the responses to create and list commands. You provide it
* to operations like update and delete.
* </p>
*
* @param id
* A unique identifier for the set. This ID is returned in the responses to create and list commands. You
* provide it to operations like update and delete.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetRegexPatternSetRequest withId(String id) {
    // Delegate to the setter (not inlined, so subclass overrides of setId
    // still take effect), then return this request for fluent chaining.
    setId(id);
    return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
    // A field is emitted only when non-null; a comma follows Name and Scope
    // even when they are the last emitted field (matches the original format).
    StringBuilder buf = new StringBuilder("{");
    if (getName() != null) {
        buf.append("Name: ").append(getName()).append(",");
    }
    if (getScope() != null) {
        buf.append("Scope: ").append(getScope()).append(",");
    }
    if (getId() != null) {
        buf.append("Id: ").append(getId());
    }
    return buf.append("}").toString();
}
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof GetRegexPatternSetRequest)) {
        // also handles obj == null: null is never an instance of anything
        return false;
    }
    GetRegexPatternSetRequest that = (GetRegexPatternSetRequest) obj;
    return nullSafeEquals(getName(), that.getName())
            && nullSafeEquals(getScope(), that.getScope())
            && nullSafeEquals(getId(), that.getId());
}

/** Null-tolerant equality check used by {@link #equals(Object)}. */
private static boolean nullSafeEquals(Object a, Object b) {
    return (a == null) ? (b == null) : a.equals(b);
}
@Override
public int hashCode() {
    // Standard 31-based accumulation over the same fields, in the same order,
    // as equals() — so equal objects always produce equal hash codes.
    final int prime = 31;
    int hash = 1;
    hash = prime * hash + (getName() == null ? 0 : getName().hashCode());
    hash = prime * hash + (getScope() == null ? 0 : getScope().hashCode());
    hash = prime * hash + (getId() == null ? 0 : getId().hashCode());
    return hash;
}
@Override
public GetRegexPatternSetRequest clone() {
    // Covariant override narrowing the return type; the actual copying is
    // delegated to the superclass implementation.
    return (GetRegexPatternSetRequest) super.clone();
}
}
| |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.groovy.compiler;
import com.intellij.compiler.options.JavaCompilersTab;
import com.intellij.compiler.server.BuildManager;
import com.intellij.ide.DataManager;
import com.intellij.openapi.compiler.options.ExcludedEntriesConfigurable;
import com.intellij.openapi.compiler.options.ExcludesConfiguration;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.options.ex.Settings;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.TextWithMnemonic;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.ColorUtil;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.jps.model.java.JavaModuleSourceRootTypes;
import org.jetbrains.plugins.groovy.GroovyBundle;
import javax.swing.*;
import javax.swing.event.HyperlinkEvent;
import javax.swing.text.DefaultCaret;
import javax.swing.text.EditorKit;
import javax.swing.text.html.HTMLEditorKit;
import javax.swing.text.html.StyleSheet;
import java.awt.*;
import java.util.List;
import java.util.Objects;
/**
* @author peter
*/
public class GroovyCompilerConfigurable implements SearchableConfigurable, Configurable.NoScroll {
    private final Project myProject;
    // UI components; presumably bound by the IntelliJ UI designer (.form file),
    // with the custom ones instantiated in createUIComponents() — confirm
    // against the matching form file.
    private JPanel myMainPanel;
    private JPanel myExcludesPanel;
    private JBCheckBox myInvokeDynamicSupportCB;
    private TextFieldWithBrowseButton myConfigScriptPath;
    private JPanel myPathPanel;
    private final ExcludedEntriesConfigurable myExcludes;
    private final GroovyCompilerConfiguration myConfig;

    public GroovyCompilerConfigurable(Project project) {
        myProject = project;
        myConfig = GroovyCompilerConfiguration.getInstance(project);
        myExcludes = createExcludedConfigurable(project);
        // Titled border for the excludes section without the separator line.
        myExcludesPanel.setBorder(IdeBorderFactory.createTitledBorder(GroovyBundle.message("settings.compiler.exclude.from.stub.generation"), false, JBUI.insetsTop(8)).setShowLine(false));
    }

    /** @return the configurable managing the stub-generation exclusion list */
    public ExcludedEntriesConfigurable getExcludes() {
        return myExcludes;
    }

    /**
     * Builds the excludes configurable backed by the compiler configuration's
     * exclude-from-stub-generation entries. The chooser shows only files that
     * are not excluded from the project (for non-default projects).
     */
    private ExcludedEntriesConfigurable createExcludedConfigurable(@NotNull Project project) {
        final ExcludesConfiguration configuration = myConfig.getExcludeFromStubGeneration();
        // Default (template) projects have no file index to consult.
        ProjectFileIndex index = project.isDefault() ? null : ProjectRootManager.getInstance(project).getFileIndex();
        final FileChooserDescriptor descriptor = new FileChooserDescriptor(true, true, false, false, false, true) {
            @Override
            public boolean isFileVisible(VirtualFile file, boolean showHiddenFiles) {
                return super.isFileVisible(file, showHiddenFiles) && (index == null || !index.isExcluded(file));
            }
        };
        // Restrict browsing roots to the source roots of all modules.
        descriptor.setRoots(ContainerUtil.concat(ContainerUtil.<Module, List<VirtualFile>>map(ModuleManager.getInstance(project).getModules(),
                module -> ModuleRootManager.getInstance(module)
                        .getSourceRoots(JavaModuleSourceRootTypes.SOURCES))));
        return new ExcludedEntriesConfigurable(project, descriptor, configuration);
    }

    @Override
    @NotNull
    public String getId() {
        return "Groovy compiler";
    }

    @Override
    public String getDisplayName() {
        return GroovyBundle.message("configurable.GroovyCompilerConfigurable.display.name");
    }

    @Override
    public String getHelpTopic() {
        return "reference.projectsettings.compiler.groovy";
    }

    @Override
    public JComponent createComponent() {
        // The excludes UI is created lazily here, not in the constructor.
        myExcludesPanel.add(myExcludes.createComponent());
        return myMainPanel;
    }

    @Override
    public boolean isModified() {
        return !Objects.equals(myConfig.getConfigScript(), getExternalizableConfigScript()) ||
               myInvokeDynamicSupportCB.isSelected() != myConfig.isInvokeDynamic() ||
               myExcludes.isModified();
    }

    @Override
    public void apply() throws ConfigurationException {
        myExcludes.apply();
        myConfig.setInvokeDynamic(myInvokeDynamicSupportCB.isSelected());
        myConfig.setConfigScript(getExternalizableConfigScript());
        // Saved settings affect the external build; drop the build server's
        // cached state so the next build picks them up.
        if (!myProject.isDefault()) {
            BuildManager.getInstance().clearState(myProject);
        }
    }

    @Override
    public void reset() {
        // Paths are stored system-independent; show them system-dependent.
        myConfigScriptPath.setText(FileUtil.toSystemDependentName(myConfig.getConfigScript()));
        myInvokeDynamicSupportCB.setSelected(myConfig.isInvokeDynamic());
        myExcludes.reset();
    }

    @Override
    public void disposeUIResources() {
        myExcludes.disposeUIResources();
    }

    /** @return the config-script path in system-independent (stored) form */
    @NotNull
    private String getExternalizableConfigScript() {
        return FileUtil.toSystemIndependentName(myConfigScriptPath.getText());
    }

    // UI-designer hook: builds the components marked "custom create" in the form.
    private void createUIComponents() {
        myPathPanel = new JPanel(new GridBagLayout());
        GridBag gb = new GridBag().setDefaultWeightX(1.0).
                setDefaultAnchor(GridBagConstraints.LINE_START).
                setDefaultFill(GridBagConstraints.HORIZONTAL);
        FileChooserDescriptor descriptor = new FileChooserDescriptor(true, false, false, false, false, false);
        myConfigScriptPath = new TextFieldWithBrowseButton();
        myConfigScriptPath.addBrowseFolderListener(null, GroovyBundle.message("settings.compiler.select.path.to.groovy.compiler.configscript"), null, descriptor);
        myPathPanel.add(createTopLabel(), gb.nextLine());
        myPathPanel.add(UI.PanelFactory.panel(myConfigScriptPath).withLabel(GroovyBundle.message("settings.compiler.path.to.configscript")).createPanel(), gb.nextLine().insetTop(13));
        // JBCheckBox text is set from pre-parsed mnemonic text so the mnemonic
        // index survives the bundle lookup.
        String cbText = GroovyBundle.message("settings.compiler.invoke.dynamic.support");
        TextWithMnemonic parsedText = TextWithMnemonic.parse(cbText);
        myInvokeDynamicSupportCB = new JBCheckBox(parsedText.getText(true));
        myInvokeDynamicSupportCB.setDisplayedMnemonicIndex(parsedText.getMnemonicIndex());
        myPathPanel.add(myInvokeDynamicSupportCB, gb.nextLine().insetTop(8));
    }

    // Builds the HTML hint shown above the path field, including a hyperlink
    // that navigates to the Java compiler settings page.
    private static JComponent createTopLabel() {
        JEditorPane tipComponent = new JEditorPane();
        tipComponent.setContentType("text/html");
        tipComponent.setEditable(false);
        tipComponent.setEditorKit(HTMLEditorKitBuilder.simple());
        EditorKit kit = tipComponent.getEditorKit();
        if (kit instanceof HTMLEditorKit) {
            // Theme-aware link colors for all link states.
            StyleSheet css = ((HTMLEditorKit)kit).getStyleSheet();
            css.addRule("a, a:link {color:#" + ColorUtil.toHex(JBUI.CurrentTheme.Link.Foreground.ENABLED) + ";}");
            css.addRule("a:visited {color:#" + ColorUtil.toHex(JBUI.CurrentTheme.Link.Foreground.VISITED) + ";}");
            css.addRule("a:hover {color:#" + ColorUtil.toHex(JBUI.CurrentTheme.Link.Foreground.HOVERED) + ";}");
            css.addRule("a:active {color:#" + ColorUtil.toHex(JBUI.CurrentTheme.Link.Foreground.PRESSED) + ";}");
            //css.addRule("body {background-color:#" + ColorUtil.toHex(info.warning ? warningBackgroundColor() : errorBackgroundColor()) + ";}");
        }
        if (tipComponent.getCaret() instanceof DefaultCaret) {
            // Prevent the viewport from scrolling when the text is (re)set.
            ((DefaultCaret)tipComponent.getCaret()).setUpdatePolicy(DefaultCaret.NEVER_UPDATE);
        }
        tipComponent.setCaretPosition(0);
        tipComponent.setText(GroovyBundle.message("settings.compiler.alternative"));
        tipComponent.addHyperlinkListener(e -> {
            if (e.getEventType() == HyperlinkEvent.EventType.ACTIVATED) {
                Settings allSettings = Settings.KEY.getData(DataManager.getInstance().getDataContext(tipComponent));
                if (allSettings != null) {
                    Configurable javacConfigurable = allSettings.find(JavaCompilersTab.class);
                    if (javacConfigurable != null) {
                        allSettings.select(javacConfigurable);
                    }
                }
            }
        });
        // Render like a label: no border, transparent background.
        tipComponent.setBorder(null);
        tipComponent.setOpaque(false);
        return tipComponent;
    }
}
| |
/*
* Copyright (C) 2012 ZONE Media GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* PListLoader.java
*
* Asynchronously downloads and parses property list files.
* see http://en.wikipedia.org/wiki/Property_list
* or https://developer.apple.com/library/mac/#documentation/Cocoa/Conceptual/PropertyLists/Introduction/Introduction.html
* for further details on property lists.
*
* This is not a general parser for Plist data. Only the
* specific structure used by petite madeleine as well as
* the following data types are supported:
* <array>, <dict>, <string>.
*/
package at.zone.madeleine.data;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import android.os.AsyncTask;
public class PListLoader {
private String tag; // can be used to pass extra info about the downloaded
// data in the callback for example an issueKey
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// Initiate download and parsing of remote property list:
public void loadPListFromUrl(String url, PListLoaderCallback delegate){
new DomFromUrlAsyncTask().execute(url, delegate);
}
public void loadPListFromUrl(String url, PListLoaderCallback delegate, String tag){
this.tag = tag;
loadPListFromUrl(url, delegate);
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - P A R S I N G
// translate Document object with PList data into a HashMap structure:
private static HashMap<String, Object> hashMapFromDOM(Document dom){
Element rootElement = dom.getDocumentElement(); // <plist> element
Element mainElement = (Element) rootElement.getChildNodes().item(1); // root <dict> or <array> element
if(isArrayElement(mainElement)){
HashMap<String, Object> container = new HashMap<String, Object>();
container.put("array", arrayElement2ArrayList(mainElement));
return container;
} else {
return dictElement2HashMap(mainElement);
}
}
// translate Element object with <dict> data into a HashMap structure:
private static HashMap<String, Object> dictElement2HashMap(Element element){
HashMap<String, Object> data = new HashMap<String, Object>();
// extract all element nodes:
NodeList children = element.getChildNodes();
ArrayList<Element> childElements = new ArrayList<Element>();
for(int i=0;i<children.getLength();i++){
Node child = children.item(i);
if(child instanceof Element){
childElements.add((Element) child);
}
}
// iterate over child elements:
for(int i=0;i<childElements.size();i+=2){
Element keyElement = (Element) childElements.get(i);
String keyName = keyElement.getTextContent();
Element contentElement = (Element) childElements.get(i+1);
// simply add it to hash if its a string value:
if(isStringElement(contentElement)){
data.put(keyName, contentElement.getTextContent());
}
// recrusively add if its another dict structure:
if(isDictionaryElement(contentElement)){
HashMap<String, Object> childData = dictElement2HashMap(contentElement);
data.put(keyName, childData);
}
if(isArrayElement(contentElement)){
data.put(keyName, arrayElement2ArrayList(contentElement) );
}
}
return data;
}
// translate Element object with <array> data into a ArrayList:
private static ArrayList arrayElement2ArrayList(Element element) {
ArrayList data = new ArrayList();
// extract all element nodes:
NodeList children = element.getChildNodes();
ArrayList<Element> childElements = new ArrayList<Element>();
for(int i=0;i<children.getLength();i++){
Node child = children.item(i);
if(child instanceof Element){
childElements.add((Element) child);
}
}
// iterate over child elements:
for(int i=0;i<childElements.size();i++){
Element contentElement = (Element) childElements.get(i);
if(isStringElement(contentElement)){
data.add(contentElement.getTextContent());
}
if(isArrayElement(contentElement)){
data.add(arrayElement2ArrayList(contentElement));
}
if(isDictionaryElement(contentElement)){
data.add(dictElement2HashMap(contentElement));
}
}
return data;
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - utility methods:
private static boolean isDictionaryElement(Element element){
return isElementType(element, "dict");
}
private static boolean isStringElement(Element element){
return isElementType(element, "string");
}
private static boolean isArrayElement(Element element) {
return isElementType(element, "array");
}
private static boolean isElementType(Element element, String name){
String tagName = element.getTagName();
return tagName.equalsIgnoreCase(name);
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
// AsyncTask to download plist and parse its xml structure:
private class DomFromUrlAsyncTask extends AsyncTask<Object, Void, Document> {
private PListLoaderCallback delegate;
private boolean networkError = false;
protected Document doInBackground(Object... arg0) {
String url = (String) arg0[0];
this.delegate = (PListLoaderCallback) arg0[1];
return getDomFromUrl(url);
}
protected void onPostExecute(Document result){
if(networkError){
delegate.onNetworkError();
} else {
delegate.plistDataReady(hashMapFromDOM(result), tag);
}
}
private Document getDomFromUrl(String url){
Document dom = null;
try {
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
dom = db.parse(url);
} catch (ParserConfigurationException e) {
e.printStackTrace();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
networkError = true;
}
return dom;
}
}
}
| |
package nyla.solutions.core.data.clock;
import nyla.solutions.core.data.Identifier;
import nyla.solutions.core.data.Mappable;
import nyla.solutions.core.data.Nameable;
import nyla.solutions.core.data.Textable;
import java.io.Serializable;
/**
* A named timing event
*
*
* @param <ValueType> the value type
* @param <KeyType> the type
*
* @author Gregory Green
*
*/
public class TimingAuditMappable<KeyType,ValueType> implements Serializable, Nameable, Mappable<KeyType,ValueType>, Identifier, Textable
{
    private static final long serialVersionUID = -1210199192832986691L;

    // Bean properties of the timing event; each has a getter/setter pair below.
    private String id;
    private String text;
    private KeyType key;
    private ValueType value;
    private String operation;
    private String dataName;
    private String system;
    private String name;
    private String from;
    private String to;
    private String host;
    private int processId;
    private Time time;

    /** @return the key */
    public KeyType getKey() { return key; }

    /** @param key the key to set */
    public void setKey(KeyType key) { this.key = key; }

    /** @return the name */
    public String getName() { return name; }

    /** @param name the name to set */
    public void setName(String name) { this.name = name; }

    /** @return the time */
    public Time getTime() { return time; }

    /** @param time the time to set */
    public void setTime(Time time) { this.time = time; }

    /** @return the value */
    public ValueType getValue() { return value; }

    /** @param value the value to set */
    public void setValue(ValueType value) { this.value = value; }

    /** @return the from */
    public String getFrom() { return from; }

    /** @param from the from to set */
    public void setFrom(String from) { this.from = from; }

    /** @return the to */
    public String getTo() { return to; }

    /** @param to the to to set */
    public void setTo(String to) { this.to = to; }

    /** @return the id */
    public String getId() { return id; }

    /** @param id the id to set */
    public void setId(String id) { this.id = id; }

    /** @return the system */
    public String getSystem() { return system; }

    /** @param system the system to set */
    public void setSystem(String system) { this.system = system; }

    /** @return the text */
    public String getText() { return text; }

    /** @param text the text to set */
    public void setText(String text) { this.text = text; }

    /** @return the host */
    public String getHost() { return host; }

    /** @param host the host to set */
    public void setHost(String host) { this.host = host; }

    /** @return the processId */
    public int getProcessId() { return processId; }

    /** @param processId the processId to set */
    public void setProcessId(int processId) { this.processId = processId; }

    /** @return the operation */
    public String getOperation() { return operation; }

    /** @param operation the operation to set */
    public void setOperation(String operation) { this.operation = operation; }

    /** @return the dataName */
    public String getDataName() { return dataName; }

    /** @param dataName the dataName to set */
    public void setDataName(String dataName) { this.dataName = dataName; }

    /**
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString()
    {
        // Concatenation yields the same output as the previous StringBuilder
        // chain, including "null" for unset fields.
        return "TimingAuditMappable [id=" + id + ", text=" + text
            + ", key=" + key + ", value=" + value
            + ", operation=" + operation + ", dataName=" + dataName
            + ", system=" + system + ", name=" + name
            + ", from=" + from + ", to=" + to
            + ", host=" + host + ", processId=" + processId
            + ", time=" + time + "]";
    }

    /**
     * 31-based hash over the same fields, in the same order, as before — so
     * hash values are unchanged across this rewrite.
     *
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode()
    {
        final int prime = 31;
        int result = 1;
        result = prime * result + hashOf(dataName);
        result = prime * result + hashOf(from);
        result = prime * result + hashOf(host);
        result = prime * result + hashOf(id);
        result = prime * result + hashOf(key);
        result = prime * result + hashOf(name);
        result = prime * result + hashOf(operation);
        result = prime * result + processId;
        result = prime * result + hashOf(system);
        result = prime * result + hashOf(text);
        result = prime * result + hashOf(time);
        result = prime * result + hashOf(to);
        result = prime * result + hashOf(value);
        return result;
    }

    /**
     * Exact-class, field-by-field equality (null-safe on every reference field).
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj)
    {
        if (this == obj)
            return true;
        // getClass() comparison (not instanceof) keeps the original strictness:
        // subclasses never compare equal to this class.
        if (obj == null || getClass() != obj.getClass())
            return false;
        TimingAuditMappable<?,?> other = (TimingAuditMappable<?,?>) obj;
        return eq(dataName, other.dataName)
            && eq(from, other.from)
            && eq(host, other.host)
            && eq(id, other.id)
            && eq(key, other.key)
            && eq(name, other.name)
            && eq(operation, other.operation)
            && processId == other.processId
            && eq(system, other.system)
            && eq(text, other.text)
            && eq(time, other.time)
            && eq(to, other.to)
            && eq(value, other.value);
    }

    /** Null-safe equality helper used by {@link #equals(Object)}. */
    private static boolean eq(Object a, Object b)
    {
        return (a == null) ? (b == null) : a.equals(b);
    }

    /** Null-safe hash helper used by {@link #hashCode()}. */
    private static int hashOf(Object o)
    {
        return (o == null) ? 0 : o.hashCode();
    }
}
| |
/*
* This file is part of EmergencyLanding, licensed under the MIT License (MIT).
*
* Copyright (c) TechShroom Studios <https://techshoom.com>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.techshroom.emergencylanding.library.util;
import static org.lwjgl.opengl.GL11.GL_VERSION;
import static org.lwjgl.opengl.GL11.glGetString;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Supplier;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.sound.midi.MidiDevice.Info;
import org.lwjgl.BufferUtils;
import org.lwjgl.glfw.GLFW;
import org.lwjgl.glfw.GLFWErrorCallback;
import org.lwjgl.opengl.GL11;
import org.lwjgl.system.Configuration;
import org.lwjgl.system.Platform;
import com.google.common.io.ByteStreams;
import com.techshroom.emergencylanding.library.util.interfaces.IOConsumer;
public final class LUtils {
/**
* A dummy method to load this class. Does nothing.
*/
public static void init() {
    // Intentionally empty: calling this merely forces class loading, which
    // runs the static initializers below (stream overrides, GLFW callback, ...).
}
// Library version string. NOTE(review): public, mutable, non-final — any
// caller can reassign it; confirm whether that is intended.
public static String VERSION = "1.3.1";
// NOTE(review): .intern() on String literals is redundant — literals are
// already interned by the JVM; kept byte-identical here.
public static final String LIB_NAME = "EmergencyLanding".intern();
public static final String SHORT_LIB_NAME = "EL".intern();
private static final String LOWER_LIB_NAME = LIB_NAME.toLowerCase().intern();
private static final String LOWER_SHORT_LIB_NAME = SHORT_LIB_NAME.toLowerCase().intern();
// Assigned in the static initializer further below; a field reference keeps
// the callback reachable while it is registered with GLFW.
private static final GLFWErrorCallback ERROR_CB;
/**
 * The default system streams, before overload.
 */
public static PrintStream sysout = System.out, syserr = System.err;
// Overwritten with the LWJGL platform name in a static initializer below.
public static String PLATFORM_NAME = "unknown";
// Prefix used by print(), e.g. "[EmergencyLanding-1.3.1]".
public static final String elPrintStr = String.format("[" + LIB_NAME + "-%s]", LUtils.VERSION);
/**
 * Writes a library-tagged line ("[EmergencyLanding-version] msg") to stderr.
 *
 * @param msg the message to print; a null message prints as "null"
 */
public static void print(String msg) {
    StringBuilder line = new StringBuilder(elPrintStr);
    line.append(' ').append(msg);
    System.err.println(line);
}
/**
* The top level of the game/tool
*/
public static String TOP_LEVEL = null;
static {
    try {
        // reuse KCore's data
        // Absolute path of the current working directory, normalized to
        // forward slashes with any trailing slash removed.
        LUtils.TOP_LEVEL =
                Paths.get(".").toAbsolutePath().toString().replace(File.separatorChar, '/').replaceFirst("/$", "");
        LUtils.print("Using TOP_LEVEL " + TOP_LEVEL);
    } catch (Exception e) {
        // TOP_LEVEL is essential for the rest of the library; abort hard.
        e.printStackTrace();
        System.exit(-1);
    }
}
/**
* The top level of emergency landing, used to load our shaders.
*/
private static String EL_TOP = null;
static {
    // levels = package depth + 2; presumably accounts for the class-file name
    // plus one extra path segment — TODO confirm the "+2".
    String tempName = LUtils.class.getPackage().getName();
    int levels = Strings.count(tempName, '.') + 2;
    // URL-style path of LUtils.class with encoded spaces restored.
    tempName = LUtils.class.getResource("LUtils.class").getFile()
            // .replace('/', File.separatorChar)// .substring(1)
            .replace("%20", " ");
    // Strip `levels` trailing path segments to reach the classpath root.
    for (int i = 0; i < levels; i++) {
        tempName = tempName.substring(0, tempName.lastIndexOf("/"));
    }
    LUtils.print(tempName);
    if (tempName.endsWith("!")) {
        // jar files: natives are in TOP_LEVEL
        LUtils.print("Assumed JAR launch.");
        EL_TOP = TOP_LEVEL;
    } else {
        // NOTE(review): Windows-specific fixups; presumably strips the leading
        // slash from "/C:/..." style resource paths — verify on Windows.
        EL_TOP = ((tempName.startsWith("/") ? "" : "/") + tempName).replace("/C:/", "C:/").replace("\\C:\\",
                "C:\\");
    }
    LUtils.print("Using EL_TOP " + EL_TOP);
}
static {
    // Enable LWJGL debug mode and resolve the platform name.
    Configuration.DEBUG.set(true);
    PLATFORM_NAME = Platform.get().getName();
    String osName = System.getProperty("os.name");
    if (osName.startsWith("SunOS")) {
        // Special-case SunOS; presumably to match a "solaris" natives
        // folder naming convention — confirm.
        PLATFORM_NAME = "solaris";
    }
    overrideStandardStreams();
    // Register (and retain, via the field) a GLFW error callback.
    ERROR_CB = GLFWErrorCallback.createPrint();
    GLFW.glfwSetErrorCallback(ERROR_CB);
}
/**
* Adds the specified path to the java library path
*
* @param pathToAdd
* the path to add
* @throws Exception
* if the library path couldn't be added
*/
public static void addLibraryPath(String pathToAdd) throws Exception {
    // NOTE(review): relies on the private ClassLoader field "usr_paths".
    // This reflective write is blocked on Java 9+ strong encapsulation
    // unless --add-opens is configured — verify on supported JVM versions.
    final Field usrPathsField = ClassLoader.class.getDeclaredField("usr_paths");
    usrPathsField.setAccessible(true);
    // get array of paths
    final String[] paths = (String[]) usrPathsField.get(null);
    // check if the path to add is already present
    for (String path : paths) {
        if (path.equals(pathToAdd)) {
            return;
        }
    }
    // add the new path
    final String[] newPaths = Arrays.copyOf(paths, paths.length + 1);
    newPaths[newPaths.length - 1] = pathToAdd;
    usrPathsField.set(null, newPaths);
}
private static void overrideStandardStreams() {
    // Announce on the real stderr before the streams are swapped out.
    System.err.println("Replacing streams with methodized...");
    // NOTE: these locals intentionally shadow the static sysout/syserr fields
    // above, which keep references to the ORIGINAL streams.
    MethodizedSTDStream sysout = new MethodizedSTDStream(System.out);
    System.setOut(new PrintStream(sysout));
    MethodizedSTDStream syserr = new MethodizedSTDStream(System.err);
    System.setErr(new PrintStream(syserr));
    // Confirm completion via the wrapped stream's original target.
    syserr.orig.println("Finished.");
}
// Debug verbosity read from the system property "el.debug.level"
// (LOWER_SHORT_LIB_NAME is "el"); defaults to 0.
public static final int debugLevel =
        Integer.parseInt(System.getProperty(LOWER_SHORT_LIB_NAME + ".debug.level", "0"));
static {
    System.err.println(LOWER_SHORT_LIB_NAME + ".debug.level" + ": " + debugLevel);
}
/**
 * Gets a boolean argument safely.
 *
 * @param args
 *            - the args list from which to retrieve the argument
 * @param index
 *            - the index of the wanted argument
 * @param def
 *            - a default value to fallback on
 * @return the wanted boolean argument value, or the default value
 */
public static boolean getArgB(String[] args, int index, boolean def) {
    // parseBoolean/toString avoid the needless boxing the original
    // Boolean.valueOf(...) round-trip performed.
    return Boolean.parseBoolean(LUtils.getArgS(args, index, Boolean.toString(def)));
}
/**
 * Gets an integer argument safely.
 *
 * @param args
 *            - the args list from which to retrieve the argument
 * @param index
 *            - the index of the wanted argument
 * @param def
 *            - a default value to fallback on
 * @return the wanted integer argument value, or the default value
 */
public static int getArgI(String[] args, int index, int def) {
    // parseInt/toString avoid the boxing of Integer.valueOf; the
    // NumberFormatException behavior for malformed arguments is unchanged.
    return Integer.parseInt(LUtils.getArgS(args, index, Integer.toString(def)));
}
/**
 * Gets a float argument safely.
 *
 * @param args
 *            - the args list from which to retrieve the argument
 * @param index
 *            - the index of the wanted argument
 * @param def
 *            - a default value to fallback on
 * @return the wanted float argument value, or the default value
 */
public static float getArgF(String[] args, int index, float def) {
    // parseFloat/toString avoid the boxing of Float.valueOf.
    return Float.parseFloat(LUtils.getArgS(args, index, Float.toString(def)));
}
/**
 * Gets a double argument safely.
 *
 * @param args
 *            - the args list from which to retrieve the argument
 * @param index
 *            - the index of the wanted argument
 * @param def
 *            - a default value to fallback on
 * @return the wanted double argument value, or the default value
 */
public static double getArgD(String[] args, int index, double def) {
    // parseDouble/toString avoid the boxing of Double.valueOf.
    return Double.parseDouble(LUtils.getArgS(args, index, Double.toString(def)));
}
/**
 * Gets a String argument safely: out-of-range indices, a null args array,
 * and a null element all yield the supplied default.
 *
 * @param args
 *            - the args list from which to retrieve the argument
 * @param index
 *            - the index of the wanted argument
 * @param def
 *            - a default value to fallback on
 * @return the wanted String argument value, or the default value
 */
public static String getArgS(String[] args, int index, String def) {
    if (args == null || index >= args.length || args[index] == null) {
        return def;
    }
    return args[index];
}
/**
 * Gets an argument of any type safely: a null array, an out-of-range
 * index, and a null element all yield the supplied default.
 *
 * @param src
 *            - the args list from which to retrieve the argument
 * @param index
 *            - the index of the wanted argument
 * @param def
 *            - a default value to fallback on
 * @param <T>
 *            - The type of the argument
 * @return the wanted argument value, or the default value
 */
public static <T> T getArg(T[] src, int index, T def) {
    if (src == null || index >= src.length) {
        return def;
    }
    T value = src[index];
    return value != null ? value : def;
}
/**
 * Checks for the given OpenGL version (eg. 3.0.2).
 * <p>
 * Requires a current OpenGL context (uses {@link #getGLVer()}).
 *
 * @param vers
 *            - the wanted version
 * @return true if the actual version is the same as or newer than the
 *         wanted version, false otherwise
 */
public static boolean isVersionAvaliable(String vers) {
    String cver = getGLVer();
    // GL_VERSION may carry vendor info after a space; keep only the number.
    if (cver.indexOf(' ') > -1) {
        cver = cver.substring(0, cver.indexOf(' '));
    }
    LUtils.print("Comparing " + cver + " to " + vers);
    String[] cverSep = cver.split("\\.", 3);
    String[] versSep = vers.split("\\.", 3);
    // Missing components default to 0 (e.g. "3.0" is treated as 3.0.0).
    int[] cverNum = new int[3];
    int[] versNum = new int[3];
    int min = LUtils.minAll(cverSep.length, versSep.length, 3);
    for (int i = 0; i < min; i++) {
        cverNum[i] = Integer.parseInt(cverSep[i]);
        versNum[i] = Integer.parseInt(versSep[i]);
    }
    // BUG FIX: the original required every component to be >=, which
    // wrongly reported e.g. 4.0.0 as older than 3.9.9. Compare
    // lexicographically: major first, then minor, then patch.
    boolean ret = true;
    for (int i = 0; i < 3; i++) {
        if (cverNum[i] != versNum[i]) {
            ret = cverNum[i] > versNum[i];
            break;
        }
    }
    LUtils.print("Returning " + ret);
    return ret;
}
/**
 * Gets the smallest of all the given ints. With no arguments the result
 * is {@link Integer#MAX_VALUE}.
 *
 * @param ints
 *            - the set of ints to use
 * @return the smallest int from ints
 */
public static int minAll(int... ints) {
    int smallest = Integer.MAX_VALUE;
    for (int index = 0; index < ints.length; index++) {
        if (ints[index] < smallest) {
            smallest = ints[index];
        }
    }
    return smallest;
}
/**
 * Turns a {@link Info} list into a list of strings, one per device: the
 * Info's own string form immediately followed by its concrete class name.
 *
 * @param info
 *            - the list of MidiDevice.Infos to use
 * @return a list of Strings representing the given Infos
 */
public static List<String> getInfoAsString(Info[] info) {
    List<String> out = new ArrayList<String>();
    for (int index = 0; index < info.length; index++) {
        Info entry = info[index];
        out.add(entry + "" + entry.getClass().getName());
    }
    return out;
}
/**
 * Check for integer.
 *
 * @param test
 *            - the String to check for integer; null, empty and
 *            out-of-range values all return false
 * @return if the String represents an integer
 */
public static boolean isInt(String test) {
    try {
        Integer.parseInt(test);
        return true;
    } catch (NumberFormatException e) {
        // Narrowed from catch (Exception): parseInt only throws
        // NumberFormatException, including for a null argument.
        return false;
    }
}
/**
 * Gets the current OpenGL version string.
 * <p>
 * NOTE(review): glGetString requires a current OpenGL context on the
 * calling thread — without one it is expected to return null; confirm
 * callers only use this after context creation.
 *
 * @return {@link GL11#GL_VERSION}
 */
public static String getGLVer() {
    return glGetString(GL_VERSION);
}
/**
 * Walks up the call stack and returns the name of the first class that is
 * not the given one; on any stack-introspection error the placeholder
 * "no class found" is returned (after printing the stack trace).
 *
 * @param name
 *            - a class name
 * @return the class that is not the given class
 */
public static String getFirstEntryNotThis(String name) {
    String result = "no class found";
    int depth = StackTraceInfo.INVOKING_METHOD_ZERO;
    try {
        String current = StackTraceInfo.getCurrentClassName(depth);
        while (current.equals(name)) {
            depth++;
            current = StackTraceInfo.getCurrentClassName(depth);
        }
        result = current;
    } catch (Exception e) {
        e.printStackTrace();
    }
    return result;
}
/**
 * Opens an input stream for {@code path} and hands it to {@code consumer},
 * returning the consumer's result; the stream is closed before returning.
 * <p>
 * The path may name a plain file, or an entry inside a .zip/.jar archive
 * (e.g. {@code /some/lib.jar/res/img.png}); in the latter case the archive
 * is opened and the remainder of the path is looked up as an entry.
 * Nested archives (a jar inside a jar) are not supported — the original
 * "style 2" branch was unreachable and has been dropped.
 *
 * @param path
 *            - the path, must be absolute
 * @param consumer
 *            - the stream consumer, shouldn't close resource
 * @param <R>
 *            - The return type of the consumer
 * @return whatever the consumer returns
 * @throws IOException
 *             if there are I/O errors or the archive entry is missing
 */
public static <R> R processPathData(String path, IOConsumer<R> consumer) throws IOException {
    LUtils.print("[Retriving InputStream for '" + path + "']");
    // Normalize to UNIX-style separators so the split below works everywhere.
    path = path.replace(File.separatorChar, '/');
    List<String> pathparts = Arrays.asList(path.split("/"));
    // Find the first component that is an archive but NOT the final
    // component (a trailing .zip/.jar is just a plain file to read).
    // BUG FIX: the original condition was `a || b && c`, which due to &&
    // binding tighter than || treated any ".zip" component — even the last
    // one — as an archive, and it matched the bare suffix "jar", so a
    // directory named "sugar" counted as an archive too.
    int firstArchive = -1;
    for (int i = 0; i < pathparts.size() - 1; i++) {
        String part = pathparts.get(i);
        if (part.endsWith(".zip") || part.endsWith(".jar")) {
            firstArchive = i;
            break;
        }
    }
    if (firstArchive < 0) {
        LUtils.print("Using raw file input stream");
        try (InputStream stream = new FileInputStream(path)) {
            return consumer.consumeStream(stream);
        }
    }
    LUtils.print("Using zip/jar entry lookup");
    // Rebuild the path up to (and including) the archive itself...
    StringBuilder archivePath = new StringBuilder();
    for (int i = 0; i <= firstArchive; i++) {
        archivePath.append(pathparts.get(i)).append('/');
    }
    String file = archivePath.substring(0, archivePath.length() - 1);
    // ...and everything after it is the entry name inside the archive.
    String extra = path.substring(archivePath.length());
    LUtils.print("Attempting to load from " + file);
    try (ZipFile zf = new ZipFile(file)) {
        ZipEntry ze = zf.getEntry(extra);
        if (ze == null) {
            // BUG FIX: a missing entry previously caused an NPE inside
            // zf.getInputStream; fail with a descriptive IOException instead.
            throw new IOException("No entry '" + extra + "' in archive " + file);
        }
        try (InputStream stream = zf.getInputStream(ze)) {
            return consumer.consumeStream(stream);
        }
    }
}
/**
 * @return the top-level path (EL_TOP) computed in the class initializer
 */
public static String getELTop() {
    return EL_TOP;
}
/**
 * Unsupported stub: the GLFW backend in use here provides no icon API, so
 * this always throws. The commented-out block below is the legacy
 * Display/AWT implementation kept for reference.
 *
 * @param is
 *            - the stream that would have supplied the icon image
 * @throws UnsupportedOperationException always
 */
public static void setIcon(final InputStream is) {
    throw new UnsupportedOperationException("GLFW does not support setting icons.");
    /*
     * GLFW cannot setIcons at this time if (Platform.get() ==
     * Platform.MACOSX) { // Set in the dock try {
     * Application.getApplication().setDockIconImage(ImageIO.read(is));
     * System.err.println("Using 1 icon"); } catch (IOException e) {
     * e.printStackTrace(); } return; } Runnable r = new Runnable() {
     *
     * @Override public void run() { ByteBuffer[] icondata =
     * IconLoader.load(is); GLFW.glfic int used = Display.setIcon(icondata);
     * System.err.println("Using " + used + " icon(s)"); } };
     * System.err.println(Thread.currentThread());
     * System.err.println(KMain.getDisplayThread()); if
     * (Thread.currentThread() == KMain.getDisplayThread()) {
     * System.err.println("Early icon load"); r.run(); } else {
     * ELTexture.addRunnableToQueue(r); }
     */
}
/**
 * Reads the supplied stream fully and copies its bytes into a freshly
 * allocated direct ByteBuffer (via LWJGL's BufferUtils), rewound so
 * callers can read from position 0. The stream is closed here.
 *
 * @param streamSupplier supplies the stream to read
 * @return a rewound direct buffer containing the stream's bytes
 * @throws IOException if reading the stream fails
 */
public static ByteBuffer inputStreamToDirectByteBuffer(Supplier<InputStream> streamSupplier) throws IOException {
    try (InputStream stream = streamSupplier.get()) {
        byte[] data = ByteStreams.toByteArray(stream);
        return (ByteBuffer) BufferUtils.createByteBuffer(data.length).put(data).rewind();
    }
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.model;
import java.io.Serializable;
/**
* <p>
* Represents one of the following:
* </p>
* <ul>
* <li>
* <p>
* A new global secondary index to be added to an existing table.
* </p>
* </li>
* <li>
* <p>
* New provisioned throughput parameters for an existing global secondary index.
* </p>
* </li>
* <li>
* <p>
* An existing global secondary index to be removed from an existing table.
* </p>
* </li>
* </ul>
*/
public class GlobalSecondaryIndexUpdate implements Serializable, Cloneable {

    /**
     * The name of an existing global secondary index, along with new
     * provisioned throughput settings to be applied to that index.
     */
    private UpdateGlobalSecondaryIndexAction update;

    /**
     * The parameters required for creating a global secondary index on an
     * existing table: <code>IndexName</code>, <code>KeySchema</code>,
     * <code>AttributeDefinitions</code>, <code>Projection</code> and
     * <code>ProvisionedThroughput</code>.
     */
    private CreateGlobalSecondaryIndexAction create;

    /** The name of an existing global secondary index to be removed. */
    private DeleteGlobalSecondaryIndexAction delete;

    /**
     * Sets the update action.
     *
     * @param update
     *        The name of an existing global secondary index, along with new
     *        provisioned throughput settings to be applied to that index.
     */
    public void setUpdate(UpdateGlobalSecondaryIndexAction update) {
        this.update = update;
    }

    /**
     * @return The name of an existing global secondary index, along with new
     *         provisioned throughput settings to be applied to that index.
     */
    public UpdateGlobalSecondaryIndexAction getUpdate() {
        return this.update;
    }

    /**
     * Fluent variant of {@link #setUpdate}.
     *
     * @param update
     *        The name of an existing global secondary index, along with new
     *        provisioned throughput settings to be applied to that index.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public GlobalSecondaryIndexUpdate withUpdate(
            UpdateGlobalSecondaryIndexAction update) {
        setUpdate(update);
        return this;
    }

    /**
     * Sets the create action.
     *
     * @param create
     *        The parameters required for creating a global secondary index on
     *        an existing table: <code>IndexName</code>, <code>KeySchema</code>,
     *        <code>AttributeDefinitions</code>, <code>Projection</code> and
     *        <code>ProvisionedThroughput</code>.
     */
    public void setCreate(CreateGlobalSecondaryIndexAction create) {
        this.create = create;
    }

    /**
     * @return The parameters required for creating a global secondary index on
     *         an existing table: <code>IndexName</code>, <code>KeySchema</code>,
     *         <code>AttributeDefinitions</code>, <code>Projection</code> and
     *         <code>ProvisionedThroughput</code>.
     */
    public CreateGlobalSecondaryIndexAction getCreate() {
        return this.create;
    }

    /**
     * Fluent variant of {@link #setCreate}.
     *
     * @param create
     *        The parameters required for creating a global secondary index on
     *        an existing table.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public GlobalSecondaryIndexUpdate withCreate(
            CreateGlobalSecondaryIndexAction create) {
        setCreate(create);
        return this;
    }

    /**
     * Sets the delete action.
     *
     * @param delete
     *        The name of an existing global secondary index to be removed.
     */
    public void setDelete(DeleteGlobalSecondaryIndexAction delete) {
        this.delete = delete;
    }

    /**
     * @return The name of an existing global secondary index to be removed.
     */
    public DeleteGlobalSecondaryIndexAction getDelete() {
        return this.delete;
    }

    /**
     * Fluent variant of {@link #setDelete}.
     *
     * @param delete
     *        The name of an existing global secondary index to be removed.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public GlobalSecondaryIndexUpdate withDelete(
            DeleteGlobalSecondaryIndexAction delete) {
        setDelete(delete);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getUpdate() != null)
            sb.append("Update: " + getUpdate() + ",");
        if (getCreate() != null)
            sb.append("Create: " + getCreate() + ",");
        if (getDelete() != null)
            sb.append("Delete: " + getDelete());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof GlobalSecondaryIndexUpdate))
            return false;
        GlobalSecondaryIndexUpdate other = (GlobalSecondaryIndexUpdate) obj;
        // XOR catches the "exactly one side null" case for each field.
        if (other.getUpdate() == null ^ this.getUpdate() == null)
            return false;
        if (other.getUpdate() != null && !other.getUpdate().equals(this.getUpdate()))
            return false;
        if (other.getCreate() == null ^ this.getCreate() == null)
            return false;
        if (other.getCreate() != null && !other.getCreate().equals(this.getCreate()))
            return false;
        if (other.getDelete() == null ^ this.getDelete() == null)
            return false;
        if (other.getDelete() != null && !other.getDelete().equals(this.getDelete()))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode
                + ((getUpdate() == null) ? 0 : getUpdate().hashCode());
        hashCode = prime * hashCode
                + ((getCreate() == null) ? 0 : getCreate().hashCode());
        hashCode = prime * hashCode
                + ((getDelete() == null) ? 0 : getDelete().hashCode());
        return hashCode;
    }

    @Override
    public GlobalSecondaryIndexUpdate clone() {
        try {
            return (GlobalSecondaryIndexUpdate) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
| |
package backup.gui;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.awt.image.DirectColorModel;
import java.awt.image.IndexColorModel;
import java.awt.image.WritableRaster;
import java.io.File;
import java.util.Enumeration;
import java.util.Hashtable;
import javax.swing.Icon;
import javax.swing.filechooser.FileSystemView;
import org.eclipse.jface.viewers.ILabelProvider;
import org.eclipse.jface.viewers.ILabelProviderListener;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.ImageData;
import org.eclipse.swt.graphics.PaletteData;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.widgets.Display;
/**
* Label Provider for the file tree - handle the content handed out by the
* FileTreeContentProvider
*
* <pre>
* Copyright (c) 2010 Daniel Armbrust. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* The license was included with the download.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </pre>
* @author <A HREF="mailto:daniel.armbrust@gmail.com">Dan Armbrust</A>
*/
public class FileTreeLabelProvider implements ILabelProvider
{
    private final Display display_;
    // Icon cache keyed by lower-cased file extension; directories use the
    // special key "-folder-" and drive roots (e.g. "C:\") their full path.
    // Null is never stored (Hashtable would throw NPE on a null value);
    // all cached images are disposed in dispose().
    private final Hashtable<String, Image> images_ = new Hashtable<String, Image>();

    public FileTreeLabelProvider(Display display)
    {
        display_ = display;
    }

    /**
     * Returns a (cached) SWT image for the given File. Icons come from
     * Swing's FileSystemView — SWT doesn't yet have a way to get system
     * images for files — and are converted to SWT ImageData.
     * Returns null when no icon could be produced.
     */
    public Image getImage(Object arg0)
    {
        File file = (File) arg0;
        String extension = "";
        try
        {
            if (file.getAbsolutePath().endsWith(":\\"))
            {
                // Drive roots get their own cache entry.
                extension = file.getAbsolutePath();
            }
            else if (file.isDirectory())
            {
                extension = "-folder-";
            }
            else
            {
                int pos = (file.getName().lastIndexOf('.'));
                if (pos > 0)
                {
                    extension = file.getName().substring(pos, file.getName().length())
                            .toLowerCase();
                }
            }
            Image image = images_.get(extension);
            if (image != null)
            {
                return image;
            }
            Icon icon = FileSystemView.getFileSystemView().getSystemIcon(file);
            if (icon == null)
            {
                // No platform icon available for this file.
                return null;
            }
            BufferedImage buffImage = new BufferedImage(icon.getIconWidth(),
                    icon.getIconHeight(), BufferedImage.TYPE_INT_ARGB);
            // Draw the Swing icon into the BufferedImage.
            Graphics g = buffImage.getGraphics();
            icon.paintIcon(null, g, 0, 0);
            // BUG FIX: the graphics context was never released.
            g.dispose();
            ImageData imageData = convertToSWT(buffImage);
            if (imageData == null)
            {
                // BUG FIX: the original called images_.put(extension, null),
                // which throws NPE for Hashtable (and would later NPE in
                // dispose()). Simply don't cache conversion failures.
                return null;
            }
            image = new Image(display_, imageData);
            images_.put(extension, image);
            return image;
        }
        catch (Exception e)
        {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Converts an AWT BufferedImage to SWT ImageData. Direct and indexed
     * color models are supported; any other model yields null.
     */
    private static ImageData convertToSWT(BufferedImage bufferedImage)
    {
        if (bufferedImage.getColorModel() instanceof DirectColorModel)
        {
            DirectColorModel colorModel = (DirectColorModel) bufferedImage.getColorModel();
            PaletteData palette = new PaletteData(colorModel.getRedMask(), colorModel
                    .getGreenMask(), colorModel.getBlueMask());
            ImageData data = new ImageData(bufferedImage.getWidth(), bufferedImage.getHeight(),
                    colorModel.getPixelSize(), palette);
            WritableRaster raster = bufferedImage.getRaster();
            int[] pixelArray = new int[4];
            for (int y = 0; y < data.height; y++)
            {
                for (int x = 0; x < data.width; x++)
                {
                    raster.getPixel(x, y, pixelArray);
                    int pixel;
                    if (pixelArray[3] < 100)
                    {
                        // Mostly-transparent pixels are flattened to white
                        // (SWT ImageData here carries no alpha channel).
                        pixel = palette.getPixel(new RGB(255, 255, 255));
                    }
                    else
                    {
                        pixel = palette.getPixel(new RGB(pixelArray[0], pixelArray[1],
                                pixelArray[2]));
                    }
                    data.setPixel(x, y, pixel);
                }
            }
            return data;
        }
        else if (bufferedImage.getColorModel() instanceof IndexColorModel)
        {
            IndexColorModel colorModel = (IndexColorModel) bufferedImage.getColorModel();
            // Copy the color map into an SWT palette.
            int size = colorModel.getMapSize();
            byte[] reds = new byte[size];
            byte[] greens = new byte[size];
            byte[] blues = new byte[size];
            colorModel.getReds(reds);
            colorModel.getGreens(greens);
            colorModel.getBlues(blues);
            RGB[] rgbs = new RGB[size];
            for (int i = 0; i < rgbs.length; i++)
            {
                rgbs[i] = new RGB(reds[i] & 0xFF, greens[i] & 0xFF, blues[i] & 0xFF);
            }
            PaletteData palette = new PaletteData(rgbs);
            ImageData data = new ImageData(bufferedImage.getWidth(), bufferedImage.getHeight(),
                    colorModel.getPixelSize(), palette);
            data.transparentPixel = colorModel.getTransparentPixel();
            WritableRaster raster = bufferedImage.getRaster();
            int[] pixelArray = new int[1];
            for (int y = 0; y < data.height; y++)
            {
                for (int x = 0; x < data.width; x++)
                {
                    raster.getPixel(x, y, pixelArray);
                    data.setPixel(x, y, pixelArray[0]);
                }
            }
            return data;
        }
        // Unsupported color model.
        return null;
    }

    /**
     * Returns the file name, or the absolute path for roots whose name is
     * empty (e.g. "C:\").
     */
    public String getText(Object arg0)
    {
        File file = (File) arg0;
        return file.getName().length() == 0 ? file.getAbsolutePath() : file.getName();
    }

    public void addListener(ILabelProviderListener arg0)
    {
        // Labels never change at runtime; no listener support needed.
    }

    /** Disposes every cached image and clears the cache. */
    public void dispose()
    {
        Enumeration<Image> en = images_.elements();
        while (en.hasMoreElements())
        {
            en.nextElement().dispose();
        }
        images_.clear();
    }

    public boolean isLabelProperty(Object arg0, String arg1)
    {
        return false;
    }

    public void removeListener(ILabelProviderListener arg0)
    {
        // noop — see addListener.
    }
}
| |
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.core.util.mail.ui;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.olat.core.id.Identity;
import org.olat.core.id.ModifiedInfo;
import org.olat.core.id.Persistable;
import org.olat.core.id.Preferences;
import org.olat.core.id.User;
import org.olat.core.id.UserConstants;
/**
 * Lightweight Identity wrapper around a bare e-mail address (no persisted
 * OLAT user behind it); used by the mail UI. Most Identity methods are
 * stubs that return null or do nothing.
 */
class EMailIdentity implements Identity {
    private static final long serialVersionUID = -2899896628137672419L;
    private final String email;
    private final User user;
    private final Locale locale;

    public EMailIdentity(String email, Locale locale) {
        this.email = email;
        // The locale is handed to the nested preferences explicitly; see
        // the static nested classes below.
        user = new EMailUser(email, locale);
        this.locale = locale;
    }

    @Override
    public Long getKey() {
        // Not persisted, so there is no database key.
        return null;
    }

    @Override
    public boolean equalsByPersistableKey(Persistable persistable) {
        // No key available; fall back to reference identity.
        return this == persistable;
    }

    @Override
    public Date getCreationDate() {
        return null;
    }

    @Override
    public String getName() {
        // The e-mail address doubles as the identity name.
        return email;
    }

    @Override
    public User getUser() {
        return user;
    }

    @Override
    public Date getLastLogin() {
        return null;
    }

    @Override
    public void setLastLogin(Date loginDate) {/**/
    }

    @Override
    public Integer getStatus() {
        return null;
    }

    @Override
    public void setStatus(Integer newStatus) {/**/
    }

    @Override
    public void setName(String name) {/**/
    }

    /**
     * Stub user backed only by a map holding empty first/last names and the
     * e-mail address.
     * <p>
     * FIX: declared static — the original non-static inner class silently
     * captured the enclosing (Serializable) EMailIdentity instance.
     */
    private static class EMailUser implements User, ModifiedInfo {
        private static final long serialVersionUID = 7260225880639460228L;
        private final EMailPreferences prefs;
        private final Map<String, String> data = new HashMap<>();

        public EMailUser(String email, Locale locale) {
            prefs = new EMailPreferences(locale);
            data.put(UserConstants.FIRSTNAME, "");
            data.put(UserConstants.LASTNAME, "");
            data.put(UserConstants.EMAIL, email);
        }

        public Long getKey() {
            return null;
        }

        public boolean equalsByPersistableKey(Persistable persistable) {
            return this == persistable;
        }

        public Date getLastModified() {
            return null;
        }

        @Override
        public void setLastModified(Date date) {
            //
        }

        public Date getCreationDate() {
            return null;
        }

        public void setProperty(String propertyName, String propertyValue) {
            // read-only stub
        }

        public void setPreferences(Preferences prefs) {
            // read-only stub
        }

        public String getProperty(String propertyName, Locale locale) {
            return data.get(propertyName);
        }

        public void setIdentityEnvironmentAttributes(Map<String, String> identEnvAttribs) {/**/
        }

        public String getPropertyOrIdentityEnvAttribute(String propertyName, Locale locale) {
            // No environment attributes here; same lookup as getProperty.
            return data.get(propertyName);
        }

        public Preferences getPreferences() {
            return prefs;
        }
    }

    /**
     * Preferences stub: fixed language from the supplied locale, real-mail
     * delivery enabled, everything else defaulted.
     * <p>
     * FIX: declared static with an explicit Locale instead of reading the
     * outer identity's field through a hidden reference.
     */
    private static class EMailPreferences implements Preferences {
        private static final long serialVersionUID = 7039109437910126584L;
        private final Locale locale;

        EMailPreferences(Locale locale) {
            this.locale = locale;
        }

        @Override
        public String getLanguage() {
            return locale.getLanguage();
        }

        @Override
        public void setLanguage(String l) {
            //
        }

        @Override
        public String getFontsize() {
            return null;
        }

        @Override
        public void setFontsize(String l) {
            //
        }

        @Override
        public String getNotificationInterval() {
            return null;
        }

        @Override
        public void setNotificationInterval(String notificationInterval) {/* */
        }

        @Override
        public String getReceiveRealMail() {
            // Always deliver real e-mail for these ad-hoc identities.
            return "true";
        }

        @Override
        public void setReceiveRealMail(String receiveRealMail) {
            //
        }

        @Override
        public boolean getInformSessionTimeout() {
            return false;
        }

        @Override
        public void setInformSessionTimeout(boolean b) {/* */
        }

        @Override
        public boolean getPresenceMessagesPublic() {
            return false;
        }

        @Override
        public void setPresenceMessagesPublic(boolean b) {/* */
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner.plan;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.prestosql.sql.planner.SortExpressionContext;
import io.prestosql.sql.planner.Symbol;
import io.prestosql.sql.tree.ComparisonExpression;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.Join;
import javax.annotation.concurrent.Immutable;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.prestosql.sql.planner.SortExpressionExtractor.extractSortExpression;
import static io.prestosql.sql.planner.plan.JoinNode.DistributionType.PARTITIONED;
import static io.prestosql.sql.planner.plan.JoinNode.DistributionType.REPLICATED;
import static io.prestosql.sql.planner.plan.JoinNode.Type.FULL;
import static io.prestosql.sql.planner.plan.JoinNode.Type.INNER;
import static io.prestosql.sql.planner.plan.JoinNode.Type.LEFT;
import static io.prestosql.sql.planner.plan.JoinNode.Type.RIGHT;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
@Immutable
public class JoinNode
extends PlanNode
{
// Immutable join definition; all fields are assigned once in the constructor.
private final Type type;
private final PlanNode left;
private final PlanNode right;
// Equi-join conjuncts; empty for cross joins.
private final List<EquiJoinClause> criteria;
private final List<Symbol> outputSymbols;
// Optional non-equi filter expression applied on top of the criteria.
private final Optional<Expression> filter;
// Optional hash symbols for the two sides; only valid for equijoins
// (enforced in the constructor).
private final Optional<Symbol> leftHashSymbol;
private final Optional<Symbol> rightHashSymbol;
private final Optional<DistributionType> distributionType;
// Presumably whether this join may spill to disk; empty when undecided —
// TODO confirm against the planner code that sets it.
private final Optional<Boolean> spillable;
/**
 * Creates a join node. Validates that the children cover all output
 * symbols, that hash symbols are only supplied for equijoins, and that
 * the requested distribution type is compatible with the join type.
 */
@JsonCreator
public JoinNode(@JsonProperty("id") PlanNodeId id,
        @JsonProperty("type") Type type,
        @JsonProperty("left") PlanNode left,
        @JsonProperty("right") PlanNode right,
        @JsonProperty("criteria") List<EquiJoinClause> criteria,
        @JsonProperty("outputSymbols") List<Symbol> outputSymbols,
        @JsonProperty("filter") Optional<Expression> filter,
        @JsonProperty("leftHashSymbol") Optional<Symbol> leftHashSymbol,
        @JsonProperty("rightHashSymbol") Optional<Symbol> rightHashSymbol,
        @JsonProperty("distributionType") Optional<DistributionType> distributionType,
        @JsonProperty("spillable") Optional<Boolean> spillable)
{
    super(id);
    requireNonNull(type, "type is null");
    requireNonNull(left, "left is null");
    requireNonNull(right, "right is null");
    requireNonNull(criteria, "criteria is null");
    requireNonNull(outputSymbols, "outputSymbols is null");
    requireNonNull(filter, "filter is null");
    requireNonNull(leftHashSymbol, "leftHashSymbol is null");
    requireNonNull(rightHashSymbol, "rightHashSymbol is null");
    requireNonNull(distributionType, "distributionType is null");
    requireNonNull(spillable, "spillable is null");
    this.type = type;
    this.left = left;
    this.right = right;
    this.criteria = ImmutableList.copyOf(criteria);
    this.outputSymbols = ImmutableList.copyOf(outputSymbols);
    this.filter = filter;
    this.leftHashSymbol = leftHashSymbol;
    this.rightHashSymbol = rightHashSymbol;
    this.distributionType = distributionType;
    this.spillable = spillable;
    Set<Symbol> inputSymbols = ImmutableSet.<Symbol>builder()
            .addAll(left.getOutputSymbols())
            .addAll(right.getOutputSymbols())
            .build();
    // FIX: inputSymbols is already a Set — the original wrapped it in a
    // fresh HashSet before containsAll, a pointless copy.
    checkArgument(inputSymbols.containsAll(outputSymbols), "Left and right join inputs do not contain all output symbols");
    checkArgument(!isCrossJoin() || inputSymbols.size() == outputSymbols.size(), "Cross join does not support output symbols pruning or reordering");
    checkArgument(!(criteria.isEmpty() && leftHashSymbol.isPresent()), "Left hash symbol is only valid in an equijoin");
    checkArgument(!(criteria.isEmpty() && rightHashSymbol.isPresent()), "Right hash symbol is only valid in an equijoin");
    if (distributionType.isPresent()) {
        // The implementation of full outer join only works if the data is hash partitioned.
        checkArgument(
                !(distributionType.get() == REPLICATED && (type == RIGHT || type == FULL)),
                "%s join do not work with %s distribution type",
                type,
                distributionType.get());
        // It does not make sense to PARTITION when there is nothing to partition on
        checkArgument(
                !(distributionType.get() == PARTITIONED && criteria.isEmpty() && type != RIGHT && type != FULL),
                "Equi criteria are empty, so %s join should not have %s distribution type",
                type,
                distributionType.get());
    }
}
/**
 * Returns an equivalent JoinNode with the left and right sources swapped:
 * the join type is mirrored (LEFT <-> RIGHT), every equi-join clause is
 * flipped, the output symbols are regrouped right-side-first, and the two
 * hash symbols swap sides as well. Filter, distribution type and
 * spillability are carried over unchanged.
 */
public JoinNode flipChildren()
{
    return new JoinNode(
            getId(),
            flipType(type),
            right,
            left,
            flipJoinCriteria(criteria),
            flipOutputSymbols(getOutputSymbols(), left, right),
            filter,
            rightHashSymbol,
            leftHashSymbol,
            distributionType,
            spillable);
}
/**
 * Maps a join type to the type that preserves semantics when the join's
 * inputs are exchanged: LEFT and RIGHT swap, while INNER and FULL are
 * symmetric and map to themselves.
 *
 * @throws IllegalStateException if the type has no defined inverse
 */
private static Type flipType(Type type)
{
    if (type == INNER || type == FULL) {
        // symmetric join types are their own inverse
        return type;
    }
    if (type == LEFT) {
        return RIGHT;
    }
    if (type == RIGHT) {
        return LEFT;
    }
    throw new IllegalStateException("No inverse defined for join type: " + type);
}
/**
 * Rewrites the equi-join clauses for a join whose inputs have been exchanged,
 * swapping the left and right side of every clause while preserving order.
 */
private static List<EquiJoinClause> flipJoinCriteria(List<EquiJoinClause> joinCriteria)
{
    ImmutableList.Builder<EquiJoinClause> flipped = ImmutableList.builder();
    for (EquiJoinClause clause : joinCriteria) {
        flipped.add(clause.flip());
    }
    return flipped.build();
}
/**
 * Reorders the output symbols for a join whose inputs have been exchanged:
 * symbols produced by the original right input (the new left) come first,
 * followed by symbols produced by the original left input. The relative order
 * of symbols within each group is preserved.
 */
private static List<Symbol> flipOutputSymbols(List<Symbol> outputSymbols, PlanNode left, PlanNode right)
{
    ImmutableList.Builder<Symbol> flipped = ImmutableList.builder();
    // first pass: symbols coming from the right input lead after the flip
    for (Symbol symbol : outputSymbols) {
        if (right.getOutputSymbols().contains(symbol)) {
            flipped.add(symbol);
        }
    }
    // second pass: symbols coming from the left input follow
    for (Symbol symbol : outputSymbols) {
        if (left.getOutputSymbols().contains(symbol)) {
            flipped.add(symbol);
        }
    }
    return flipped.build();
}
/**
 * How the join's inputs are distributed across nodes. PARTITIONED requires
 * both inputs to be hash-partitioned on the join criteria; REPLICATED copies
 * one input to every node (not valid for RIGHT or FULL joins — see the
 * checks in the constructor).
 */
public enum DistributionType
{
    PARTITIONED,
    REPLICATED
}
/**
 * The logical join variants supported by the planner. Each type carries a
 * human-readable label.
 */
public enum Type
{
    INNER("InnerJoin"),
    LEFT("LeftJoin"),
    RIGHT("RightJoin"),
    FULL("FullJoin");

    private final String joinLabel;

    Type(String joinLabel)
    {
        this.joinLabel = joinLabel;
    }

    /** Returns the human-readable label for this join type (e.g. "InnerJoin"). */
    public String getJoinLabel()
    {
        return joinLabel;
    }

    /**
     * Maps an AST-level {@code Join.Type} to the planner's join type.
     * CROSS and IMPLICIT joins are planned as INNER joins.
     *
     * @throws UnsupportedOperationException for join types the planner does not handle
     */
    public static Type typeConvert(Join.Type joinType)
    {
        switch (joinType) {
            case CROSS:
            case IMPLICIT:
            case INNER:
                return Type.INNER;
            case LEFT:
                return Type.LEFT;
            case RIGHT:
                return Type.RIGHT;
            case FULL:
                return Type.FULL;
            default:
                throw new UnsupportedOperationException("Unsupported join type: " + joinType);
        }
    }
}
/** The logical join type (INNER, LEFT, RIGHT or FULL). */
@JsonProperty("type")
public Type getType()
{
    return type;
}

/** The left input of the join. */
@JsonProperty("left")
public PlanNode getLeft()
{
    return left;
}

/** The right input of the join. */
@JsonProperty("right")
public PlanNode getRight()
{
    return right;
}

/** The equality conjuncts of the join condition; empty for cross joins. */
@JsonProperty("criteria")
public List<EquiJoinClause> getCriteria()
{
    return criteria;
}

/** An optional non-equi filter applied in addition to the equi-criteria. */
@JsonProperty("filter")
public Optional<Expression> getFilter()
{
    return filter;
}

/**
 * Derives, if possible, a sort expression over the right input's symbols from
 * the join filter; empty when there is no filter or no usable expression can
 * be extracted.
 */
public Optional<SortExpressionContext> getSortExpressionContext()
{
    return filter
            .flatMap(filter -> extractSortExpression(ImmutableSet.copyOf(right.getOutputSymbols()), filter));
}

/** Precomputed hash symbol for the left input; only valid for equi-joins. */
@JsonProperty("leftHashSymbol")
public Optional<Symbol> getLeftHashSymbol()
{
    return leftHashSymbol;
}

/** Precomputed hash symbol for the right input; only valid for equi-joins. */
@JsonProperty("rightHashSymbol")
public Optional<Symbol> getRightHashSymbol()
{
    return rightHashSymbol;
}

/** The two children, in (left, right) order. */
@Override
public List<PlanNode> getSources()
{
    return ImmutableList.of(left, right);
}

@Override
@JsonProperty("outputSymbols")
public List<Symbol> getOutputSymbols()
{
    return outputSymbols;
}

/** The chosen distribution strategy; empty until a strategy has been decided. */
@JsonProperty("distributionType")
public Optional<DistributionType> getDistributionType()
{
    return distributionType;
}

/** Whether the join may spill to disk; empty until decided. */
@JsonProperty("spillable")
public Optional<Boolean> isSpillable()
{
    return spillable;
}
@Override
public <R, C> R accept(PlanVisitor<R, C> visitor, C context)
{
    return visitor.visitJoin(this, context);
}

/**
 * Creates a copy of this node with the given children substituted as
 * (left, right); all other properties are preserved.
 */
@Override
public PlanNode replaceChildren(List<PlanNode> newChildren)
{
    checkArgument(newChildren.size() == 2, "expected newChildren to contain 2 nodes");
    return new JoinNode(getId(), type, newChildren.get(0), newChildren.get(1), criteria, outputSymbols, filter, leftHashSymbol, rightHashSymbol, distributionType, spillable);
}

/** Creates a copy of this node with the distribution type set to the given value. */
public JoinNode withDistributionType(DistributionType distributionType)
{
    return new JoinNode(getId(), type, left, right, criteria, outputSymbols, filter, leftHashSymbol, rightHashSymbol, Optional.of(distributionType), spillable);
}

/** Creates a copy of this node with the spillable flag set to the given value. */
public JoinNode withSpillable(boolean spillable)
{
    return new JoinNode(getId(), type, left, right, criteria, outputSymbols, filter, leftHashSymbol, rightHashSymbol, distributionType, Optional.of(spillable));
}
/**
 * A cross join is an inner join with neither equi-criteria nor a filter,
 * i.e. a pure Cartesian product of its inputs.
 */
public boolean isCrossJoin()
{
    return type == INNER && criteria.isEmpty() && !filter.isPresent();
}
/**
 * One equality conjunct of a join condition, pairing a symbol from the left
 * input with a symbol from the right input ({@code left = right}). Instances
 * are immutable and serialized via Jackson.
 */
public static class EquiJoinClause
{
    private final Symbol left;
    private final Symbol right;

    @JsonCreator
    public EquiJoinClause(@JsonProperty("left") Symbol left, @JsonProperty("right") Symbol right)
    {
        this.left = requireNonNull(left, "left is null");
        this.right = requireNonNull(right, "right is null");
    }

    /** The symbol taken from the left join input. */
    @JsonProperty("left")
    public Symbol getLeft()
    {
        return left;
    }

    /** The symbol taken from the right join input. */
    @JsonProperty("right")
    public Symbol getRight()
    {
        return right;
    }

    /** Renders this clause as an equality comparison between the two symbol references. */
    public ComparisonExpression toExpression()
    {
        return new ComparisonExpression(ComparisonExpression.Operator.EQUAL, left.toSymbolReference(), right.toSymbolReference());
    }

    /** Swaps the two sides, for use when the join's children are flipped. */
    public EquiJoinClause flip()
    {
        return new EquiJoinClause(right, left);
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj == this) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        EquiJoinClause that = (EquiJoinClause) obj;
        return Objects.equals(left, that.left)
                && Objects.equals(right, that.right);
    }

    @Override
    public int hashCode()
    {
        return Objects.hash(left, right);
    }

    @Override
    public String toString()
    {
        return format("%s = %s", left, right);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.io.network.partition.consumer;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.memory.MemorySegmentProvider;
import org.apache.flink.runtime.checkpoint.channel.ChannelStateWriter;
import org.apache.flink.runtime.io.network.NettyShuffleEnvironment;
import org.apache.flink.runtime.io.network.buffer.BufferDecompressor;
import org.apache.flink.runtime.io.network.buffer.BufferPool;
import org.apache.flink.runtime.io.network.buffer.NoOpBufferPool;
import org.apache.flink.runtime.io.network.partition.InputChannelTestUtils;
import org.apache.flink.runtime.io.network.partition.PartitionProducerStateProvider;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.IntermediateDataSetID;
import org.apache.flink.runtime.taskmanager.NettyShuffleEnvironmentConfiguration;
import org.apache.flink.runtime.throughput.BufferDebloatConfiguration;
import org.apache.flink.runtime.throughput.BufferDebloater;
import org.apache.flink.runtime.throughput.ThroughputCalculator;
import org.apache.flink.util.clock.SystemClock;
import org.apache.flink.util.function.SupplierWithException;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.stream.IntStream;
/** Utility class to encapsulate the logic of building a {@link SingleInputGate} instance. */
public class SingleInputGateBuilder {

    /** Producer-state provider that accepts every check without doing anything. */
    public static final PartitionProducerStateProvider NO_OP_PRODUCER_CHECKER =
            (dsid, id, consumer) -> {};

    // A fresh dataset id per builder instance; the built gate consumes partitions of it.
    private final IntermediateDataSetID intermediateDataSetID = new IntermediateDataSetID();

    // Network buffer size (in bytes) passed to the gate; fixed for tests.
    private final int bufferSize = 4096;

    private ResultPartitionType partitionType = ResultPartitionType.PIPELINED;

    private int consumedSubpartitionIndex = 0;

    private int gateIndex = 0;

    private int numberOfChannels = 1;

    private PartitionProducerStateProvider partitionProducerStateProvider = NO_OP_PRODUCER_CHECKER;

    // Null means the gate performs no buffer decompression.
    private BufferDecompressor bufferDecompressor = null;

    private MemorySegmentProvider segmentProvider =
            InputChannelTestUtils.StubMemorySegmentProvider.getInstance();

    private ChannelStateWriter channelStateWriter = ChannelStateWriter.NO_OP;

    // When set, build() creates one input channel per channel index via this factory.
    @Nullable
    private BiFunction<InputChannelBuilder, SingleInputGate, InputChannel> channelFactory = null;

    private SupplierWithException<BufferPool, IOException> bufferPoolFactory = NoOpBufferPool::new;

    private BufferDebloatConfiguration bufferDebloatConfiguration =
            BufferDebloatConfiguration.fromConfiguration(new Configuration());

    private Function<BufferDebloatConfiguration, ThroughputCalculator> createThroughputCalculator =
            config -> new ThroughputCalculator(SystemClock.getInstance());

    public SingleInputGateBuilder setPartitionProducerStateProvider(
            PartitionProducerStateProvider partitionProducerStateProvider) {

        this.partitionProducerStateProvider = partitionProducerStateProvider;
        return this;
    }

    public SingleInputGateBuilder setResultPartitionType(ResultPartitionType partitionType) {
        this.partitionType = partitionType;
        return this;
    }

    public SingleInputGateBuilder setConsumedSubpartitionIndex(int consumedSubpartitionIndex) {
        this.consumedSubpartitionIndex = consumedSubpartitionIndex;
        return this;
    }

    public SingleInputGateBuilder setSingleInputGateIndex(int gateIndex) {
        this.gateIndex = gateIndex;
        return this;
    }

    public SingleInputGateBuilder setNumberOfChannels(int numberOfChannels) {
        this.numberOfChannels = numberOfChannels;
        return this;
    }

    /**
     * Wires the buffer pool factory and the memory segment provider from a real
     * {@link NettyShuffleEnvironment} instead of the stub defaults.
     */
    public SingleInputGateBuilder setupBufferPoolFactory(NettyShuffleEnvironment environment) {
        NettyShuffleEnvironmentConfiguration config = environment.getConfiguration();
        this.bufferPoolFactory =
                SingleInputGateFactory.createBufferPoolFactory(
                        environment.getNetworkBufferPool(), config.floatingNetworkBuffersPerGate());
        this.segmentProvider = environment.getNetworkBufferPool();
        return this;
    }

    public SingleInputGateBuilder setBufferPoolFactory(BufferPool bufferPool) {
        this.bufferPoolFactory = () -> bufferPool;
        return this;
    }

    public SingleInputGateBuilder setBufferDecompressor(BufferDecompressor bufferDecompressor) {
        this.bufferDecompressor = bufferDecompressor;
        return this;
    }

    public SingleInputGateBuilder setSegmentProvider(MemorySegmentProvider segmentProvider) {
        this.segmentProvider = segmentProvider;
        return this;
    }

    /** Adds automatic initialization of all channels with the given factory. */
    public SingleInputGateBuilder setChannelFactory(
            BiFunction<InputChannelBuilder, SingleInputGate, InputChannel> channelFactory) {
        this.channelFactory = channelFactory;
        return this;
    }

    public SingleInputGateBuilder setChannelStateWriter(ChannelStateWriter channelStateWriter) {
        this.channelStateWriter = channelStateWriter;
        return this;
    }

    public SingleInputGateBuilder setBufferDebloatConfiguration(
            BufferDebloatConfiguration configuration) {
        this.bufferDebloatConfiguration = configuration;
        return this;
    }

    public SingleInputGateBuilder setThroughputCalculator(
            Function<BufferDebloatConfiguration, ThroughputCalculator> createThroughputCalculator) {
        this.createThroughputCalculator = createThroughputCalculator;
        return this;
    }

    /**
     * Creates the {@link SingleInputGate} from the configured state. If a channel
     * factory was set, also creates and installs {@code numberOfChannels} input
     * channels (indexed 0..n-1), each wired to the configured channel state writer.
     */
    public SingleInputGate build() {
        SingleInputGate gate =
                new SingleInputGate(
                        "Single Input Gate",
                        gateIndex,
                        intermediateDataSetID,
                        partitionType,
                        consumedSubpartitionIndex,
                        numberOfChannels,
                        partitionProducerStateProvider,
                        bufferPoolFactory,
                        bufferDecompressor,
                        segmentProvider,
                        bufferSize,
                        createThroughputCalculator.apply(bufferDebloatConfiguration),
                        maybeCreateBufferDebloater(gateIndex));
        if (channelFactory != null) {
            gate.setInputChannels(
                    IntStream.range(0, numberOfChannels)
                            .mapToObj(
                                    index ->
                                            channelFactory.apply(
                                                    InputChannelBuilder.newBuilder()
                                                            .setStateWriter(channelStateWriter)
                                                            .setChannelIndex(index),
                                                    gate))
                            .toArray(InputChannel[]::new));
        }
        return gate;
    }

    /**
     * Builds a {@link BufferDebloater} when buffer debloating is enabled in the
     * configuration; returns {@code null} otherwise (the gate is expected to
     * accept a null debloater — confirm against SingleInputGate's constructor).
     */
    private BufferDebloater maybeCreateBufferDebloater(int gateIndex) {
        if (bufferDebloatConfiguration.isEnabled()) {
            return new BufferDebloater(
                    gateIndex,
                    bufferDebloatConfiguration.getTargetTotalBufferSize().toMillis(),
                    bufferDebloatConfiguration.getMaxBufferSize(),
                    bufferDebloatConfiguration.getMinBufferSize(),
                    bufferDebloatConfiguration.getBufferDebloatThresholdPercentages(),
                    bufferDebloatConfiguration.getNumberOfSamples());
        }
        return null;
    }
}
| |
package org.gameontext.mediator;
import java.lang.reflect.Field;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import javax.enterprise.concurrent.ManagedScheduledExecutorService;
import javax.enterprise.concurrent.ManagedThreadFactory;
import javax.enterprise.concurrent.Trigger;
import javax.websocket.Session;
import org.gameontext.mediator.ClientMediator;
import org.gameontext.mediator.Constants;
import org.gameontext.mediator.Log;
import org.gameontext.mediator.MapClient;
import org.gameontext.mediator.MediatorBuilder;
import org.gameontext.mediator.MediatorNexus;
import org.gameontext.mediator.PlayerClient;
import org.gameontext.mediator.WSDrain;
import org.gameontext.mediator.MediatorNexus.ClientMediatorPod;
import org.gameontext.mediator.MediatorNexus.UserView;
import org.gameontext.mediator.models.Exit;
import org.gameontext.mediator.models.Exits;
import org.gameontext.mediator.models.RoomInfo;
import org.gameontext.mediator.models.Site;
import org.gameontext.mediator.room.FirstRoom;
import org.gameontext.mediator.room.RemoteRoomProxy;
import org.gameontext.mediator.room.RoomMediator;
import org.gameontext.mediator.room.RoomMediator.Type;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import mockit.Expectations;
import mockit.Injectable;
import mockit.Mock;
import mockit.MockUp;
import mockit.Mocked;
import mockit.Tested;
import mockit.Verifications;
import mockit.integration.junit4.JMockit;
/**
* @author elh
*
*/
@RunWith(JMockit.class)
public class MediatorBuilderTest {

    // Class under test; JMockit injects the @Injectable fields below into it.
    @Tested MediatorBuilder builder;

    @Injectable MediatorNexus nexus;
    @Injectable MapClient mapClient;
    @Injectable PlayerClient playerClient;
    @Injectable ManagedThreadFactory threadFactory;
    @Injectable ManagedScheduledExecutorService scheduledExecutor;
    @Injectable String systemId;

    // Fixed identity used by the tests.
    static final String signedJwt = "testJwt";
    static final String userId = "dummy.DevUser";
    static final String userName = "DevUser";

    // Minimal UserView backed by the constants above.
    static final UserView userView = new UserView() {
        @Override
        public String getUserId() {
            return userId;
        }

        @Override
        public String getUserName() {
            return userName;
        }
    };

    // Fixed room identity used by the tests.
    static final String roomId = "roomId";
    static final String roomName = "roomName";
    static final String roomFullName = "roomFullName";

    @Rule
    public TestName testName = new TestName();

    @Before
    public void before() {
        // Banner makes interleaved test output easier to read.
        System.out.println("-- " + testName.getMethodName() + " --------------------------------------");

        // Redirect the static Log helper to stdout so log output shows up in test runs.
        new MockUp<Log>() {
            @Mock
            public void log(Level level, Object source, String msg, Object[] params) {
                System.out.println("Log: " + MessageFormat.format(msg, params));
            }

            @Mock
            public void log(Level level, Object source, String msg, Throwable thrown) {
                System.out.println("Log: " + msg + ": " + thrown.getMessage());
                thrown.printStackTrace(System.out);
            }
        };
    }

    @Test
    public void testPostConstruct() {
        builder.postConstruct();

        // The builder must register itself with the nexus on startup.
        new Verifications() {{
            nexus.setBuilder(builder);
        }};
    }

    @Test
    public void testBuildClientMediator(@Mocked Session session,
                                        @Mocked WSDrain drain) {
        new Expectations() {{
            drain.start();
            new WSDrain(userId, session); result = drain;
        }};

        ClientMediator client = builder.buildClientMediator(userId, session, null, signedJwt);
        Assert.assertEquals(userId, client.getUserId());

        new Verifications() {{
            drain.start(); times = 1; // drain to client should be started
        }};
    }

    @Test
    public void testGetFirstRoomMediator(@Mocked ClientMediatorPod client,
                                         @Mocked Site firstRoomSite) {
        new Expectations() {{
            mapClient.getSite(Constants.FIRST_ROOM); result = firstRoomSite;
        }};

        FirstRoom fr = (FirstRoom) builder.getFirstRoomMediator(client);
        Assert.assertEquals(Type.FIRST_ROOM, fr.getType());
        Assert.assertEquals(Constants.FIRST_ROOM, fr.getName());
    }

    @Test
    public void testGetFirstRoomMediatorNullSite(@Mocked ClientMediatorPod client) {
        // First room should still be created even when the map has no site for it.
        new Expectations() {{
            mapClient.getSite(Constants.FIRST_ROOM); result = null;
        }};

        FirstRoom fr = (FirstRoom) builder.getFirstRoomMediator(client);
        Assert.assertEquals(Type.FIRST_ROOM, fr.getType());
        Assert.assertEquals(Constants.FIRST_ROOM, fr.getName());
    }

    @Test
    public void testFindMediatorForRoomNull(@Mocked ClientMediatorPod client) {
        // A null room id falls back to first room.
        RoomMediator room = builder.findMediatorForRoom(client, null);
        Assert.assertEquals(Type.FIRST_ROOM, room.getType());
        Assert.assertEquals(Constants.FIRST_ROOM, room.getName());
    }

    @Test
    public void testFindMediatorForRoomEmpty(@Mocked ClientMediatorPod client) {
        // An empty room id falls back to first room.
        RoomMediator room = builder.findMediatorForRoom(client, "");
        Assert.assertEquals(Type.FIRST_ROOM, room.getType());
        Assert.assertEquals(Constants.FIRST_ROOM, room.getName());
    }

    @Test
    public void testFindMediatorForRoomFirstRoom(@Mocked ClientMediatorPod client) {
        RoomMediator room = builder.findMediatorForRoom(client, Constants.FIRST_ROOM);
        Assert.assertEquals(Type.FIRST_ROOM, room.getType());
        Assert.assertEquals(Constants.FIRST_ROOM, room.getName());
    }

    @Test
    public void testFindMediatorForRoomNoSite(@Mocked ClientMediatorPod client) {
        // Unknown room id (no site in the map) falls back to first room.
        new Expectations() {{
            mapClient.getSite(roomId); result = null;
        }};

        RoomMediator room = builder.findMediatorForRoom(client, roomId);
        Assert.assertEquals(Type.FIRST_ROOM, room.getType());
        Assert.assertEquals(Constants.FIRST_ROOM, room.getName());
    }

    @Test
    public void testFindMediatorForRoomEmptySite(@Mocked ClientMediatorPod client,
                                                 @Mocked Site site) {
        // A site with no room info yields an EMPTY room mediator.
        new Expectations() {{
            mapClient.getSite(roomId); result = site;
            site.getInfo(); result = null;
        }};

        RoomMediator room = builder.findMediatorForRoom(client, roomId);
        Assert.assertEquals(Type.EMPTY, room.getType());
    }

    @Test
    public void testFindMediatorForRoomSite(@Mocked ClientMediatorPod client,
                                            @Mocked Site site,
                                            @Mocked RoomInfo info) {
        // A site with room info yields a CONNECTING mediator carrying the room name.
        new Expectations() {{
            mapClient.getSite(roomId); result = site;
            site.getInfo(); result = info;
            info.getName(); result = roomName;
        }};

        RoomMediator room = builder.findMediatorForRoom(client, roomId);
        Assert.assertEquals(Type.CONNECTING, room.getType());
        Assert.assertEquals(roomName, room.getName());
    }

    @Test
    public void testFindMediatorForExitNoTarget(@Mocked ClientMediatorPod client,
                                                @Mocked RoomMediator startingRoom,
                                                @Mocked Exits exits) {
        // No exit in the requested direction: stay in the starting room.
        new Expectations() {{
            startingRoom.getExits(); result = exits;
            exits.getExit("N"); result = null;
        }};

        RoomMediator room = builder.findMediatorForExit(client, startingRoom, "N");
        Assert.assertSame(startingRoom, room);
    }

    @Test
    public void testFindMediatorForExitFirstRoom(@Mocked ClientMediatorPod client,
                                                 @Mocked RoomMediator startingRoom,
                                                 @Mocked Exits exits) {
        new Expectations() {{
            startingRoom.getExits(); result = exits;
            exits.getExit("N").getId(); result = Constants.FIRST_ROOM;
        }};

        RoomMediator room = builder.findMediatorForExit(client, startingRoom, "N");
        Assert.assertEquals(Type.FIRST_ROOM, room.getType());
        Assert.assertEquals(Constants.FIRST_ROOM, room.getName());
    }

    @Test
    public void testFindMediatorForExitNoSite(@Mocked ClientMediatorPod client,
                                              @Mocked RoomMediator startingRoom,
                                              @Mocked Exit north) {
        Exits exits = new Exits();
        exits.setN(north);

        new Expectations() {{
            startingRoom.getExits(); result = exits;
            north.getId(); result = roomId;
            mapClient.getSite(roomId); result = null;
        }};

        // Fallback site built from original exit connection details
        RoomMediator room = builder.findMediatorForExit(client, startingRoom, "N");

        // The generated fallback exit should have starting room to the opposite
        // side (S), since we're attempting to use startingRoom's north door.
        Exits genExits = room.getExits();
        Assert.assertEquals(roomId, genExits.getS().getId());

        // The fallback uses the original exit information, which
        // includes connection details to make a first pass with..
        Assert.assertEquals(Type.CONNECTING, room.getType());
    }

    @Test
    public void testFindMediatorForExitEmptySite(@Mocked ClientMediatorPod client,
                                                 @Mocked RoomMediator startingRoom,
                                                 @Mocked Exits exits,
                                                 @Mocked Site site) {
        new Expectations() {{
            startingRoom.getExits(); result = exits;
            exits.getExit("N").getId(); result = roomId;
            mapClient.getSite(roomId); result = site;
            site.getInfo(); result = null;
        }};

        RoomMediator room = builder.findMediatorForExit(client, startingRoom, "N");
        Assert.assertEquals(Type.EMPTY, room.getType());
    }

    @Test
    public void testFindMediatorForExitSite(@Mocked ClientMediatorPod client,
                                            @Mocked RoomMediator startingRoom,
                                            @Mocked Exits exits,
                                            @Mocked Site site,
                                            @Mocked RoomInfo info) {
        new Expectations() {{
            startingRoom.getExits(); result = exits;
            exits.getExit("N").getId(); result = roomId;
            mapClient.getSite(roomId); result = site;
            site.getInfo(); result = info;
        }};

        RoomMediator room = builder.findMediatorForExit(client, startingRoom, "N");
        Assert.assertEquals(Type.CONNECTING, room.getType());
    }

    @Test
    public void testCreateDelegateEmpty(@Mocked Site site1) {
        new Expectations() {{
            site1.getId(); result = roomId;
            site1.getInfo(); result = null;
        }};

        RemoteRoomProxy proxy = new RemoteRoomProxy(builder, userView, site1.getId());
        Assert.assertEquals(Type.EMPTY, proxy.getType()); // proxy type should reflect the guts!
    }

    @Test
    public void testCreateConnectingDelegateHelloBadConnectionType(@Mocked Site site1,
                                                                   @Mocked RoomInfo info,
                                                                   @Mocked ClientMediatorPod pod1) throws Exception {
        // Special mock of sched exec to run immediately
        builder.scheduledExecutor = new FakeExecutor();

        new Expectations() {{
            site1.getId(); result = roomId;
            site1.getInfo(); result = info;
            info.getName(); result = roomName;
            info.getFullName(); result = roomFullName;
            info.getConnectionDetails().getType(); result = "unknown";
        }};

        RemoteRoomProxy proxy = new RemoteRoomProxy(builder, userView, roomId);

        // Reach into the proxy to observe its private "updating" flag.
        Field field_updating = RemoteRoomProxy.class.getDeclaredField("updating");
        field_updating.setAccessible(true);
        AtomicBoolean updating = (AtomicBoolean) field_updating.get(proxy);

        Assert.assertEquals(Type.CONNECTING, proxy.getType()); // proxy type should reflect the guts!

        // Attempt connection with bad type
        proxy.hello(pod1);
        Assert.assertFalse("Updating flag should be reset to false", updating.get());
        Assert.assertEquals(Type.SICK, proxy.getType()); // proxy type should reflect the guts!
    }

    @Test
    public void testCreateConnectingDelegateHello(@Mocked Site site1,
                                                  @Mocked RoomInfo info) {
        new Expectations() {{
            site1.getId(); result = roomId;
            site1.getInfo(); result = info;
        }};

        RemoteRoomProxy proxy = new RemoteRoomProxy(builder, userView, site1.getId());
        Assert.assertEquals(Type.CONNECTING, proxy.getType()); // proxy type should reflect the guts!

        // Attempt connection
    }

    @Test
    public void testCreateConnectingDelegateJoin(@Mocked Site site1,
                                                 @Mocked RoomInfo info) {
        new Expectations() {{
            site1.getId(); result = roomId;
            site1.getInfo(); result = info;
        }};

        RemoteRoomProxy proxy = new RemoteRoomProxy(builder, userView, site1.getId());
        Assert.assertEquals(Type.CONNECTING, proxy.getType()); // proxy type should reflect the guts!

        // Attempt connection
    }

    /**
     * Executor stub that runs submitted Runnables synchronously on the calling
     * thread (execute only); every other method is a no-op returning false/null.
     */
    final class FakeExecutor implements ManagedScheduledExecutorService {
        public void execute(Runnable command) {
            System.out.println("HEY!!!");
            command.run();
        }

        @Override
        public boolean awaitTermination(long arg0, TimeUnit arg1) throws InterruptedException {
            return false;
        }

        @Override
        public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> arg0) throws InterruptedException {
            return null;
        }

        @Override
        public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> arg0, long arg1, TimeUnit arg2)
                throws InterruptedException {
            return null;
        }

        @Override
        public <T> T invokeAny(Collection<? extends Callable<T>> arg0) throws InterruptedException, ExecutionException {
            return null;
        }

        @Override
        public <T> T invokeAny(Collection<? extends Callable<T>> arg0, long arg1, TimeUnit arg2)
                throws InterruptedException, ExecutionException, TimeoutException {
            return null;
        }

        @Override
        public boolean isShutdown() {
            return false;
        }

        @Override
        public boolean isTerminated() {
            return false;
        }

        @Override
        public void shutdown() {
        }

        @Override
        public List<Runnable> shutdownNow() {
            return null;
        }

        @Override
        public <T> Future<T> submit(Callable<T> arg0) {
            return null;
        }

        @Override
        public Future<?> submit(Runnable arg0) {
            return null;
        }

        @Override
        public <T> Future<T> submit(Runnable arg0, T arg1) {
            return null;
        }

        @Override
        public ScheduledFuture<?> schedule(Runnable arg0, long arg1, TimeUnit arg2) {
            return null;
        }

        @Override
        public <V> ScheduledFuture<V> schedule(Callable<V> arg0, long arg1, TimeUnit arg2) {
            return null;
        }

        @Override
        public ScheduledFuture<?> scheduleAtFixedRate(Runnable arg0, long arg1, long arg2, TimeUnit arg3) {
            return null;
        }

        @Override
        public ScheduledFuture<?> scheduleWithFixedDelay(Runnable arg0, long arg1, long arg2, TimeUnit arg3) {
            return null;
        }

        @Override
        public ScheduledFuture<?> schedule(Runnable command, Trigger trigger) {
            return null;
        }

        @Override
        public <V> ScheduledFuture<V> schedule(Callable<V> callable, Trigger trigger) {
            return null;
        }
    }
}
| |
/*
* Copyright 2022 Red Hat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.hal.client.bootstrap.endpoint;
import org.jboss.gwt.elemento.core.Elements;
import org.jboss.hal.ballroom.Alert;
import org.jboss.hal.ballroom.dialog.Dialog;
import org.jboss.hal.ballroom.form.ButtonItem;
import org.jboss.hal.ballroom.form.Form;
import org.jboss.hal.ballroom.form.FormItem;
import org.jboss.hal.ballroom.table.Scope;
import org.jboss.hal.ballroom.table.Table;
import org.jboss.hal.config.Endpoints;
import org.jboss.hal.core.mbui.form.ModelNodeForm;
import org.jboss.hal.core.mbui.table.ModelNodeTable;
import org.jboss.hal.meta.Metadata;
import org.jboss.hal.resources.Constants;
import org.jboss.hal.resources.Icons;
import org.jboss.hal.resources.Ids;
import org.jboss.hal.resources.Messages;
import com.google.common.base.Strings;
import com.google.gwt.core.client.GWT;
import com.google.gwt.user.client.rpc.AsyncCallback;
import elemental2.dom.HTMLButtonElement;
import elemental2.dom.HTMLElement;
import static org.jboss.gwt.elemento.core.Elements.div;
import static org.jboss.gwt.elemento.core.Elements.p;
import static org.jboss.hal.ballroom.dialog.Dialog.PRIMARY_POSITION;
import static org.jboss.hal.ballroom.table.RefreshMode.HOLD;
import static org.jboss.hal.client.bootstrap.endpoint.Endpoint.SCHEME;
import static org.jboss.hal.client.bootstrap.endpoint.EndpointDialog.Mode.ADD;
import static org.jboss.hal.client.bootstrap.endpoint.EndpointDialog.Mode.SELECT;
import static org.jboss.hal.dmr.ModelDescriptionConstants.HOST;
import static org.jboss.hal.dmr.ModelDescriptionConstants.NAME;
import static org.jboss.hal.dmr.ModelDescriptionConstants.PORT;
/**
* Modal dialog to manage bootstrap servers. The dialog offers a page to connect to an existing server and a page to add new
* servers.
*/
class EndpointDialog {
private static final Constants CONSTANTS = GWT.create(Constants.class);
private static final Messages MESSAGES = GWT.create(Messages.class);
private static final EndpointResources RESOURCES = GWT.create(EndpointResources.class);
private final EndpointManager manager;
private final EndpointStorage storage;
private final HTMLElement selectPage;
private final HTMLElement addPage;
private final Alert alert;
private final Form<Endpoint> form;
private Mode mode;
private Table<Endpoint> table;
private Dialog dialog;
EndpointDialog(EndpointManager manager, EndpointStorage storage) {
this.manager = manager;
this.storage = storage;
Metadata metadata = Metadata.staticDescription(RESOURCES.endpoint());
table = new ModelNodeTable.Builder<Endpoint>(Ids.ENDPOINT_SELECT, metadata)
.button(CONSTANTS.add(), table -> switchTo(ADD))
.button(CONSTANTS.remove(), table -> {
storage.remove(table.selectedRow());
this.table.update(storage.list(), HOLD);
dialog.getButton(PRIMARY_POSITION).disabled = !this.table.hasSelection();
}, Scope.SELECTED)
.column(NAME)
.column("url", "URL", (cell, type, row, meta) -> row.getUrl())
.build();
selectPage = div()
.add(p().textContent(CONSTANTS.endpointSelectDescription()))
.add(table.element()).element();
alert = new Alert();
ButtonItem ping = new ButtonItem(Ids.ENDPOINT_PING, CONSTANTS.ping());
ping.onClick((event) -> {
Endpoint endpoint = transientEndpoint();
manager.pingServer(endpoint, new AsyncCallback<Void>() {
@Override
public void onFailure(Throwable throwable) {
alert.setIcon(Icons.ERROR)
.setText(MESSAGES.endpointError(endpoint.getUrl(), Endpoints.getBaseUrl()));
Elements.setVisible(alert.element(), true);
}
@Override
public void onSuccess(Void aVoid) {
alert.setIcon(Icons.OK).setText(MESSAGES.endpointOk(endpoint.getUrl()));
Elements.setVisible(alert.element(), true);
}
});
});
form = new ModelNodeForm.Builder<Endpoint>(Ids.ENDPOINT_ADD, metadata)
.addOnly()
.include(NAME, SCHEME, HOST, PORT)
.unboundFormItem(ping)
.unsorted()
.onCancel((form) -> switchTo(SELECT))
.onSave((form, changedValues) -> {
Endpoint endpoint = form.getModel();
if (!endpoint.hasDefined(HOST)) {
endpoint.get(HOST).set(EndpointManager.DEFAULT_HOST);
}
if (!endpoint.hasDefined(PORT)) {
endpoint.get(PORT).set(EndpointManager.DEFAULT_PORT);
}
storage.add(endpoint);
switchTo(SELECT);
select(endpoint);
})
.build();
addPage = div()
.add(p().textContent(CONSTANTS.endpointAddDescription()))
.add(alert)
.add(form.element()).element();
dialog = new Dialog.Builder(CONSTANTS.endpointSelectTitle())
.add(selectPage, addPage)
.primary(CONSTANTS.endpointConnect(), this::onPrimary)
.secondary(this::onSecondary)
.closeIcon(false)
.closeOnEsc(false)
.build();
dialog.registerAttachable(form, table);
}
private void select(Endpoint endpoint) {
if (mode == SELECT) {
table.select(endpoint);
}
}
private Endpoint transientEndpoint() {
Endpoint endpoint = new Endpoint();
endpoint.setName("__transientEndpoint__"); // NON-NLS
FormItem<String> scheme = form.getFormItem(SCHEME);
endpoint.get(SCHEME).set(scheme.getValue());
FormItem<String> host = form.getFormItem(HOST);
if (Strings.isNullOrEmpty(host.getValue())) {
endpoint.get(HOST).set(EndpointManager.DEFAULT_HOST);
} else {
endpoint.get(HOST).set(host.getValue());
}
FormItem<Number> port = form.getFormItem(PORT);
if (port.getValue() == null) {
endpoint.get(PORT).set(EndpointManager.DEFAULT_PORT);
} else {
endpoint.get(PORT).set(port.getValue().intValue());
}
return endpoint;
}
private void switchTo(Mode mode) {
HTMLButtonElement primaryButton = dialog.getButton(PRIMARY_POSITION);
if (mode == SELECT) {
dialog.setTitle(CONSTANTS.endpointSelectTitle());
table.update(storage.list(), HOLD);
primaryButton.textContent = CONSTANTS.endpointConnect();
primaryButton.disabled = !table.hasSelection();
Elements.setVisible(addPage, false);
Elements.setVisible(selectPage, true);
} else if (mode == ADD) {
dialog.setTitle(CONSTANTS.endpointAddTitle());
Elements.setVisible(alert.element(), false);
form.edit(new Endpoint());
primaryButton.textContent = CONSTANTS.add();
primaryButton.disabled = false;
Elements.setVisible(selectPage, false);
Elements.setVisible(addPage, true);
}
this.mode = mode;
}
private boolean onPrimary() {
if (mode == SELECT) {
manager.onConnect(table.selectedRow());
return true;
} else if (mode == ADD) {
form.save();
return false;
}
return false;
}
private boolean onSecondary() {
if (mode == SELECT) {
// TODO Show an error message "You need to select a management interface"
} else if (mode == ADD) {
form.cancel();
switchTo(SELECT);
}
return false; // don't close the dialog!
}
/**
 * Shows the dialog on the selection page, wires the primary button to the table
 * selection and restores the previously selected endpoint, if any.
 */
void show() {
    dialog.show();
    // primary button is only usable while a row is selected
    table.onSelectionChange(t -> dialog.getButton(PRIMARY_POSITION).disabled = !t.hasSelection());
    table.update(storage.list());
    switchTo(SELECT);
    // re-select the endpoint that was chosen last time (first match wins)
    for (Endpoint endpoint : storage.list()) {
        if (endpoint.isSelected()) {
            select(endpoint);
            break;
        }
    }
}
/** The two pages of this dialog: pick an existing endpoint, or add a new one. */
enum Mode {
    SELECT, ADD
}
}
| |
/*
Part of the G4P library for Processing
http://www.lagers.org.uk/g4p/index.html
http://sourceforge.net/projects/g4p/files/?source=navbar
Copyright (c) 2014 Peter Lager
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General
Public License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place, Suite 330,
Boston, MA 02111-1307 USA
*/
package g4p_controls;
import g4p_controls.HotSpot.HSrect;
import g4p_controls.StyledString.TextLayoutHitInfo;
import g4p_controls.StyledString.TextLayoutInfo;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.font.TextHitInfo;
import java.awt.font.TextLayout;
import java.awt.geom.GeneralPath;
import java.util.LinkedList;
import processing.core.PApplet;
import processing.core.PGraphics;
import processing.awt.PGraphicsJava2D;
import processing.event.KeyEvent;
import processing.event.MouseEvent;
/**
* The password field component. <br>
*
* This control allows the user to secretly enter a password and supports an
* optional horizontal scrollbar. <br>
*
* Each key typed will display a <b>#</b> character, but the user can specify another character. <br>
*
* Unlike a text field it does not support text selection or the copying and pasting of
* text via the clipboard. <br>
*
* Fires CHANGED, ENTERED, LOST_FOCUS, GETS_FOCUS events.<br>
* The focus events are only fired if the control is added to a GTabManager object. <br>
*
* @author Peter Lager
*
*/
public final class GPassword extends GAbstractControl implements Focusable{

    // NOTE(review): appears unused anywhere in this class — candidate for removal.
    protected TextLayoutHitInfo cursorTLHI = new TextLayoutHitInfo();

    // NOTE(review): 'cover' is static, so setVisibleChar() on one field changes the
    // mask character for ALL GPassword instances — confirm this is intentional.
    private static char cover = '#';

    // What is rendered on screen: one cover character per typed character.
    private StyledString stext = new StyledString(" ");
    // The actual characters typed by the user; never rendered. A single space means "empty".
    private StyledString hidden = new StyledString(" ");
    // Maximum / current number of password characters entered.
    private int maxWordLength = 10;
    private int wordLength = 0;

    GTabManager tabManager = null;

    // The width to break a line
    protected int wrapWidth = Integer.MAX_VALUE;

    // The typing area
    protected float tx,ty,th,tw;
    // Offset to display area
    protected float ptx, pty;
    // Caret position
    protected float caretX, caretY;

    // Set whenever the caret may have left the visible area; cleared by pre().
    protected boolean keepCursorInView = false;

    // Clip path for the text display area (local coordinates).
    protected GeneralPath gpTextDisplayArea;

    // Used for identifying selection and cursor position
    protected TextLayoutHitInfo startTLHI = new TextLayoutHitInfo();
    protected TextLayoutHitInfo endTLHI = new TextLayoutHitInfo();

    // The scrollbars available
    // protected final int scrollbarPolicy;
    // protected boolean autoHide = false;
    // protected GScrollbar hsb, vsb;

    // Timer used to toggle caret visibility (blink).
    protected GTimer caretFlasher;
    protected boolean showCaret = false;

    // Stuff to manage text selections
    // endChar/startChar: selection bounds; pos: edit position (initially -1);
    // nbr: number of selected chars; adjust: caret offset applied after an edit.
    protected int endChar = -1, startChar = -1, pos = endChar, nbr = 0, adjust = 0;
    protected boolean textChanged = false;

    protected Font localFont = G4P.globalFont;

    /**
     * Create a password field without a scrollbar.
     *
     * @param theApplet
     * @param p0
     * @param p1
     * @param p2
     * @param p3
     */
    public GPassword(PApplet theApplet, float p0, float p1, float p2, float p3) {
        this(theApplet, p0, p1, p2, p3, SCROLLBARS_NONE);
    }

    /**
     * Create a password field with the given scrollbar policy. <br>
     * This policy can be one of these <br>
     * <ul>
     * <li>SCROLLBARS_NONE</li>
     * <li>SCROLLBARS_HORIZONTAL_ONLY</li>
     * </ul>
     * If you want the scrollbar to auto hide then perform a logical or with
     * <ul>
     * <li>SCROLLBARS_AUTOHIDE</li>
     * </ul>
     * e.g. SCROLLBARS_HORIZONTAL_ONLY | SCROLLBARS_AUTOHIDE
     * <br>
     * NOTE(review): the scrollbar code is currently commented out, so sbPolicy
     * is effectively ignored.
     *
     * @param theApplet
     * @param p0
     * @param p1
     * @param p2
     * @param p3
     * @param sbPolicy
     */
    public GPassword(PApplet theApplet, float p0, float p1, float p2, float p3, int sbPolicy) {
        super(theApplet, p0, p1, p2, p3);
        // scrollbarPolicy = sbPolicy;
        // autoHide = ((sbPolicy & SCROLLBARS_AUTOHIDE) == SCROLLBARS_AUTOHIDE);
        // Blink the caret every 400 ms via flashCaret(GTimer)
        caretFlasher = new GTimer(theApplet, this, "flashCaret", 400);
        caretFlasher.start();
        opaque = true;
        cursorOver = TEXT;
        setVisibleChar(cover);
        children = new LinkedList<GAbstractControl>();
        // Typing area is inset 2 px from the control edges
        tx = ty = 2;
        tw = width - 2 * 2;
        // th = height - ((scrollbarPolicy & SCROLLBAR_HORIZONTAL) != 0 ? 11 : 0);
        th = height;
        gpTextDisplayArea = new GeneralPath();
        gpTextDisplayArea.moveTo( 0, 0);
        gpTextDisplayArea.lineTo( 0, th);
        gpTextDisplayArea.lineTo(tw, th);
        gpTextDisplayArea.lineTo(tw, 0);
        gpTextDisplayArea.closePath();
        hotspots = new HotSpot[]{
            new HSrect(1, tx, ty, tw, th), // typing area
            new HSrect(9, 0, 0, width, height) // control surface
        };
        G4P.pushStyle();
        G4P.showMessages = false;
        z = Z_STICKY;
        G4P.control_mode = GControlMode.CORNER;
        // if((scrollbarPolicy & SCROLLBAR_HORIZONTAL) != 0){
        // hsb = new GScrollbar(theApplet, 0, 0, tw, 10);
        // addControl(hsb, tx, ty + th + 2, 0);
        // hsb.addEventHandler(this, "hsbEventHandler");
        // hsb.setAutoHide(autoHide);
        // }
        G4P.popStyle();
        // z = Z_STICKY;
        createEventHandler(G4P.sketchWindow, "handlePasswordEvents",
            new Class<?>[]{ GPassword.class, GEvent.class },
            new String[]{ "pwordControl", "event" }
        );
        registeredMethods = PRE_METHOD | DRAW_METHOD | MOUSE_METHOD | KEY_METHOD;
        // Must register control
        G4P.registerControl(this);
        bufferInvalid = true;
    }

    /**
     * Set the character that will be displayed instead of the actual character
     * entered by the user. <br>
     * Default value is '#'
     * Only visible, printable characters (ASCII 33-255 excluding DEL) and the
     * euro sign (8364) are accepted; anything else is silently ignored.
     */
    public void setVisibleChar(char c){
        int ascii = (int) c;
        if((ascii >= 33 && ascii <= 255 && ascii != 127) || ascii == 8364)
            cover = c;
    }

    /**
     * Get the current password (hidden) value of this field.
     * @return actual password text
     */
    public String getPassword(){
        String password = hidden.getPlainText();
        // A single space is the internal "empty" sentinel
        return password.equals(" ") ? "" : password;
    }

    /**
     * Get the current length of the password entered.
     */
    public int getWordLength(){
        return wordLength;
    }

    /**
     * Sets the max length of the password. This method is ignored if the control
     * already holds some user input. <br>
     * The default value is 10.
     * @param ml the new max length (must be >= 1)
     */
    public void setMaxWordLength(int ml){
        if(wordLength == 0 && ml >= 1)
            maxWordLength = ml;
    }

    /**
     * Set the font to be used in this control
     *
     * @param font AWT font to use
     */
    public void setFont(Font font) {
        if(font != null && font != localFont && buffer != null){
            localFont = font;
            buffer.g2.setFont(localFont);
            bufferInvalid = true;
        }
    }

    /**
     * Take a snapshot of this control's current appearance.
     *
     * @return a new PGraphics holding an image of the control's buffer
     */
    public PGraphics getSnapshot(){
        updateBuffer();
        PGraphicsJava2D snap = (PGraphicsJava2D) winApp.createGraphics(buffer.width, buffer.height, PApplet.JAVA2D);
        snap.beginDraw();
        snap.image(buffer,0,0);
        // if(hsb != null){
        // snap.pushMatrix();
        // snap.translate(hsb.getX(), hsb.getY());
        // snap.image(hsb.getBuffer(), 0, 0);
        // snap.popMatrix();
        // }
        snap.endDraw();
        return snap;
    }

    /**
     * Called before draw: scrolls the text horizontally (by adjusting ptx) so that
     * the caret stays inside the visible typing area, invalidating the buffer if
     * any scrolling happened.
     */
    public void pre(){
        if(keepCursorInView){
            boolean horzScroll = false;
            float max_ptx = caretX - tw + 2;
            if(endTLHI != null){
                if(ptx > caretX){ // Scroll to the left (text moves right)
                    ptx -= localFont.getSize()/3;
                    if(ptx < 0) ptx = 0;
                    horzScroll = true;
                }
                else if(ptx < max_ptx){ // Scroll to the right (text moves left)?
                    ptx += localFont.getSize()/1.5f;
                    if(ptx > max_ptx) ptx = max_ptx;
                    horzScroll = true;
                }
                // Ensure that we show as much text as possible keeping the caret in view
                // This is particularly important when deleting from the end of the text
                if(ptx > 0 && endTLHI.tli.layout.getAdvance() - ptx < tw - 2){
                    ptx = Math.max(0, endTLHI.tli.layout.getAdvance() - tw - 2);
                    horzScroll = true;
                }
                // if(horzScroll && hsb != null)
                // hsb.setValue(ptx / (stext.getMaxLineLength() + 4));
            }
            // If we have scrolled invalidate the buffer otherwise forget it
            if(horzScroll)
                bufferInvalid = true;
            else
                keepCursorInView = false;
        }
    }

    /**
     * Do not call this directly. A timer calls this method as and when required.
     */
    public void flashCaret(GTimer timer){
        showCaret = !showCaret;
    }

    /**
     * Handles mouse events: clicking inside the typing area takes focus and places
     * the caret; clicking elsewhere gives up focus.
     */
    public void mouseEvent(MouseEvent event){
        if(!visible || !enabled || !available) return;
        calcTransformedOrigin(winApp.mouseX, winApp.mouseY);
        ox -= tx; oy -= ty; // Remove translation
        currSpot = whichHotSpot(ox, oy);
        if(currSpot == 1 || focusIsWith == this)
            cursorIsOver = this;
        else if(cursorIsOver == this)
            cursorIsOver = null;
        switch(event.getAction()){
        case MouseEvent.PRESS:
            if(currSpot == 1){
                if(focusIsWith != this && z >= focusObjectZ()){
                    keepCursorInView = true;
                    takeFocus();
                }
                dragging = false;
                if(stext == null || stext.length() == 0){
                    stext = new StyledString(" ", wrapWidth);
                    stext.getLines(buffer.g2);
                }
                // Place the caret where the user clicked (accounting for scroll offset)
                endTLHI = stext.calculateFromXY(buffer.g2, ox + ptx, oy + pty);
                startTLHI = new TextLayoutHitInfo(endTLHI);
                calculateCaretPos(endTLHI);
                bufferInvalid = true;
            }
            else { // Not over this control so if we have focus loose it
                if(focusIsWith == this)
                    loseFocus(null);
            }
            break;
        case MouseEvent.RELEASE:
            dragging = false;
            bufferInvalid = true;
            break;
        }
    }

    /**
     * Handles keyboard events while this control has focus: works out the current
     * selection / edit position, dispatches to keyPressedProcess or
     * keyTypedProcess, and fires a CHANGED event if the text was modified.
     */
    public void keyEvent(KeyEvent e) {
        if(!visible || !enabled || !available) return;
        if(focusIsWith == this && endTLHI != null){
            char keyChar = e.getKey();
            int keyCode = e.getKeyCode();
            int keyID = e.getAction();
            boolean shiftDown = e.isShiftDown();
            boolean ctrlDown = e.isControlDown();

            textChanged = false;
            keepCursorInView = true;

            // int startPos = pos, startNbr = nbr;

            // Get selection details
            endChar = endTLHI.tli.startCharIndex + endTLHI.thi.getInsertionIndex();
            startChar = (startTLHI != null) ? startTLHI.tli.startCharIndex + startTLHI.thi.getInsertionIndex() : endChar;
            pos = endChar;
            nbr = 0;
            adjust = 0;
            if(endChar != startChar){ // Have we some text selected?
                if(startChar < endChar){ // Forward selection
                    pos = startChar; nbr = endChar - pos;
                }
                else if(startChar > endChar){ // Backward selection
                    pos = endChar; nbr = startChar - pos;
                }
            }
            // Select either keyPressedProcess or keyTypeProcess. These two methods are overridden in child classes
            if(keyID == KeyEvent.PRESS) {
                keyPressedProcess(keyCode, keyChar, shiftDown, ctrlDown);
                // setScrollbarValues(ptx, pty);
            }
            else if(keyID == KeyEvent.TYPE ){ // && e.getKey() != KeyEvent.CHAR_UNDEFINED && !ctrlDown){
                keyTypedProcess(keyCode, keyChar, shiftDown, ctrlDown);
                // setScrollbarValues(ptx, pty);
            }
            if(textChanged){
                changeText();
                fireEvent(this, GEvent.CHANGED);
            }
        }
    }

    /**
     * Handles caret-movement keys (LEFT, RIGHT, HOME, END).
     * NOTE(review): the third parameter is named 'X' — it receives shiftDown from
     * keyEvent but is unused here.
     */
    protected void keyPressedProcess(int keyCode, char keyChar, boolean X, boolean ctrlDown){
        boolean cursorMoved = true;
        switch(keyCode){
        case LEFT:
            moveCaretLeft(endTLHI);
            break;
        case RIGHT:
            moveCaretRight(endTLHI);
            break;
        case GConstants.HOME:
            moveCaretStartOfLine(endTLHI);
            break;
        case GConstants.END:
            moveCaretEndOfLine(endTLHI);
            break;
        default:
            cursorMoved = false;
        }
        if(cursorMoved){
            calculateCaretPos(endTLHI);
            // Collapse any selection to the new caret position
            startTLHI.copyFrom(endTLHI);
        }
    }

    /**
     * Handles typed characters: displayable characters are appended (cover char in
     * stext, real char in hidden), BACKSPACE/DELETE remove characters, ENTER fires
     * an ENTERED event, and TAB moves focus via the tab manager if one is set.
     */
    protected void keyTypedProcess(int keyCode, char keyChar, boolean shiftDown, boolean ctrlDown){
        int ascii = (int)keyChar;
        if(isDisplayable(ascii) && wordLength < maxWordLength){
            if(hidden.getPlainText().equals(" ")){
                // Field was empty — replace the sentinel space
                stext.setText(""+cover);
                hidden.setText(""+keyChar);
            }
            else {
                // Keep the visible (cover) and hidden (real) strings in step
                stext.insertCharacters( "" + cover, pos);
                hidden.insertCharacters("" + keyChar, pos);
            }
            wordLength++;
            adjust = 1; textChanged = true;
        }
        else if(keyChar == BACKSPACE){
            if(stext.deleteCharacters(pos - 1, 1)){
                hidden.deleteCharacters(pos - 1, 1);
                wordLength = --wordLength < 0 ? 0: wordLength;
                adjust = -1; textChanged = true;
            }
        }
        else if(keyChar == DELETE){
            if(stext.deleteCharacters(pos, 1)){
                hidden.deleteCharacters(pos, 1);
                wordLength--;
                adjust = 0; textChanged = true;
            }
        }
        else if(keyChar == ENTER || keyChar == RETURN) {
            fireEvent(this, GEvent.ENTERED);
            // If we have a tab manager and can tab forward then do so
            if(tabManager != null && tabManager.nextControl(this)){
                startTLHI.copyFrom(endTLHI);
                return;
            }
        }
        else if(keyChar == TAB){
            // If possible move to next text control
            if(tabManager != null){
                boolean result = (shiftDown) ? tabManager.prevControl(this) : tabManager.nextControl(this);
                if(result){
                    startTLHI.copyFrom(endTLHI);
                    return;
                }
            }
        }
        // If we have emptied the text then recreate a one character string (space)
        if(stext.length() == 0){
            stext.insertCharacters(" ", 0);
            hidden.insertCharacters(" ", 0);
            adjust++; textChanged = true;
        }
    }

    /**
     * Re-syncs the caret / layout state after the text has changed.
     *
     * @return true if a valid layout was found for the new caret position, false
     *         if everything had to be reset
     */
    protected boolean changeText(){
        TextLayoutInfo tli;
        TextHitInfo thi = null, thiRight = null;

        pos += adjust;
        // Force layouts to be updated
        stext.getLines(buffer.g2);

        // Try to get text layout info for the current position
        tli = stext.getTLIforCharNo(pos);
        if(tli == null){
            // If unable to get a layout for pos then reset everything
            endTLHI = null;
            startTLHI = null;
            ptx = pty = 0;
            caretX = caretY = 0;
            return false;
        }
        // We have a text layout so we can do something
        // First find the position in line
        int posInLine = pos - tli.startCharIndex;

        // Get some hit info so we can see what is happening
        try{
            thiRight = tli.layout.getNextRightHit(posInLine);
        }
        catch(Exception excp){
            thiRight = null;
        }

        if(posInLine <= 0){ // At start of line
            thi = tli.layout.getNextLeftHit(thiRight);
        }
        else if(posInLine >= tli.nbrChars){ // End of line
            thi = tli.layout.getNextRightHit(tli.nbrChars - 1);
        }
        else { // Character in line;
            thi = tli.layout.getNextLeftHit(thiRight);
        }

        endTLHI.setInfo(tli, thi);
        // Cursor at end of paragraph graphic
        calculateCaretPos(endTLHI);

        bufferInvalid = true;
        startTLHI.copyFrom(endTLHI);
        return true;
    }

    /**
     * Draws the control (from its off-screen buffer) and, when this control has
     * focus, the blinking caret.
     */
    public void draw(){
        if(!visible) return;
        updateBuffer();

        winApp.pushStyle();
        winApp.pushMatrix();

        winApp.translate(cx, cy);
        winApp.rotate(rotAngle);

        winApp.pushMatrix();
        // Move matrix to line up with top-left corner
        winApp.translate(-halfWidth, -halfHeight);
        // Draw buffer
        winApp.imageMode(PApplet.CORNER);
        if(alphaLevel < 255)
            winApp.tint(TINT_FOR_ALPHA, alphaLevel);
        winApp.image(buffer, 0, 0);

        // Draw caret if text display area
        if(focusIsWith == this && showCaret && endTLHI.tli != null){
            float[] cinfo = endTLHI.tli.layout.getCaretInfo(endTLHI.thi);
            float x_left = - ptx + cinfo[0];
            float y_top = - pty + endTLHI.tli.yPosInPara;
            float y_bot = y_top - cinfo[3] + cinfo[5];
            // Only draw the caret when it falls inside the visible typing area
            if(x_left >= 0 && x_left <= tw && y_top >= 0 && y_bot <= th){
                winApp.strokeWeight(1.9f);
                winApp.stroke(palette[12].getRGB());
                winApp.line(tx+x_left, ty+Math.max(0, y_top), tx+x_left, ty+Math.min(th, y_bot));
            }
        }

        winApp.popMatrix();

        if(children != null){
            for(GAbstractControl c : children)
                c.draw();
        }
        winApp.popMatrix();
        winApp.popStyle();
    }

    /**
     * If the buffer is invalid then redraw it.
     * @TODO need to use palette for colours
     */
    protected void updateBuffer(){
        if(bufferInvalid) {
            bufferInvalid = false;
            buffer.beginDraw();
            Graphics2D g2d = buffer.g2;
            g2d.setFont(localFont);

            // Get the latest lines of text
            LinkedList<TextLayoutInfo> lines = stext.getLines(g2d);

            // Whole control surface if opaque
            if(opaque)
                buffer.background(palette[6].getRGB());
            else
                buffer.background(buffer.color(255,0));

            // Now move to top left corner of text display area
            buffer.translate(tx,ty);

            // Typing area surface
            buffer.noStroke();
            buffer.fill(palette[7].getRGB());
            buffer.rect(-1,-1,tw+2,th+2);

            g2d.setClip(gpTextDisplayArea);
            // Scroll offset
            buffer.translate(-ptx, -pty);
            // Translate in preparation for display selection and text

            // Display selection and text
            for(TextLayoutInfo lineInfo : lines){
                TextLayout layout = lineInfo.layout;
                buffer.translate(0, layout.getAscent());
                // Draw text
                g2d.setColor(palette[2]);
                lineInfo.layout.draw(g2d, 0, 0);
                buffer.translate(0, layout.getDescent() + layout.getLeading());
            }
            g2d.setClip(null);
            buffer.endDraw();
        }
    }

    /**
     * Give up focus but if the text is only made from spaces
     * then set it to null text. <br>
     * Fire focus events for the GTextField and GTextArea controls
     */
    protected void loseFocus(GAbstractControl grabber){
        // If this control has focus then Fire a lost focus event
        if(focusIsWith == this)
            fireEvent(this, GEvent.LOST_FOCUS);
        // Process mouse-over cursor
        if(cursorIsOver == this)
            cursorIsOver = null;
        focusIsWith = grabber;
        // If only blank text clear it out allowing default text (if any) to be displayed
        if(stext.length() > 0){
            int tl = stext.getPlainText().trim().length();
            if(tl == 0)
                stext.setText("", wrapWidth);
        }
        keepCursorInView = true;
        bufferInvalid = true;
    }

    /**
     * Give the focus to this component but only after allowing the
     * current component with focus to release it gracefully. <br>
     * Always cancel the keyFocusIsWith irrespective of the component
     * type.
     * Fire focus events for the GTextField and GTextArea controls
     */
    protected void takeFocus(){
        // If focus is not yet with this control fire a gets focus event
        if(focusIsWith != this){
            // If the focus is with another control then tell
            // that control to lose focus
            if(focusIsWith != null)
                focusIsWith.loseFocus(this);
            fireEvent(this, GEvent.GETS_FOCUS);
        }
        focusIsWith = this;
    }

    /**
     * Determines whether this component is to have focus or not. <br>
     */
    public void setFocus(boolean focus){
        if(!focus){
            loseFocus(null);
            return;
        }
        // Make sure we have some text
        if(focusIsWith != this){
            dragging = false;
            if(stext == null || stext.length() == 0)
                stext.setText(" ", wrapWidth);
            LinkedList<TextLayoutInfo> lines = stext.getLines(buffer.g2);
            // Select the whole text: caret hits at the very start and very end
            startTLHI = new TextLayoutHitInfo(lines.getFirst(), null);
            startTLHI.thi = startTLHI.tli.layout.getNextLeftHit(1);

            endTLHI = new TextLayoutHitInfo(lines.getLast(), null);
            int lastChar = endTLHI.tli.layout.getCharacterCount();
            endTLHI.thi = startTLHI.tli.layout.getNextRightHit(lastChar-1);
            startTLHI.copyFrom(endTLHI);

            calculateCaretPos(endTLHI);
            bufferInvalid = true;
        }
        keepCursorInView = true;
        takeFocus();
    }

    /**
     * Calculate the caret (text insertion point)
     *
     * @param tlhi
     */
    protected void calculateCaretPos(TextLayoutHitInfo tlhi){
        float temp[] = tlhi.tli.layout.getCaretInfo(tlhi.thi);
        caretX = temp[0];
        caretY = tlhi.tli.yPosInPara;
    }

    /**
     * Move caret to home position
     * @param currPos the current position of the caret
     * @return true if caret moved else false
     */
    protected boolean moveCaretStartOfLine(TextLayoutHitInfo currPos){
        if(currPos.thi.getCharIndex() == 0)
            return false; // already at start of line
        currPos.thi = currPos.tli.layout.getNextLeftHit(1);
        return true;
    }

    /**
     * Move caret to the end of the line that has the current caret position
     * @param currPos the current position of the caret
     * @return true if caret moved else false
     */
    protected boolean moveCaretEndOfLine(TextLayoutHitInfo currPos){
        if(currPos.thi.getCharIndex() == currPos.tli.nbrChars - 1)
            return false; // already at end of line
        currPos.thi = currPos.tli.layout.getNextRightHit(currPos.tli.nbrChars - 1);
        return true;
    }

    /**
     * Move caret left by one character.
     * @param currPos the current position of the caret
     * @return true if caret moved else false
     */
    protected boolean moveCaretLeft(TextLayoutHitInfo currPos){
        TextHitInfo nthi = currPos.tli.layout.getNextLeftHit(currPos.thi);
        if(nthi == null){
            return false;
        }
        else {
            // Move the caret to the left of current position
            currPos.thi = nthi;
        }
        return true;
    }

    /**
     * Move caret right by one character.
     * @param currPos the current position of the caret
     * @return true if caret moved else false
     */
    protected boolean moveCaretRight(TextLayoutHitInfo currPos){
        TextHitInfo nthi = currPos.tli.layout.getNextRightHit(currPos.thi);
        if(nthi == null){
            return false;
        }
        else {
            currPos.thi = nthi;
        }
        return true;
    }

    // public void setJustify(boolean justify){
    // stext.setJustify(justify);
    // bufferInvalid = true;
    // }

    /**
     * Sets the local colour scheme for this control
     */
    public void setLocalColorScheme(int cs){
        super.setLocalColorScheme(cs);
        // if(hsb != null)
        // hsb.setLocalColorScheme(localColorScheme);
        // if(vsb != null)
        // vsb.setLocalColorScheme(localColorScheme);
    }

    /**
     * Registers the tab manager used by ENTER / TAB handling to move focus
     * between controls.
     */
    @Override
    public void setTabManager(GTabManager tm){
        tabManager = tm;
    }
}
| |
/**
Copyright 2017 Andrea "Stock" Stocchero
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.pepstock.charba.client.configuration;
import java.util.List;
import org.pepstock.charba.client.callbacks.BorderDashCallback;
import org.pepstock.charba.client.callbacks.BorderDashOffsetCallback;
import org.pepstock.charba.client.callbacks.ColorCallback;
import org.pepstock.charba.client.callbacks.NativeCallback;
import org.pepstock.charba.client.callbacks.ScaleContext;
import org.pepstock.charba.client.callbacks.ScriptableFunctions.ProxyArrayCallback;
import org.pepstock.charba.client.callbacks.ScriptableUtils;
import org.pepstock.charba.client.callbacks.WidthCallback;
import org.pepstock.charba.client.colors.IsColor;
import org.pepstock.charba.client.commons.AbstractNode;
import org.pepstock.charba.client.commons.Array;
import org.pepstock.charba.client.commons.ArrayInteger;
import org.pepstock.charba.client.commons.CallbackProxy;
import org.pepstock.charba.client.commons.JsHelper;
import org.pepstock.charba.client.commons.Key;
/**
* It is used to configure angled lines that radiate from the center of the chart to the point labels.<br>
* Note that these options only apply if display is true.
*
* @author Andrea "Stock" Stocchero
*
*/
public class RadialAngleLines extends AbstractScaleLines {

    // ---------------------------
    // -- CALLBACKS PROXIES ---
    // ---------------------------
    // callback proxy to invoke the border dash function
    private final CallbackProxy<ProxyArrayCallback> borderDashCallbackProxy = JsHelper.get().newCallbackProxy();

    // border dash callback instance
    private BorderDashCallback<ScaleContext> borderDashCallback = null;

    /**
     * Name of properties of native object.
     */
    private enum Property implements Key
    {
        BORDER_DASH("borderDash");

        // name value of property
        private final String value;

        /**
         * Creates with the property value to use in the native object.
         *
         * @param value value of property name
         */
        private Property(String value) {
            this.value = value;
        }

        /*
         * (non-Javadoc)
         *
         * @see org.pepstock.charba.client.commons.Key#value()
         */
        @Override
        public String value() {
            return value;
        }
    }

    /**
     * Builds the object storing the axis which this angle lines belongs to.
     *
     * @param axis axis which this angle lines belongs to.
     */
    RadialAngleLines(Axis axis) {
        super(axis, axis.getDefaultValues().getAngleLines());
        // sets function to proxy callback in order to invoke the java interface
        this.borderDashCallbackProxy.setCallback(context -> onBorderDash(getAxis().createContext(context), getBorderDashCallback(), getAxis().getDefaultValues().getAngleLines().getBorderDash()));
    }

    /*
     * (non-Javadoc)
     *
     * @see org.pepstock.charba.client.configuration.AbstractScaleLines#getElement()
     */
    @Override
    AbstractNode getElement() {
        return getAxis().getScale().getAngleLines();
    }

    /**
     * If true, angle lines are shown
     *
     * @param display if true, angle lines are shown
     */
    public void setDisplay(boolean display) {
        getAxis().getScale().getAngleLines().setDisplay(display);
    }

    /**
     * If true, angle lines are shown
     *
     * @return if true, angle lines are shown.
     */
    public boolean isDisplay() {
        return getAxis().getScale().getAngleLines().isDisplay();
    }

    /**
     * Sets the color of angled lines.
     *
     * @param color color of angled lines.
     */
    public void setColor(IsColor color) {
        // reset callback if there is
        setColor((ColorCallback<ScaleContext>) null);
        // stores value
        getAxis().getScale().getAngleLines().setColor(color);
    }

    /**
     * Sets the color of angled lines.
     *
     * @param color color of angled lines.
     */
    public void setColor(String color) {
        // reset callback if there is
        setColor((ColorCallback<ScaleContext>) null);
        // stores value
        getAxis().getScale().getAngleLines().setColor(color);
    }

    /**
     * Returns the color of angled lines.
     *
     * @return color of angled lines.
     */
    public String getColorAsString() {
        return getAxis().getScale().getAngleLines().getColorAsString();
    }

    /**
     * Returns the color of angled lines.
     *
     * @return color of angled lines.
     */
    public IsColor getColor() {
        return getAxis().getScale().getAngleLines().getColor();
    }

    /**
     * Sets the width of angled lines.
     *
     * @param lineWidth width of angled lines.
     */
    public void setLineWidth(int lineWidth) {
        // reset callback if there is
        setLineWidth((WidthCallback<ScaleContext>) null);
        // stores value
        getAxis().getScale().getAngleLines().setLineWidth(lineWidth);
    }

    /**
     * Returns the width of angled lines.
     *
     * @return width of angled lines.
     */
    public int getLineWidth() {
        return getAxis().getScale().getAngleLines().getLineWidth();
    }

    /**
     * Sets the line dash pattern used when stroking lines, using an array of values which specify alternating lengths of lines and gaps which describe the pattern.
     *
     * @param borderDash the line dash pattern used when stroking lines
     */
    public void setBorderDash(int... borderDash) {
        // reset callback if there is
        setBorderDash((BorderDashCallback<ScaleContext>) null);
        // stores value
        getAxis().getScale().getAngleLines().setBorderDash(borderDash);
    }

    /**
     * Returns the line dash pattern used when stroking lines, using an array of values which specify alternating lengths of lines and gaps which describe the pattern.
     *
     * @return the line dash pattern used when stroking lines
     */
    public List<Integer> getBorderDash() {
        return getAxis().getScale().getAngleLines().getBorderDash();
    }

    /**
     * Sets the line dash pattern offset.
     *
     * @param borderDashOffset Offset for line dashes.
     */
    public void setBorderDashOffset(double borderDashOffset) {
        // reset callback if there is
        setBorderDashOffset((BorderDashOffsetCallback<ScaleContext>) null);
        // stores value
        getAxis().getScale().getAngleLines().setBorderDashOffset(borderDashOffset);
    }

    /**
     * Returns the line dash pattern offset.
     *
     * @return Offset for line dashes.
     */
    public double getBorderDashOffset() {
        return getAxis().getScale().getAngleLines().getBorderDashOffset();
    }

    /**
     * Returns the border dash callback, if set, otherwise <code>null</code>.
     *
     * @return the border dash callback, if set, otherwise <code>null</code>.
     */
    public BorderDashCallback<ScaleContext> getBorderDashCallback() {
        return borderDashCallback;
    }

    /**
     * Sets the border dash callback, invoked at runtime to provide the dash pattern.
     *
     * @param borderDashCallback the border dash callback.
     */
    public void setBorderDash(BorderDashCallback<ScaleContext> borderDashCallback) {
        // sets the callback
        this.borderDashCallback = borderDashCallback;
        // stores and manages callback
        getAxis().setCallback(getAxis().getConfiguration().getAngleLines(), Property.BORDER_DASH, borderDashCallback, borderDashCallbackProxy);
    }

    /**
     * Sets the border dash callback as a native JavaScript function.
     *
     * @param borderDashCallback the border dash callback.
     */
    public void setBorderDash(NativeCallback borderDashCallback) {
        // resets the callback
        setBorderDash((BorderDashCallback<ScaleContext>) null);
        // stores and manages callback
        getAxis().setCallback(getAxis().getConfiguration().getAngleLines(), Property.BORDER_DASH, borderDashCallback);
    }

    /**
     * Returns an array of integer when the callback has been activated.
     *
     * @param context native object as context.
     * @param borderDashCallback border dash callback instance
     * @param defaultValue default value of options
     * @return an array of integer
     */
    private Array onBorderDash(ScaleContext context, BorderDashCallback<ScaleContext> borderDashCallback, List<Integer> defaultValue) {
        // gets value
        List<Integer> result = ScriptableUtils.getOptionValue(context, borderDashCallback);
        // checks if consistent
        if (result != null) {
            // returns result of callback
            return ArrayInteger.fromOrEmpty(result);
        }
        // default result
        return ArrayInteger.fromOrEmpty(defaultValue);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.unit;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.Locale;
import java.util.Objects;
public class ByteSizeValue implements Writeable, Comparable<ByteSizeValue>, ToXContentFragment {
/**
* We have to lazy initialize the deprecation logger as otherwise a static logger here would be constructed before logging is configured
* leading to a runtime failure (see {@link LogConfigurator#checkErrorListener()} ). The premature construction would come from any
* {@link ByteSizeValue} object constructed in, for example, settings in {@link org.elasticsearch.common.network.NetworkService}.
*/
static class DeprecationLoggerHolder {
    // Initialization-on-demand holder idiom: the logger is only constructed on first
    // access to this class, after logging has been configured (see comment above).
    static DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ByteSizeValue.class);
}
/** A value of zero bytes. */
public static final ByteSizeValue ZERO = new ByteSizeValue(0, ByteSizeUnit.BYTES);

/** @return a value of {@code size} bytes */
public static ByteSizeValue ofBytes(long size) {
    return new ByteSizeValue(size);
}

/** @return a value of {@code size} kilobytes */
public static ByteSizeValue ofKb(long size) {
    return new ByteSizeValue(size, ByteSizeUnit.KB);
}

/** @return a value of {@code size} megabytes */
public static ByteSizeValue ofMb(long size) {
    return new ByteSizeValue(size, ByteSizeUnit.MB);
}

/** @return a value of {@code size} gigabytes */
public static ByteSizeValue ofGb(long size) {
    return new ByteSizeValue(size, ByteSizeUnit.GB);
}

/** @return a value of {@code size} terabytes */
public static ByteSizeValue ofTb(long size) {
    return new ByteSizeValue(size, ByteSizeUnit.TB);
}

/** @return a value of {@code size} petabytes */
public static ByteSizeValue ofPb(long size) {
    return new ByteSizeValue(size, ByteSizeUnit.PB);
}
private final long size;
private final ByteSizeUnit unit;
/**
 * Reads a value from the given stream: the zig-zag encoded size followed by the unit.
 * Mirror of {@link #writeTo(StreamOutput)}.
 *
 * @param in the stream to read from
 * @throws IOException on stream failure
 */
public ByteSizeValue(StreamInput in) throws IOException {
    size = in.readZLong();
    unit = ByteSizeUnit.readFrom(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
    // Mirror of the StreamInput constructor: zig-zag encoded size, then the unit.
    out.writeZLong(size);
    unit.writeTo(out);
}
/** Creates a value of {@code bytes} bytes. */
public ByteSizeValue(long bytes) {
    this(bytes, ByteSizeUnit.BYTES);
}
/**
 * Creates a value of {@code size} in the given {@code unit}.
 *
 * @param size the number of units; {@code -1} is only accepted together with
 *             {@link ByteSizeUnit#BYTES}
 * @param unit the unit of {@code size}
 * @throws IllegalArgumentException if the value is below {@code -1} or its byte count
 *         would overflow a {@code long}
 */
public ByteSizeValue(long size, ByteSizeUnit unit) {
    if (size < -1 || (size == -1 && unit != ByteSizeUnit.BYTES)) {
        throw new IllegalArgumentException("Values less than -1 bytes are not supported: " + size + unit.getSuffix());
    }
    // Overflow guard: floor(Long.MAX_VALUE / bytesPerUnit) is the largest size whose
    // byte count still fits in a long.
    if (size > Long.MAX_VALUE / unit.toBytes(1)) {
        throw new IllegalArgumentException(
            "Values greater than " + Long.MAX_VALUE + " bytes are not supported: " + size + unit.getSuffix());
    }
    this.size = size;
    this.unit = unit;
}
// For testing: package-private accessor for the raw size (before unit conversion)
long getSize() {
    return size;
}

// For testing: package-private accessor for the unit
ByteSizeUnit getUnit() {
    return unit;
}
/**
 * Returns the byte count narrowed to an {@code int}.
 *
 * @return the byte count as an int
 * @throws IllegalArgumentException if the byte count exceeds {@link Integer#MAX_VALUE}
 * @deprecated use {@link #getBytes()} which does not truncate
 */
@Deprecated
public int bytesAsInt() {
    long byteCount = getBytes();
    if (byteCount > Integer.MAX_VALUE) {
        throw new IllegalArgumentException("size [" + toString() + "] is bigger than max int");
    }
    return (int) byteCount;
}
/** @return this size as a whole number of bytes */
public long getBytes() {
    return unit.toBytes(size);
}

/** @return this size as a whole (truncated) number of kilobytes */
public long getKb() {
    return unit.toKB(size);
}

/** @return this size as a whole (truncated) number of megabytes */
public long getMb() {
    return unit.toMB(size);
}

/** @return this size as a whole (truncated) number of gigabytes */
public long getGb() {
    return unit.toGB(size);
}

/** @return this size as a whole (truncated) number of terabytes */
public long getTb() {
    return unit.toTB(size);
}

/** @return this size as a whole (truncated) number of petabytes */
public long getPb() {
    return unit.toPB(size);
}
// Fractional conversions: byte count divided by the per-unit byte factor
// (ByteSizeUnit.C1..C5 are the byte counts of 1kb..1pb, as used by toString()).

/** @return this size as a fractional number of kilobytes */
public double getKbFrac() {
    return ((double) getBytes()) / ByteSizeUnit.C1;
}

/** @return this size as a fractional number of megabytes */
public double getMbFrac() {
    return ((double) getBytes()) / ByteSizeUnit.C2;
}

/** @return this size as a fractional number of gigabytes */
public double getGbFrac() {
    return ((double) getBytes()) / ByteSizeUnit.C3;
}

/** @return this size as a fractional number of terabytes */
public double getTbFrac() {
    return ((double) getBytes()) / ByteSizeUnit.C4;
}

/** @return this size as a fractional number of petabytes */
public double getPbFrac() {
    return ((double) getBytes()) / ByteSizeUnit.C5;
}
/**
 * Returns a representation guaranteed to round-trip through
 * {@link #parseBytesSizeValue(String, ByteSizeValue, String)}. Unlike
 * {@link #toString()} it never emits fractional or rounded values, so prefer
 * this method when serialising the value to JSON.
 */
public String getStringRep() {
    // Non-positive magnitudes (0 and the -1 sentinel) are rendered unit-less.
    return size <= 0 ? String.valueOf(size) : size + unit.getSuffix();
}
/**
 * Human-readable rendering: the largest unit whose value is at least 1,
 * formatted to one decimal place. May lose precision — use
 * {@link #getStringRep()} for a lossless, parseable form.
 */
@Override
public String toString() {
    final long bytes = getBytes();
    if (bytes >= ByteSizeUnit.C5) {
        return Strings.format1Decimals(getPbFrac(), ByteSizeUnit.PB.getSuffix());
    }
    if (bytes >= ByteSizeUnit.C4) {
        return Strings.format1Decimals(getTbFrac(), ByteSizeUnit.TB.getSuffix());
    }
    if (bytes >= ByteSizeUnit.C3) {
        return Strings.format1Decimals(getGbFrac(), ByteSizeUnit.GB.getSuffix());
    }
    if (bytes >= ByteSizeUnit.C2) {
        return Strings.format1Decimals(getMbFrac(), ByteSizeUnit.MB.getSuffix());
    }
    if (bytes >= ByteSizeUnit.C1) {
        return Strings.format1Decimals(getKbFrac(), ByteSizeUnit.KB.getSuffix());
    }
    return Strings.format1Decimals((double) bytes, ByteSizeUnit.BYTES.getSuffix());
}
/**
 * Parses a size string such as "10kb"; convenience overload with no default
 * (a {@code null} input yields {@code null}).
 */
public static ByteSizeValue parseBytesSizeValue(String sValue, String settingName) throws ElasticsearchParseException {
    return parseBytesSizeValue(sValue, null, settingName);
}
/**
 * Parses a human-readable size string ("10kb", "512mb", "1g", ...) into a
 * {@link ByteSizeValue}. Matching is case-insensitive and surrounding
 * whitespace is ignored. The unit-less values "-1" and "0" are accepted as
 * special cases; any other unit-less value is rejected.
 *
 * @param sValue       the string to parse; {@code null} yields {@code defaultValue}
 * @param defaultValue value returned when {@code sValue} is {@code null}
 * @param settingName  setting name used in error messages; must not be {@code null}
 * @throws ElasticsearchParseException if the value cannot be parsed or has no unit
 */
public static ByteSizeValue parseBytesSizeValue(String sValue, ByteSizeValue defaultValue, String settingName)
        throws ElasticsearchParseException {
    settingName = Objects.requireNonNull(settingName);
    if (sValue == null) {
        return defaultValue;
    }
    String lowerSValue = sValue.toLowerCase(Locale.ROOT).trim();
    // Note: "kb" does not end with "k", so the single-letter branches cannot
    // shadow the two-letter ones.
    if (lowerSValue.endsWith("k")) {
        return parse(sValue, lowerSValue, "k", ByteSizeUnit.KB, settingName);
    } else if (lowerSValue.endsWith("kb")) {
        return parse(sValue, lowerSValue, "kb", ByteSizeUnit.KB, settingName);
    } else if (lowerSValue.endsWith("m")) {
        return parse(sValue, lowerSValue, "m", ByteSizeUnit.MB, settingName);
    } else if (lowerSValue.endsWith("mb")) {
        return parse(sValue, lowerSValue, "mb", ByteSizeUnit.MB, settingName);
    } else if (lowerSValue.endsWith("g")) {
        return parse(sValue, lowerSValue, "g", ByteSizeUnit.GB, settingName);
    } else if (lowerSValue.endsWith("gb")) {
        return parse(sValue, lowerSValue, "gb", ByteSizeUnit.GB, settingName);
    } else if (lowerSValue.endsWith("t")) {
        return parse(sValue, lowerSValue, "t", ByteSizeUnit.TB, settingName);
    } else if (lowerSValue.endsWith("tb")) {
        return parse(sValue, lowerSValue, "tb", ByteSizeUnit.TB, settingName);
    } else if (lowerSValue.endsWith("p")) {
        return parse(sValue, lowerSValue, "p", ByteSizeUnit.PB, settingName);
    } else if (lowerSValue.endsWith("pb")) {
        return parse(sValue, lowerSValue, "pb", ByteSizeUnit.PB, settingName);
    } else if (lowerSValue.endsWith("b")) {
        // Fix: route the bare-"b" suffix through parse() like every other unit,
        // so malformed or fractional input ("1.5b", "xb") raises
        // ElasticsearchParseException (per this method's contract) instead of
        // leaking a raw NumberFormatException from Long.parseLong.
        return parse(sValue, lowerSValue, "b", ByteSizeUnit.BYTES, settingName);
    } else if (lowerSValue.equals("-1")) {
        // Allow this special value to be unit-less:
        return new ByteSizeValue(-1, ByteSizeUnit.BYTES);
    } else if (lowerSValue.equals("0")) {
        // Allow this special value to be unit-less:
        return new ByteSizeValue(0, ByteSizeUnit.BYTES);
    } else {
        // Missing units:
        throw new ElasticsearchParseException(
            "failed to parse setting [{}] with value [{}] as a size in bytes: unit is missing or unrecognized", settingName,
            sValue);
    }
}
/**
 * Parses the numeric part of {@code normalized} (the lower-cased, trimmed input)
 * after stripping {@code suffix}, and scales it by {@code unit}.
 * Integer values are taken as-is; fractional values are accepted with a
 * deprecation warning and truncated to whole bytes. All failures are rethrown
 * as {@link ElasticsearchParseException}.
 */
private static ByteSizeValue parse(final String initialInput, final String normalized, final String suffix, ByteSizeUnit unit,
        final String settingName) {
    final String s = normalized.substring(0, normalized.length() - suffix.length()).trim();
    try {
        try {
            // Fast path: plain integer magnitude.
            return new ByteSizeValue(Long.parseLong(s), unit);
        } catch (final NumberFormatException e) {
            try {
                // Legacy path: fractional values ("1.5mb") are deprecated but
                // still honored, truncated to whole bytes.
                final double doubleValue = Double.parseDouble(s);
                DeprecationLoggerHolder.deprecationLogger
                    .deprecate(DeprecationCategory.PARSING, "fractional_byte_values",
                        "Fractional bytes values are deprecated. Use non-fractional bytes values instead: [{}] found for setting [{}]",
                        initialInput, settingName);
                return new ByteSizeValue((long) (doubleValue * unit.toBytes(1)));
            } catch (final NumberFormatException ignored) {
                // Neither long nor double: report the original (long) failure as cause.
                throw new ElasticsearchParseException("failed to parse setting [{}] with value [{}]", e, settingName, initialInput);
            }
        }
    } catch (IllegalArgumentException e) {
        // Raised by the ByteSizeValue constructor for out-of-range magnitudes.
        throw new ElasticsearchParseException("failed to parse setting [{}] with value [{}] as a size in bytes", e, settingName,
            initialInput);
    }
}
/**
 * Two values are equal when their absolute byte counts match, regardless of the
 * unit they were expressed in (consistent with {@link #compareTo(ByteSizeValue)}).
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    return o != null && getClass() == o.getClass() && compareTo((ByteSizeValue) o) == 0;
}
/**
 * Hash of the absolute byte count, so equal sizes expressed in different units
 * hash identically (consistent with {@link #equals(Object)}).
 */
@Override
public int hashCode() {
    return Long.hashCode(unit.toBytes(1) * size);
}

/** Orders values by absolute byte count. */
@Override
public int compareTo(ByteSizeValue other) {
    final long mine = size * unit.toBytes(1);
    final long theirs = other.size * other.unit.toBytes(1);
    return Long.compare(mine, theirs);
}
/**
 * Renders this value as a single XContent string using {@link #toString()}
 * (human-readable, possibly rounded).
 */
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    return builder.value(toString());
}
}
| |
package apoc.load;
import apoc.util.TestUtil;
import org.apache.commons.lang.math.IntRange;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.helpers.collection.Iterators;
import org.neo4j.test.TestGraphDatabaseFactory;
import java.sql.Time;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.IntStream;
import static apoc.util.MapUtil.map;
import static apoc.util.TestUtil.testCall;
import static apoc.util.TestUtil.testResult;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
 * Integration tests for the APOC {@code apoc.load.xml} / {@code apoc.load.xmlSimple}
 * procedures: loading XML into nested map structures, XPath selection, and error
 * handling. The expected-output constants below are the {@code toString()} rendering
 * of the maps the procedures produce, so they must match character-for-character.
 */
public class XmlTest {

    // Expected rendering of databases.xml in the generic "_children" form.
    public static final String XML_AS_NESTED_MAP =
            "{_type=parent, name=databases, " +
                    "_children=[" +
                    "{_type=child, name=Neo4j, _text=Neo4j is a graph database}, " +
                    "{_type=child, name=relational, _children=[" +
                    "{_type=grandchild, name=MySQL, _text=MySQL is a database & relational}, " +
                    "{_type=grandchild, name=Postgres, _text=Postgres is a relational database}]}]}";

    // Expected rendering in the "simple" form, where children are keyed by element type.
    public static final String XML_AS_NESTED_SIMPLE_MAP =
            "{_type=parent, name=databases, " +
                    "_child=[" +
                    "{_type=child, name=Neo4j, _text=Neo4j is a graph database}, " +
                    "{_type=child, name=relational, _grandchild=[" +
                    "{_type=grandchild, name=MySQL, _text=MySQL is a database & relational}, " +
                    "{_type=grandchild, name=Postgres, _text=Postgres is a relational database}]}]}";

    // Expected rendering of an XPath selection of a single book from books.xml.
    public static final String XML_XPATH_AS_NESTED_MAP =
            "[{_type=book, id=bk103, _children=[{_type=author, _text=Corets, Eva}, {_type=title, _text=Maeve Ascendant}, {_type=genre, _text=Fantasy}, {_type=price, _text=5.95}, {_type=publish_date, _text=2000-11-17}, {_type=description, _text=After the collapse of a nanotechnology " +
                    "society in England, the young survivors lay the " +
                    "foundation for a new society.}]}]";

    private GraphDatabaseService db;

    // Fresh impermanent database per test, with file imports enabled and the Xml
    // procedures registered.
    @Before
    public void setUp() throws Exception {
        db = new TestGraphDatabaseFactory().newImpermanentDatabaseBuilder().setConfig("apoc.import.file.enabled", "true").newGraphDatabase();
        TestUtil.registerProcedure(db, Xml.class);
    }

    @After
    public void tearDown() {
        db.shutdown();
    }

    @Test
    public void testLoadXml() throws Exception {
        testCall(db, "CALL apoc.load.xml('file:databases.xml')", // YIELD value RETURN value
                (row) -> {
                    Object value = row.get("value");
                    assertEquals(XML_AS_NESTED_MAP, value.toString());
                });
    }

    @Test
    public void testLoadXmlSimple() throws Exception {
        testCall(db, "CALL apoc.load.xmlSimple('file:databases.xml')", // YIELD value RETURN value
                (row) -> {
                    Object value = row.get("value");
                    assertEquals(XML_AS_NESTED_SIMPLE_MAP, value.toString());
                });
    }

    // Mixed element/text content: text nodes appear alongside child elements.
    @Test
    public void testMixedContent() {
        testCall(db, "CALL apoc.load.xml('file:src/test/resources/mixedcontent.xml')", // YIELD value RETURN value
                (row) -> {
                    Object value = row.get("value");
                    //assertEquals("{_type=root, _children=[{_type=text, _children=[text0, {_type=mixed}, text1]}, {_type=text, _text=text as cdata}]}", value.toString());
                    assertEquals("{_type=root, _children=[{_type=text, _children=[{_type=mixed}, text0, text1]}, {_type=text, _text=text as cdata}]}", value.toString());
                });
    }

    // All twelve book ids bk101..bk112 must be present when unwinding the catalog.
    @Test
    public void testBookIds() {
        testResult(db, "call apoc.load.xml('file:src/test/resources/books.xml') yield value as catalog\n" +
                "UNWIND catalog._children as book\n" +
                "RETURN book.id as id\n", result -> {
            List<Object> ids = Iterators.asList(result.columnAs("id"));
            assertTrue(IntStream.rangeClosed(1,12).allMatch(value -> ids.contains(String.format("bk1%02d",value))));
        });
    }

    // Filters each book's children down to author/title pairs inside Cypher.
    @Test
    public void testFilterIntoCollection() {
        testResult(db, "call apoc.load.xml('file:src/test/resources/books.xml') yield value as catalog\n" +
                        " UNWIND catalog._children as book\n" +
                        " RETURN book.id, [attr IN book._children WHERE attr._type IN ['author','title'] | [attr._type, attr._text]] as pairs"
                , result -> {
                    assertEquals("+----------------------------------------------------------------------------------------------------------------+\n" +
                            "| book.id | pairs |\n" +
                            "+----------------------------------------------------------------------------------------------------------------+\n" +
                            "| \"bk101\" | [[\"author\",\"Gambardella, Matthew\"],[\"author\",\"Arciniegas, Fabio\"],[\"title\",\"XML Developer's Guide\"]] |\n" +
                            "| \"bk102\" | [[\"author\",\"Ralls, Kim\"],[\"title\",\"Midnight Rain\"]] |\n" +
                            "| \"bk103\" | [[\"author\",\"Corets, Eva\"],[\"title\",\"Maeve Ascendant\"]] |\n" +
                            "| \"bk104\" | [[\"author\",\"Corets, Eva\"],[\"title\",\"Oberon's Legacy\"]] |\n" +
                            "| \"bk105\" | [[\"author\",\"Corets, Eva\"],[\"title\",\"The Sundered Grail\"]] |\n" +
                            "| \"bk106\" | [[\"author\",\"Randall, Cynthia\"],[\"title\",\"Lover Birds\"]] |\n" +
                            "| \"bk107\" | [[\"author\",\"Thurman, Paula\"],[\"title\",\"Splish Splash\"]] |\n" +
                            "| \"bk108\" | [[\"author\",\"Knorr, Stefan\"],[\"title\",\"Creepy Crawlies\"]] |\n" +
                            "| \"bk109\" | [[\"author\",\"Kress, Peter\"],[\"title\",\"Paradox Lost\"]] |\n" +
                            "| \"bk110\" | [[\"author\",\"O'Brien, Tim\"],[\"title\",\"Microsoft .NET: The Programming Bible\"]] |\n" +
                            "| \"bk111\" | [[\"author\",\"O'Brien, Tim\"],[\"title\",\"MSXML3: A Comprehensive Guide\"]] |\n" +
                            "| \"bk112\" | [[\"author\",\"Galos, Mike\"],[\"title\",\"Visual Studio 7: A Comprehensive Guide\"]] |\n" +
                            "+----------------------------------------------------------------------------------------------------------------+\n" +
                            "12 rows\n", result.resultAsString());
                });
    }

    // NOTE(review): for bk101 (two authors) pairs[1] is the second author, not the
    // title, so the "author"/"title" columns are positional, not semantic.
    @Test
    public void testReturnCollectionElements() {
        testResult(db, "call apoc.load.xml('file:src/test/resources/books.xml') yield value as catalog\n"+
                        "UNWIND catalog._children as book\n" +
                        "WITH book.id as id, [attr IN book._children WHERE attr._type IN ['author','title'] | attr._text] as pairs\n" +
                        "RETURN id, pairs[0] as author, pairs[1] as title"
                , result -> {
                    assertEquals("+-----------------------------------------------------------------------------+\n" +
                            "| id | author | title |\n" +
                            "+-----------------------------------------------------------------------------+\n" +
                            "| \"bk101\" | \"Gambardella, Matthew\" | \"Arciniegas, Fabio\" |\n" +
                            "| \"bk102\" | \"Ralls, Kim\" | \"Midnight Rain\" |\n" +
                            "| \"bk103\" | \"Corets, Eva\" | \"Maeve Ascendant\" |\n" +
                            "| \"bk104\" | \"Corets, Eva\" | \"Oberon's Legacy\" |\n" +
                            "| \"bk105\" | \"Corets, Eva\" | \"The Sundered Grail\" |\n" +
                            "| \"bk106\" | \"Randall, Cynthia\" | \"Lover Birds\" |\n" +
                            "| \"bk107\" | \"Thurman, Paula\" | \"Splish Splash\" |\n" +
                            "| \"bk108\" | \"Knorr, Stefan\" | \"Creepy Crawlies\" |\n" +
                            "| \"bk109\" | \"Kress, Peter\" | \"Paradox Lost\" |\n" +
                            "| \"bk110\" | \"O'Brien, Tim\" | \"Microsoft .NET: The Programming Bible\" |\n" +
                            "| \"bk111\" | \"O'Brien, Tim\" | \"MSXML3: A Comprehensive Guide\" |\n" +
                            "| \"bk112\" | \"Galos, Mike\" | \"Visual Studio 7: A Comprehensive Guide\" |\n" +
                            "+-----------------------------------------------------------------------------+\n" +
                            "12 rows\n", result.resultAsString());
                });
    }

    // XPath with an attribute predicate selecting a single author element.
    @Test
    public void testLoadXmlXpathAuthorFromBookId () {
        testCall(db, "CALL apoc.load.xml('file:src/test/resources/books.xml', '/catalog/book[@id=\"bk102\"]/author') yield value as result",
                (r) -> {
                    assertEquals("author", ((Map) r.get("result")).get("_type"));
                    assertEquals("Ralls, Kim", ((Map) r.get("result")).get("_text"));
                });
    }

    // XPath with a child-element predicate selecting a sibling element.
    @Test
    public void testLoadXmlXpathGenreFromBookTitle () {
        testCall(db, "CALL apoc.load.xml('file:src/test/resources/books.xml', '/catalog/book[title=\"Maeve Ascendant\"]/genre') yield value as result",
                (r) -> {
                    assertEquals("genre", ((Map) r.get("result")).get("_type"));
                    assertEquals("Fantasy", ((Map) r.get("result")).get("_text"));
                });
    }

    // XPath selecting the whole book element ("/." self step).
    @Test
    public void testLoadXmlXpathReturnBookFromBookTitle () {
        testCall(db, "CALL apoc.load.xml('file:src/test/resources/books.xml', '/catalog/book[title=\"Maeve Ascendant\"]/.') yield value as result",
                (r) -> {
                    Object value = r.values();
                    assertEquals(XML_XPATH_AS_NESTED_MAP, value.toString());
                });
    }

    // XPath matching multiple books; walks the result rows in order and spot-checks
    // the first two children (author/title) of each.
    @Test
    public void testLoadXmlXpathBooKsFromGenre () {
        testResult(db, "CALL apoc.load.xml('file:src/test/resources/books.xml', '/catalog/book[genre=\"Computer\"]') yield value as result",
                (r) -> {
                    Map<String, Object> next = r.next();
                    Object result = next.get("result");
                    Map resultMap = (Map) next.get("result");
                    Object children = resultMap.get("_children");
                    List<Object> childrenList = (List<Object>) children;
                    assertEquals("bk101", ((Map) result).get("id"));
                    assertEquals("author", ((Map) childrenList.get(0)).get("_type"));
                    assertEquals("Gambardella, Matthew", ((Map) childrenList.get(0)).get("_text"));
                    assertEquals("author", ((Map) childrenList.get(1)).get("_type"));
                    assertEquals("Arciniegas, Fabio", ((Map) childrenList.get(1)).get("_text"));
                    next = r.next();
                    result = next.get("result");
                    resultMap = (Map) next.get("result");
                    children = resultMap.get("_children");
                    childrenList = (List<Object>) children;
                    assertEquals("bk110", ((Map) result).get("id"));
                    assertEquals("author", ((Map) childrenList.get(0)).get("_type"));
                    assertEquals("O'Brien, Tim", ((Map) childrenList.get(0)).get("_text"));
                    assertEquals("title", ((Map) childrenList.get(1)).get("_type"));
                    assertEquals("Microsoft .NET: The Programming Bible", ((Map) childrenList.get(1)).get("_text"));
                    next = r.next();
                    result = next.get("result");
                    resultMap = (Map) next.get("result");
                    children = resultMap.get("_children");
                    childrenList = (List<Object>) children;
                    assertEquals("bk111", ((Map) result).get("id"));
                    assertEquals("author", ((Map) childrenList.get(0)).get("_type"));
                    assertEquals("O'Brien, Tim", ((Map) childrenList.get(0)).get("_text"));
                    assertEquals("title", ((Map) childrenList.get(1)).get("_type"));
                    assertEquals("MSXML3: A Comprehensive Guide", ((Map) childrenList.get(1)).get("_text"));
                    next = r.next();
                    result = next.get("result");
                    resultMap = (Map) next.get("result");
                    children = resultMap.get("_children");
                    childrenList = (List<Object>) children;
                    assertEquals("bk112", ((Map) result).get("id"));
                    assertEquals("author", ((Map) childrenList.get(0)).get("_type"));
                    assertEquals("Galos, Mike", ((Map) childrenList.get(0)).get("_text"));
                    assertEquals("title", ((Map) childrenList.get(1)).get("_type"));
                    assertEquals("Visual Studio 7: A Comprehensive Guide", ((Map) childrenList.get(1)).get("_text"));
                    assertEquals(false, r.hasNext());
                });
    }

    // With failOnError:false a missing file ("books.xm") yields an empty map
    // instead of raising an error.
    @Test
    public void testLoadXmlNoFailOnError () {
        testCall(db, "CALL apoc.load.xml('file:src/test/resources/books.xm', '', {failOnError:false}) yield value as result",
                (r) -> {
                    Map resultMap = (Map) r.get("result");
                    assertEquals(Collections.emptyMap(), resultMap);
                });
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.view;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.locks.ICarbonLock;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.RelationIdentifier;
import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
import org.apache.carbondata.core.statusmanager.SegmentStatus;
import org.apache.carbondata.core.statusmanager.SegmentStatusManager;
import org.apache.carbondata.core.util.path.CarbonTablePath;
import com.google.gson.Gson;
import org.apache.log4j.Logger;
/**
 * Maintains all materialized-view (MV) schemas: reading/writing them through
 * {@link MVProvider}, caching them in an {@link MVCatalog}, tracking their
 * enabled/disabled status, and reacting to main-table operations (compaction,
 * truncate/overwrite, drop).
 */
@InterfaceAudience.Internal
public abstract class MVManager {

    private static final Logger LOGGER =
        LogServiceFactory.getLogService(MVManager.class.getName());

    // Backing store for MV schema and status files.
    private final MVProvider schemaProvider = new MVProvider();

    // Cached catalog; refreshed lazily in getCatalog(factory, schemas) under "lock".
    private volatile MVCatalog<?> catalog;

    private final Object lock = new Object();

    public MVManager() {
    }

    /** @return names of all databases to scan for MV schemas */
    public abstract List<String> getDatabases();

    /** @return storage location of the given database */
    public abstract String getDatabaseLocation(String databaseName);

    /** @return true when at least one MV is defined on the given table */
    public boolean hasSchemaOnTable(CarbonTable table) {
        return !table.getMVTablesMap().isEmpty();
    }

    /** @return true when the MV's loaded segments are in sync with its parent tables */
    public boolean isMVInSyncWithParentTables(MVSchema mvSchema) throws IOException {
        return schemaProvider.isViewCanBeEnabled(mvSchema, true);
    }

    /**
     * It gives all mv schemas of a given table.
     * For show mv command.
     */
    public List<MVSchema> getSchemasOnTable(CarbonTable table)
        throws IOException {
        return getSchemas(table.getMVTablesMap());
    }

    /**
     * It gives all mv schemas of a given table.
     * For show mv command.
     */
    public List<MVSchema> getSchemasOnTable(String databaseName,
        CarbonTable carbonTable) throws IOException {
        return schemaProvider.getSchemas(this, databaseName, carbonTable);
    }

    /**
     * It gives all mv schemas from store.
     * Per-database failures other than IOException are logged and skipped so one
     * bad database does not hide the others.
     */
    public List<MVSchema> getSchemas() throws IOException {
        List<MVSchema> schemas = new ArrayList<>();
        for (String database : this.getDatabases()) {
            try {
                schemas.addAll(this.getSchemas(database));
            } catch (IOException ex) {
                // I/O problems are fatal; propagate.
                throw ex;
            } catch (Exception ex) {
                // Best-effort: skip this database but keep collecting the rest.
                LOGGER.error("Exception Occurred: Skipping MV schemas from database: " + database);
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug(ex.getMessage());
                }
            }
        }
        return schemas;
    }

    /**
     * It gives all mv schemas from given databases in the store.
     * Map keys are database names, values the MV table names in that database.
     */
    public List<MVSchema> getSchemas(Map<String, List<String>> mvTablesMap) throws IOException {
        List<MVSchema> schemas = new ArrayList<>();
        for (Map.Entry<String, List<String>> databaseEntry : mvTablesMap.entrySet()) {
            String database = databaseEntry.getKey();
            List<String> mvTables = databaseEntry.getValue();
            for (String mvTable : mvTables) {
                try {
                    schemas.add(this.getSchema(database, mvTable));
                } catch (IOException ex) {
                    LOGGER.error("Error while fetching MV schema " + mvTable + " from database: " + database);
                    throw ex;
                } catch (Exception ex) {
                    // Best-effort: skip this MV but keep collecting the rest.
                    LOGGER.error(
                        "Exception Occurred: Skipping MV schema " + mvTable + " from database: " + database);
                    if (LOGGER.isDebugEnabled()) {
                        LOGGER.debug(ex.getMessage());
                    }
                }
            }
        }
        return schemas;
    }

    /**
     * It gives all mv schemas of one database from store.
     */
    public List<MVSchema> getSchemas(String databaseName) throws IOException {
        return schemaProvider.getSchemas(this, databaseName);
    }

    /** Loads a single MV schema (not registering it). */
    public MVSchema getSchema(String databaseName, String viewName) throws IOException {
        return schemaProvider.getSchema(this, databaseName, viewName, false);
    }

    /** Loads a single MV schema, optionally as part of MV registration. */
    public MVSchema getSchema(String databaseName, String viewName, boolean isRegisterMV)
        throws IOException {
        return schemaProvider.getSchema(this, databaseName, viewName, isRegisterMV);
    }

    /**
     * Saves the mv schema to storage
     *
     * @param viewSchema mv schema
     */
    public void createSchema(String databaseName, MVSchema viewSchema)
        throws IOException {
        schemaProvider.saveSchema(this, databaseName, viewSchema);
    }

    /**
     * Drops the mv schema from storage
     *
     * @param viewName mv name
     */
    public void deleteSchema(String databaseName, String viewName) throws IOException {
        schemaProvider.dropSchema(this, databaseName, viewName);
    }

    /**
     * Get the mv catalog (possibly stale; no refresh).
     */
    public MVCatalog<?> getCatalog() {
        return catalog;
    }

    /**
     * Get the mv catalog, refreshed against the schemas currently on disk:
     * newly found schemas are registered, schemas no longer on disk are
     * deregistered. Whole refresh happens under "lock" so concurrent callers
     * observe a consistent catalog.
     *
     * @param catalogFactory used to create the catalog on first call
     * @param currSchemas    schemas currently registered in the caller's view
     */
    public MVCatalog<?> getCatalog(
        MVCatalogFactory<?> catalogFactory,
        List<MVSchema> currSchemas) throws IOException {
        MVCatalog<?> catalog = this.catalog;
        synchronized (lock) {
            // Re-read under the lock in case another thread installed it meanwhile.
            catalog = this.catalog;
            if (catalog == null) {
                catalog = catalogFactory.newCatalog();
            }
            List<MVSchema> schemas = getSchemas();
            // Fast path: nothing changed on disk.
            if (schemas.size() == currSchemas.size() && currSchemas.containsAll(schemas)) {
                return catalog;
            }
            for (MVSchema schema : schemas) {
                try {
                    // register the schemas that are not already present in catalog.
                    if (!currSchemas.contains(schema)) {
                        catalog.registerSchema(schema);
                    }
                } catch (Exception e) {
                    // Ignore the schema
                    LOGGER.error(
                        "Error while registering schema for mv: " + schema.getIdentifier().getTableName());
                    if (LOGGER.isDebugEnabled()) {
                        LOGGER.debug(e.getMessage());
                    }
                }
            }
            for (MVSchema currSchema : currSchemas) {
                try {
                    // deregister the schemas from catalog if not present in the path.
                    if (!schemas.contains(currSchema)) {
                        catalog.deregisterSchema(currSchema.getIdentifier());
                    }
                } catch (Exception e) {
                    // Ignore the schema
                    LOGGER.error("Error while deregistering schema for mv: " + currSchema.getIdentifier()
                        .getTableName());
                    if (LOGGER.isDebugEnabled()) {
                        LOGGER.debug(e.getMessage());
                    }
                }
            }
            this.catalog = catalog;
        }
        return catalog;
    }

    /**
     * In case of compaction on mv table,this method will merge the segment list of main table
     * and return updated segment mapping
     *
     * @param mergedLoadName to find which all segments are merged to new compacted segment
     * @param viewSchema of mv table
     * @param viewLoadMetadataDetails of mv table
     * @return updated segment map after merging segment list (serialized as JSON)
     */
    @SuppressWarnings("unchecked")
    public static String getUpdatedSegmentMap(String mergedLoadName,
        MVSchema viewSchema,
        LoadMetadataDetails[] viewLoadMetadataDetails) {
        Map<String, List<String>> segmentMapping = new HashMap<>();
        List<RelationIdentifier> relationIdentifiers = viewSchema.getRelatedTables();
        for (RelationIdentifier relationIdentifier : relationIdentifiers) {
            for (LoadMetadataDetails loadMetadataDetail : viewLoadMetadataDetails) {
                // Only loads compacted into the given merged load contribute segments.
                if (loadMetadataDetail.getSegmentStatus() == SegmentStatus.COMPACTED) {
                    if (mergedLoadName.equalsIgnoreCase(loadMetadataDetail.getMergedLoadName())) {
                        // ExtraInfo holds the per-parent-table segment map as JSON.
                        Map segmentMap = new Gson().fromJson(loadMetadataDetail.getExtraInfo(), Map.class);
                        if (segmentMapping.isEmpty()) {
                            // First contribution seeds the mapping wholesale.
                            segmentMapping.putAll(segmentMap);
                        } else {
                            // Later contributions append their segments under the
                            // "database.table" key of the current parent table.
                            segmentMapping.get(relationIdentifier.getDatabaseName() + CarbonCommonConstants.POINT
                                + relationIdentifier.getTableName()).addAll(
                                (List<String>) segmentMap.get(
                                    relationIdentifier.getDatabaseName() + CarbonCommonConstants.POINT
                                        + relationIdentifier.getTableName()));
                        }
                    }
                }
            }
        }
        Gson gson = new Gson();
        return gson.toJson(segmentMapping);
    }

    /**
     * Get enabled mv status details across all databases. Non-I/O failures for a
     * database are logged (debug) and skipped.
     */
    public List<MVStatusDetail> getEnabledStatusDetails() throws IOException {
        List<MVStatusDetail> statusDetails = new ArrayList<>();
        for (String database : this.getDatabases()) {
            try {
                statusDetails.addAll(this.getEnabledStatusDetails(database));
            } catch (IOException ex) {
                throw ex;
            } catch (Exception ex) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug(ex.getMessage());
                }
            }
        }
        return statusDetails;
    }

    /**
     * Get enabled mv status details for one database.
     */
    List<MVStatusDetail> getEnabledStatusDetails(String databaseName)
        throws IOException {
        List<MVStatusDetail> statusDetails = schemaProvider.getStatusDetails(this, databaseName);
        List<MVStatusDetail> enabledStatusDetails = new ArrayList<>(statusDetails.size());
        for (MVStatusDetail statusDetail : statusDetails) {
            if (statusDetail.getStatus() == MVStatus.ENABLED) {
                enabledStatusDetails.add(statusDetail);
            }
        }
        return enabledStatusDetails;
    }

    /** Updates the status of a single MV; no-op when the schema does not exist. */
    public void setStatus(RelationIdentifier viewIdentifier, MVStatus viewStatus)
        throws IOException {
        MVSchema schema = getSchema(
            viewIdentifier.getDatabaseName(), viewIdentifier.getTableName());
        if (schema != null) {
            schemaProvider.updateStatus(this, Collections.singletonList(schema), viewStatus);
        }
    }

    /** Updates the status of multiple MVs in one call; no-op on null/empty input. */
    public void setStatus(List<MVSchema> viewSchemas, MVStatus viewStatus)
        throws IOException {
        if (viewSchemas != null && !viewSchemas.isEmpty()) {
            schemaProvider.updateStatus(this, viewSchemas, viewStatus);
        }
    }

    /** Marks an MV as DROPPED in the status store; no-op when the schema is missing. */
    public void onDrop(String databaseName, String viewName)
        throws IOException {
        MVSchema viewSchema = getSchema(databaseName, viewName);
        if (viewSchema != null) {
            schemaProvider.updateStatus(
                this, Collections.singletonList(viewSchema), MVStatus.DROPPED);
        }
    }

    /**
     * This method will remove all segments of MV table in case of Insert-Overwrite/Update/Delete
     * operations on main table. Auto-refresh MVs are additionally disabled; all MV
     * segments are marked for delete under the table-status lock.
     *
     * @param schemas mv schemas
     */
    public void onTruncate(List<MVSchema> schemas)
        throws IOException {
        for (MVSchema schema : schemas) {
            // Auto-refreshed MVs can no longer be trusted after a truncate; disable them.
            if (!schema.isRefreshOnManual()) {
                setStatus(schema.identifier, MVStatus.DISABLED);
            }
            RelationIdentifier relationIdentifier = schema.getIdentifier();
            SegmentStatusManager segmentStatusManager = new SegmentStatusManager(AbsoluteTableIdentifier
                .from(relationIdentifier.getTablePath(),
                    relationIdentifier.getDatabaseName(),
                    relationIdentifier.getTableName()));
            ICarbonLock carbonLock = segmentStatusManager.getTableStatusLock();
            try {
                if (carbonLock.lockWithRetries()) {
                    LOGGER.info("Acquired lock for table" + relationIdentifier.getDatabaseName() + "."
                        + relationIdentifier.getTableName() + " for table status update");
                    String metaDataPath =
                        CarbonTablePath.getMetadataPath(relationIdentifier.getTablePath());
                    LoadMetadataDetails[] loadMetadataDetails =
                        SegmentStatusManager.readLoadMetadata(metaDataPath);
                    // Mark every MV segment for delete; cleanup happens later.
                    for (LoadMetadataDetails entry : loadMetadataDetails) {
                        entry.setSegmentStatus(SegmentStatus.MARKED_FOR_DELETE);
                    }
                    SegmentStatusManager.writeLoadDetailsIntoFile(
                        CarbonTablePath.getTableStatusFilePath(relationIdentifier.getTablePath()),
                        loadMetadataDetails);
                } else {
                    LOGGER.error("Not able to acquire the lock for Table status update for table "
                        + relationIdentifier.getDatabaseName() + "." + relationIdentifier
                        .getTableName());
                }
            } finally {
                // Always release the lock, logging whether the unlock succeeded.
                if (carbonLock.unlock()) {
                    LOGGER.info(
                        "Table unlocked successfully after table status update" + relationIdentifier
                            .getDatabaseName() + "." + relationIdentifier.getTableName());
                } else {
                    LOGGER.error(
                        "Unable to unlock Table lock for table" + relationIdentifier.getDatabaseName()
                            + "." + relationIdentifier.getTableName()
                            + " during table status update");
                }
            }
        }
    }
}
| |
package org.pentaho.di.trans.steps.sftpscan;
import org.pentaho.di.core.annotations.Step;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.encryption.Encr;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.*;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
import java.util.List;
/**
* The SFTP Plugin meta class
*/
@Step(
id = "SftpScanStep",
image = "icon.png",
i18nPackageName = "org.pentaho.di.trans.steps.sftpscan",
name = "SftpScan.Name.Default",
description = "SftpScan.Name.Desc",
categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.Utility"
)
public class SFTPScanMeta extends BaseStepMeta implements StepMetaInterface {
// SFTP connection settings.
private String serverName;
private String serverPort;
private String userName;
private String password;
// Remote directory to scan and filename wildcard filter.
private String sftpDirectory;
private String wildcard;
// Key-based authentication (used when usekeyfilename is set).
private boolean usekeyfilename;
// Whether to descend into subdirectories while scanning.
private boolean doRecursiveScan;
private String keyfilename;
private String keyfilepass;
// Transport compression mode ("none" by default).
private String compression;
// proxy
private String proxyType;
private String proxyHost;
private String proxyPort;
private String proxyUsername;
private String proxyPassword;
/**
 * Applies default settings for a freshly created step: standard SSH port,
 * password auth, no compression, non-recursive scan.
 */
@Override
public void setDefault() {
    serverPort = "22";
    usekeyfilename = false;
    compression = "none";
    doRecursiveScan = false;
}
/** Creates the runtime step instance for this metadata. */
@Override
public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans) {
    return new SFTPScan(stepMeta, stepDataInterface, cnr, transMeta, trans);
}
/** Creates the per-execution data holder for this step. */
@Override
public StepDataInterface getStepData() {
    return new SFTPScanData();
}
/**
 * Appends this step's output columns (file metadata) to the incoming row layout:
 * file_folder, file_name, permissions (strings), size, uid, gid (integers),
 * access_date, modification_date (dates). The incoming layout is replaced
 * in place, preserving all upstream fields.
 */
@Override
public void getFields(RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore) throws KettleStepException {
    RowMetaInterface fields = new RowMeta();
    fields.addRowMeta(inputRowMeta);
    fields.addValueMeta(stringField("file_folder", 255, name));
    fields.addValueMeta(stringField("file_name", 255, name));
    fields.addValueMeta(stringField("permissions", 8, name));
    fields.addValueMeta(typedField("size", ValueMeta.TYPE_INTEGER, name));
    fields.addValueMeta(typedField("uid", ValueMeta.TYPE_INTEGER, name));
    fields.addValueMeta(typedField("gid", ValueMeta.TYPE_INTEGER, name));
    fields.addValueMeta(typedField("access_date", ValueMeta.TYPE_DATE, name));
    fields.addValueMeta(typedField("modification_date", ValueMeta.TYPE_DATE, name));
    // Replace the incoming layout with the augmented one.
    inputRowMeta.clear();
    inputRowMeta.addRowMeta(fields);
}

// Builds a string output column with the given display length (precision -1 = unset).
private static ValueMetaInterface stringField(String fieldName, int length, String origin) {
    ValueMetaInterface meta = new ValueMeta(fieldName, ValueMeta.TYPE_STRING);
    meta.setLength(length);
    meta.setPrecision(-1);
    meta.setOrigin(origin);
    return meta;
}

// Builds an output column of the given Kettle type with default length/precision.
private static ValueMetaInterface typedField(String fieldName, int type, String origin) {
    ValueMetaInterface meta = new ValueMeta(fieldName, type);
    meta.setOrigin(origin);
    return meta;
}
/**
 * Serializes this step's settings to the transformation XML. Passwords are
 * encrypted via {@link Encr#encryptPasswordIfNotUsingVariables(String)} before
 * being written; tag names must stay in sync with {@code loadXML}.
 */
@Override
public String getXML() throws KettleException {
    // StringBuilder instead of legacy StringBuffer: the buffer is method-local,
    // so the StringBuffer's per-call synchronization is pure overhead.
    StringBuilder retval = new StringBuilder(200);
    retval.append(super.getXML());
    retval.append("    ").append(XMLHandler.addTagValue("servername", serverName));
    retval.append("    ").append(XMLHandler.addTagValue("serverport", serverPort));
    retval.append("    ").append(XMLHandler.addTagValue("username", userName));
    retval.append("    ").append(
        XMLHandler.addTagValue("password", Encr.encryptPasswordIfNotUsingVariables(getPassword())));
    retval.append("    ").append(XMLHandler.addTagValue("sftpdirectory", sftpDirectory));
    retval.append("    ").append(XMLHandler.addTagValue("wildcard", wildcard));
    retval.append("    ").append(XMLHandler.addTagValue("usekeyfilename", usekeyfilename));
    retval.append("    ").append(XMLHandler.addTagValue("doRecursiveScan", doRecursiveScan));
    retval.append("    ").append(XMLHandler.addTagValue("keyfilename", keyfilename));
    retval.append("    ").append(
        XMLHandler.addTagValue("keyfilepass", Encr.encryptPasswordIfNotUsingVariables(keyfilepass)));
    retval.append("    ").append(XMLHandler.addTagValue("compression", compression));
    retval.append("    ").append(XMLHandler.addTagValue("proxyType", proxyType));
    retval.append("    ").append(XMLHandler.addTagValue("proxyHost", proxyHost));
    retval.append("    ").append(XMLHandler.addTagValue("proxyPort", proxyPort));
    retval.append("    ").append(XMLHandler.addTagValue("proxyUsername", proxyUsername));
    retval.append("    ").append(
        XMLHandler.addTagValue("proxyPassword", Encr.encryptPasswordIfNotUsingVariables(proxyPassword)));
    return retval.toString();
}
@Override
public void loadXML(Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore) throws KettleXMLException {
    // Restores this step's settings from the transformation XML.
    // Tag names must stay in sync with getXML() above.
    try {
        super.loadXML(stepnode, databases, metaStore);
        serverName = XMLHandler.getTagValue(stepnode, "servername");
        serverPort = XMLHandler.getTagValue(stepnode, "serverport");
        userName = XMLHandler.getTagValue(stepnode, "username");
        // NOTE(review): getTagValue returns null for a missing tag; this assumes
        // Encr.decryptPasswordOptionallyEncrypted tolerates null input — confirm.
        password = Encr.decryptPasswordOptionallyEncrypted(XMLHandler.getTagValue(stepnode, "password"));
        sftpDirectory = XMLHandler.getTagValue(stepnode, "sftpdirectory");
        wildcard = XMLHandler.getTagValue(stepnode, "wildcard");
        // Booleans are stored as "Y"/"N"; anything other than "Y" reads as false.
        usekeyfilename = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "usekeyfilename"));
        doRecursiveScan = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "doRecursiveScan"));
        keyfilename = XMLHandler.getTagValue(stepnode, "keyfilename");
        keyfilepass = Encr.decryptPasswordOptionallyEncrypted(XMLHandler.getTagValue(stepnode, "keyfilepass"));
        compression = XMLHandler.getTagValue(stepnode, "compression");
        proxyType = XMLHandler.getTagValue(stepnode, "proxyType");
        proxyHost = XMLHandler.getTagValue(stepnode, "proxyHost");
        proxyPort = XMLHandler.getTagValue(stepnode, "proxyPort");
        proxyUsername = XMLHandler.getTagValue(stepnode, "proxyUsername");
        proxyPassword =
                Encr.decryptPasswordOptionallyEncrypted(XMLHandler.getTagValue(stepnode, "proxyPassword"));
    } catch (KettleXMLException xe) {
        // Wrap with step context so the failing step is identifiable in logs.
        throw new KettleXMLException("Unable to load job entry of type 'SftpScanStep' from XML node", xe);
    }
}
// --- Plain accessors for the step settings. These are read/written by the
// step dialog and (de)serialized by getXML()/loadXML() above. ---

// SFTP connection coordinates.
public String getServerName() {
    return serverName;
}
public void setServerName(String serverName) {
    this.serverName = serverName;
}
public String getServerPort() {
    return serverPort;
}
public void setServerPort(String serverPort) {
    this.serverPort = serverPort;
}
public String getUserName() {
    return userName;
}
public void setUserName(String userName) {
    this.userName = userName;
}
public String getPassword() {
    return password;
}
public void setPassword(String password) {
    this.password = password;
}

// What to scan: remote directory, filename pattern, and recursion flag.
public String getSftpDirectory() {
    return sftpDirectory;
}
public void setSftpDirectory(String sftpDirectory) {
    this.sftpDirectory = sftpDirectory;
}
public String getWildcard() {
    return wildcard;
}
public void setWildcard(String wildcard) {
    this.wildcard = wildcard;
}
public boolean isUsekeyfilename() {
    return usekeyfilename;
}
public void setUsekeyfilename(boolean usekeyfilename) {
    this.usekeyfilename = usekeyfilename;
}
public boolean isDoRecursiveScan() {
    return doRecursiveScan;
}
public void setDoRecursiveScan(boolean doRecursiveScan) {
    this.doRecursiveScan = doRecursiveScan;
}

// Public-key authentication (used when usekeyfilename is true).
public String getKeyfilename() {
    return keyfilename;
}
public void setKeyfilename(String keyfilename) {
    this.keyfilename = keyfilename;
}
public String getKeyfilepass() {
    return keyfilepass;
}
public void setKeyfilepass(String keyfilepass) {
    this.keyfilepass = keyfilepass;
}
public String getCompression() {
    return compression;
}
public void setCompression(String compression) {
    this.compression = compression;
}

// Optional proxy settings for the SFTP connection.
public String getProxyType() {
    return proxyType;
}
public void setProxyType(String proxyType) {
    this.proxyType = proxyType;
}
public String getProxyHost() {
    return proxyHost;
}
public void setProxyHost(String proxyHost) {
    this.proxyHost = proxyHost;
}
public String getProxyPort() {
    return proxyPort;
}
public void setProxyPort(String proxyPort) {
    this.proxyPort = proxyPort;
}
public String getProxyUsername() {
    return proxyUsername;
}
public void setProxyUsername(String proxyUsername) {
    this.proxyUsername = proxyUsername;
}
public String getProxyPassword() {
    return proxyPassword;
}
public void setProxyPassword(String proxyPassword) {
    this.proxyPassword = proxyPassword;
}
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.corext.codemanipulation;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import org.eclipse.jface.text.Region;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.dom.AST;
import org.eclipse.jdt.core.dom.ASTNode;
import org.eclipse.jdt.core.dom.ArrayType;
import org.eclipse.jdt.core.dom.ClassInstanceCreation;
import org.eclipse.jdt.core.dom.CompilationUnit;
import org.eclipse.jdt.core.dom.Expression;
import org.eclipse.jdt.core.dom.FieldAccess;
import org.eclipse.jdt.core.dom.IBinding;
import org.eclipse.jdt.core.dom.IMethodBinding;
import org.eclipse.jdt.core.dom.ITypeBinding;
import org.eclipse.jdt.core.dom.IVariableBinding;
import org.eclipse.jdt.core.dom.ImportDeclaration;
import org.eclipse.jdt.core.dom.MarkerAnnotation;
import org.eclipse.jdt.core.dom.MemberRef;
import org.eclipse.jdt.core.dom.MethodDeclaration;
import org.eclipse.jdt.core.dom.MethodInvocation;
import org.eclipse.jdt.core.dom.MethodRef;
import org.eclipse.jdt.core.dom.MethodRefParameter;
import org.eclipse.jdt.core.dom.Modifier;
import org.eclipse.jdt.core.dom.Name;
import org.eclipse.jdt.core.dom.NormalAnnotation;
import org.eclipse.jdt.core.dom.PackageDeclaration;
import org.eclipse.jdt.core.dom.QualifiedName;
import org.eclipse.jdt.core.dom.QualifiedType;
import org.eclipse.jdt.core.dom.SimpleName;
import org.eclipse.jdt.core.dom.SimpleType;
import org.eclipse.jdt.core.dom.SingleMemberAnnotation;
import org.eclipse.jdt.core.dom.SuperConstructorInvocation;
import org.eclipse.jdt.core.dom.TagElement;
import org.eclipse.jdt.core.dom.ThisExpression;
import org.eclipse.jdt.core.dom.TypeDeclaration;
import org.eclipse.jdt.internal.corext.dom.GenericVisitor;
import org.eclipse.jdt.internal.corext.dom.ScopeAnalyzer;
import org.eclipse.jdt.internal.corext.util.JavaModelUtil;
/**
 * AST visitor that collects the {@link SimpleName} nodes which would require a
 * type import or a static import. Qualified names are reduced to their
 * outermost qualifier before being recorded. Static import candidates are only
 * collected for 5.0-or-higher projects, and only when the referenced member is
 * not already visible in the local scope.
 */
public class ImportReferencesCollector extends GenericVisitor {

    /** Convenience overload of {@link #collect(ASTNode, IJavaProject, Region, boolean, Collection, Collection)} that visits method bodies. */
    public static void collect(ASTNode node, IJavaProject project, Region rangeLimit, Collection<SimpleName> resultingTypeImports, Collection<SimpleName> resultingStaticImports) {
        collect(node, project, rangeLimit, false, resultingTypeImports, resultingStaticImports);
    }

    /**
     * Collects import references inside {@code node} into the two result collections.
     * Only names overlapping {@code rangeLimit} are collected (null means no limit).
     */
    public static void collect(ASTNode node, IJavaProject project, Region rangeLimit, boolean skipMethodBodies, Collection<SimpleName> resultingTypeImports, Collection<SimpleName> resultingStaticImports) {
        ASTNode root= node.getRoot();
        CompilationUnit astRoot= root instanceof CompilationUnit ? (CompilationUnit) root : null;
        node.accept(new ImportReferencesCollector(project, astRoot, rangeLimit, skipMethodBodies, resultingTypeImports, resultingStaticImports));
    }

    private CompilationUnit fASTRoot; // may be null when the visited node is detached from a CU
    private Region fSubRange; // null means: collect everywhere
    private Collection<SimpleName> fTypeImports;
    private Collection<SimpleName> fStaticImports; // null means: do not collect static imports
    private boolean fSkipMethodBodies;

    private ImportReferencesCollector(IJavaProject project, CompilationUnit astRoot, Region rangeLimit, boolean skipMethodBodies, Collection<SimpleName> resultingTypeImports, Collection<SimpleName> resultingStaticImports) {
        super(processJavadocComments(astRoot));
        fTypeImports= resultingTypeImports;
        fStaticImports= resultingStaticImports;
        fSubRange= rangeLimit;
        // Static imports require source level 5.0.
        if (project == null || !JavaModelUtil.is50OrHigher(project)) {
            fStaticImports= null; // do not collect
        }
        fASTRoot= astRoot; // can be null
        fSkipMethodBodies= skipMethodBodies;
    }

    private static boolean processJavadocComments(CompilationUnit astRoot) {
        // don't visit Javadoc for 'package-info' (bug 216432)
        if (astRoot != null && astRoot.getTypeRoot() != null) {
            return !"package-info.java".equals(astRoot.getTypeRoot().getElementName()); //$NON-NLS-1$
        }
        return true;
    }

    /** Returns true when {@code node} overlaps the configured sub-range (or no range is set). */
    private boolean isAffected(ASTNode node) {
        if (fSubRange == null) {
            return true;
        }
        int nodeStart= node.getStartPosition();
        int offset= fSubRange.getOffset();
        return nodeStart + node.getLength() > offset && offset + fSubRange.getLength() > nodeStart;
    }

    /** Records {@code name} as a type-import candidate if it lies in range. */
    private void addReference(SimpleName name) {
        if (isAffected(name)) {
            fTypeImports.add(name);
        }
    }

    /** Records the outermost qualifier of a definite type reference. */
    private void typeRefFound(Name node) {
        if (node != null) {
            while (node.isQualifiedName()) {
                node= ((QualifiedName) node).getQualifier();
            }
            addReference((SimpleName) node);
        }
    }

    /** Records the outermost qualifier of a name that may (or, unresolvable, might) be a type. */
    private void possibleTypeRefFound(Name node) {
        while (node.isQualifiedName()) {
            node= ((QualifiedName) node).getQualifier();
        }
        IBinding binding= node.resolveBinding();
        if (binding == null || binding.getKind() == IBinding.TYPE) {
            // if the binding is null, we cannot determine if
            // we have a type binding or not, so we will assume
            // we do.
            addReference((SimpleName) node);
        }
    }

    /**
     * Records {@code name} as a static-import candidate when it resolves to a
     * static field or method of a non-local class that is not already visible
     * in the current scope.
     */
    private void possibleStaticImportFound(Name name) {
        if (fStaticImports == null || fASTRoot == null) {
            return;
        }
        while (name.isQualifiedName()) {
            name= ((QualifiedName) name).getQualifier();
        }
        if (!isAffected(name)) {
            return;
        }
        IBinding binding= name.resolveBinding();
        SimpleName simpleName= (SimpleName)name;
        if (binding == null || binding instanceof ITypeBinding || !Modifier.isStatic(binding.getModifiers()) || simpleName.isDeclaration()) {
            return;
        }
        if (binding instanceof IVariableBinding) {
            IVariableBinding varBinding= (IVariableBinding) binding;
            if (varBinding.isField()) {
                varBinding= varBinding.getVariableDeclaration();
                ITypeBinding declaringClass= varBinding.getDeclaringClass();
                if (declaringClass != null && !declaringClass.isLocal()) {
                    // Already reachable without an import: nothing to collect.
                    if (new ScopeAnalyzer(fASTRoot).isDeclaredInScope(varBinding, simpleName, ScopeAnalyzer.VARIABLES | ScopeAnalyzer.CHECK_VISIBILITY))
                        return;
                    fStaticImports.add(simpleName);
                }
            }
        } else if (binding instanceof IMethodBinding) {
            IMethodBinding methodBinding= ((IMethodBinding) binding).getMethodDeclaration();
            ITypeBinding declaringClass= methodBinding.getDeclaringClass();
            if (declaringClass != null && !declaringClass.isLocal()) {
                if (new ScopeAnalyzer(fASTRoot).isDeclaredInScope(methodBinding, simpleName, ScopeAnalyzer.METHODS | ScopeAnalyzer.CHECK_VISIBILITY))
                    return;
                fStaticImports.add(simpleName);
            }
        }
    }

    private void doVisitChildren(List<? extends ASTNode> elements) {
        int nElements= elements.size();
        for (int i= 0; i < nElements; i++) {
            ((ASTNode) elements.get(i)).accept(this);
        }
    }

    private void doVisitNode(ASTNode node) {
        if (node != null) {
            node.accept(this);
        }
    }

    /* (non-Javadoc)
     * @see org.eclipse.jdt.internal.corext.dom.GenericVisitor#visitNode(org.eclipse.jdt.core.dom.ASTNode)
     */
    @Override
    protected boolean visitNode(ASTNode node) {
        // Default for all node types without an explicit visit method below:
        // descend only into nodes overlapping the sub-range.
        return isAffected(node);
    }

    /*
     * @see ASTVisitor#visit(ArrayType)
     */
    @Override
    public boolean visit(ArrayType node) {
        doVisitNode(node.getElementType());
        return false;
    }

    /*
     * @see ASTVisitor#visit(SimpleType)
     */
    @Override
    public boolean visit(SimpleType node) {
        typeRefFound(node.getName());
        return false;
    }

    /*
     * @see ASTVisitor#visit(QualifiedType)
     */
    @Override
    public boolean visit(QualifiedType node) {
        // nothing to do here, let the qualifier be visited
        return true;
    }

    /*
     * @see ASTVisitor#visit(QualifiedName)
     */
    @Override
    public boolean visit(QualifiedName node) {
        possibleTypeRefFound(node); // possible ref
        possibleStaticImportFound(node);
        return false;
    }

    /*
     * @see ASTVisitor#visit(ImportDeclaration)
     */
    @Override
    public boolean visit(ImportDeclaration node) {
        // Import declarations themselves never create new import requirements.
        return false;
    }

    /*
     * @see ASTVisitor#visit(PackageDeclaration)
     */
    @Override
    public boolean visit(PackageDeclaration node) {
        // Package annotations (JLS3+) can reference annotation types.
        if (node.getAST().apiLevel() >= AST.JLS3) {
            doVisitNode(node.getJavadoc());
            doVisitChildren(node.annotations());
        }
        return false;
    }

    /*
     * @see ASTVisitor#visit(ThisExpression)
     */
    @Override
    public boolean visit(ThisExpression node) {
        typeRefFound(node.getQualifier());
        return false;
    }

    /**
     * Handles the receiver of an invocation/access: a plain Name qualifier may
     * be a type (static access) or a static member; any other expression is
     * visited normally. With no qualifier, an unqualified selector may itself
     * need a static import.
     */
    private void evalQualifyingExpression(Expression expr, Name selector) {
        if (expr != null) {
            if (expr instanceof Name) {
                Name name= (Name) expr;
                possibleTypeRefFound(name);
                possibleStaticImportFound(name);
            } else {
                expr.accept(this);
            }
        } else if (selector != null) {
            possibleStaticImportFound(selector);
        }
    }

    /*
     * @see ASTVisitor#visit(ClassInstanceCreation)
     */
    @Override
    public boolean visit(ClassInstanceCreation node) {
        doVisitChildren(node.typeArguments());
        doVisitNode(node.getType());
        evalQualifyingExpression(node.getExpression(), null);
        if (node.getAnonymousClassDeclaration() != null) {
            node.getAnonymousClassDeclaration().accept(this);
        }
        doVisitChildren(node.arguments());
        return false;
    }

    /*
     * @see ASTVisitor#visit(MethodInvocation)
     */
    @Override
    public boolean visit(MethodInvocation node) {
        evalQualifyingExpression(node.getExpression(), node.getName());
        doVisitChildren(node.typeArguments());
        doVisitChildren(node.arguments());
        return false;
    }

    /*
     * @see ASTVisitor#visit(SuperConstructorInvocation)
     */
    @Override
    public boolean visit(SuperConstructorInvocation node) {
        if (!isAffected(node)) {
            return false;
        }
        evalQualifyingExpression(node.getExpression(), null);
        doVisitChildren(node.typeArguments());
        doVisitChildren(node.arguments());
        return false;
    }

    /*
     * @see ASTVisitor#visit(FieldAccess)
     */
    @Override
    public boolean visit(FieldAccess node) {
        evalQualifyingExpression(node.getExpression(), node.getName());
        return false;
    }

    /*
     * @see ASTVisitor#visit(SimpleName)
     */
    @Override
    public boolean visit(SimpleName node) {
        // if the call gets here, it can only be a variable reference
        possibleStaticImportFound(node);
        return false;
    }

    /* (non-Javadoc)
     * @see org.eclipse.jdt.internal.corext.dom.GenericVisitor#visit(org.eclipse.jdt.core.dom.MarkerAnnotation)
     */
    @Override
    public boolean visit(MarkerAnnotation node) {
        typeRefFound(node.getTypeName());
        return false;
    }

    /* (non-Javadoc)
     * @see org.eclipse.jdt.internal.corext.dom.GenericVisitor#visit(org.eclipse.jdt.core.dom.NormalAnnotation)
     */
    @Override
    public boolean visit(NormalAnnotation node) {
        typeRefFound(node.getTypeName());
        doVisitChildren(node.values());
        return false;
    }

    /* (non-Javadoc)
     * @see org.eclipse.jdt.internal.corext.dom.GenericVisitor#visit(org.eclipse.jdt.core.dom.SingleMemberAnnotation)
     */
    @Override
    public boolean visit(SingleMemberAnnotation node) {
        typeRefFound(node.getTypeName());
        doVisitNode(node.getValue());
        return false;
    }

    /*
     * @see ASTVisitor#visit(TypeDeclaration)
     */
    @Override
    public boolean visit(TypeDeclaration node) {
        if (!isAffected(node)) {
            return false;
        }
        return true;
    }

    /*
     * @see ASTVisitor#visit(MethodDeclaration)
     */
    @Override
    public boolean visit(MethodDeclaration node) {
        if (!isAffected(node)) {
            return false;
        }
        doVisitNode(node.getJavadoc());
        if (node.getAST().apiLevel() >= AST.JLS3) {
            doVisitChildren(node.modifiers());
            doVisitChildren(node.typeParameters());
        }
        if (!node.isConstructor()) {
            doVisitNode(node.getReturnType2());
        }
        doVisitChildren(node.parameters());
        // Thrown exception names are always definite type references.
        Iterator<Name> iter=node.thrownExceptions().iterator();
        while (iter.hasNext()) {
            typeRefFound(iter.next());
        }
        if (!fSkipMethodBodies) {
            doVisitNode(node.getBody());
        }
        return false;
    }

    @Override
    public boolean visit(TagElement node) {
        // Javadoc tags: the first fragment of @throws/@exception is a type
        // reference; for @see/@link/@linkplain it may be a type reference.
        String tagName= node.getTagName();
        List<? extends ASTNode> list= node.fragments();
        int idx= 0;
        if (tagName != null && !list.isEmpty()) {
            Object first= list.get(0);
            if (first instanceof Name) {
                if ("@throws".equals(tagName) || "@exception".equals(tagName)) { //$NON-NLS-1$//$NON-NLS-2$
                    typeRefFound((Name) first);
                } else if ("@see".equals(tagName) || "@link".equals(tagName) || "@linkplain".equals(tagName)) { //$NON-NLS-1$//$NON-NLS-2$ //$NON-NLS-3$
                    Name name= (Name) first;
                    possibleTypeRefFound(name);
                }
                idx++;
            }
        }
        for (int i= idx; i < list.size(); i++) {
            doVisitNode(list.get(i));
        }
        return false;
    }

    @Override
    public boolean visit(MemberRef node) {
        Name qualifier= node.getQualifier();
        if (qualifier != null) {
            typeRefFound(qualifier);
        }
        return false;
    }

    @Override
    public boolean visit(MethodRef node) {
        Name qualifier= node.getQualifier();
        if (qualifier != null) {
            typeRefFound(qualifier);
        }
        List<MethodRefParameter> list= node.parameters();
        if (list != null) {
            doVisitChildren(list); // visit MethodRefParameter with Type
        }
        return false;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.tang.util;
import java.util.*;
import java.util.Map.Entry;
/**
 * A multi-map that only grows: entries may be added but never removed
 * (all removal-style {@link Collection} operations throw
 * {@link UnsupportedOperationException}). Viewed as a
 * {@code Collection<Entry<K, V>>} of the individual key/value pairs.
 */
public abstract class AbstractMonotonicMultiMap<K, V> implements Collection<Entry<K, V>> {
  protected Map<K, Set<V>> map;
  private int size = 0; // number of key/value pairs across all keys

  public AbstractMonotonicMultiMap(Map<K, Set<V>> map) {
    this.map = map;
  }

  /** Associates {@code v} with {@code key}, creating the value set on first use. */
  public void put(K key, V v) {
    Set<V> vals = map.get(key);
    if (vals == null) {
      vals = new MonotonicHashSet<V>();
      map.put(key, vals);
    }
    // Only count the pair if the underlying set actually grew; this keeps
    // size() consistent should a value set ever silently reject a duplicate
    // (previously size was incremented unconditionally).
    if (vals.add(v)) {
      size++;
    }
  }

  public Set<K> keySet() {
    return map.keySet();
  }

  /** Returns the values bound to {@code key}; an empty set (not null) if none. */
  public Set<V> getValuesForKey(K key) {
    Set<V> ret = map.get(key);
    if (ret == null) {
      return new MonotonicHashSet<V>();
    } else {
      return ret;
    }
  }

  public boolean contains(K key, V v) {
    Set<V> vals = map.get(key);
    if (vals != null) {
      return vals.contains(v);
    }
    return false;
  }

  @Override
  public boolean add(Entry<K, V> e) {
    put(e.getKey(), e.getValue());
    return true;
  }

  @Override
  public boolean addAll(Collection<? extends Entry<K, V>> c) {
    // Returns true iff at least one entry was offered (Collection contract:
    // true when this collection changed as a result of the call).
    boolean ret = false;
    for (Entry<K, V> e : c) {
      add(e);
      ret = true;
    }
    return ret;
  }

  @Override
  public void clear() {
    throw new UnsupportedOperationException("MonotonicMultiMap cannot be cleared!");
  }

  @SuppressWarnings("unchecked")
  @Override
  public boolean contains(Object o) {
    // Collection.contains permits throwing ClassCastException for
    // incompatible element types; a non-Entry argument will do so here.
    Entry<?, ?> e = (Entry<?, ?>) o;
    return contains((K) e.getKey(), (V) e.getValue());
  }

  @Override
  public boolean containsAll(Collection<?> c) {
    for (Object o : c) {
      if (!contains(o)) {
        return false;
      }
    }
    return true;
  }

  @Override
  public boolean isEmpty() {
    return size == 0;
  }

  /**
   * Iterates over all key/value pairs, flattening each key's value set.
   * The returned entries are immutable; {@code setValue} throws.
   */
  @Override
  public Iterator<Entry<K, V>> iterator() {
    final Iterator<Entry<K, Set<V>>> it = map.entrySet().iterator();
    return new Iterator<Entry<K, V>>() {
      private Iterator<V> cur; // iterator over the current key's values, null between keys
      private K curKey;

      @Override
      public boolean hasNext() {
        return it.hasNext() || (cur != null && cur.hasNext());
      }

      @Override
      public Entry<K, V> next() {
        if (cur == null) {
          if (!it.hasNext()) {
            // Iterator contract: signal exhaustion explicitly instead of
            // failing with a NullPointerException on cur.next() below.
            throw new NoSuchElementException();
          }
          Entry<K, Set<V>> e = it.next();
          curKey = e.getKey();
          cur = e.getValue().iterator();
        }
        final K k = curKey;
        final V v = cur.next();
        if (!cur.hasNext()) {
          cur = null;
        }
        return new Entry<K, V>() {
          @Override
          public K getKey() {
            return k;
          }

          @Override
          public V getValue() {
            return v;
          }

          @Override
          public V setValue(V value) {
            throw new UnsupportedOperationException();
          }
        };
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException();
      }
    };
  }

  /** Returns the distinct values across all keys. */
  public Set<V> values() {
    Set<V> s = new HashSet<>();
    for (Entry<K, V> e : this) {
      s.add(e.getValue());
    }
    return s;
  }

  @Override
  public boolean remove(Object o) {
    throw new UnsupportedOperationException("MonotonicMultiMap does not support non-monotonic method remove!");
  }

  @Override
  public boolean removeAll(Collection<?> c) {
    throw new UnsupportedOperationException("MonotonicMultiMap does not support non-monotonic method removeAll!");
  }

  @Override
  public boolean retainAll(Collection<?> c) {
    throw new UnsupportedOperationException("MonotonicMultiMap does not support non-monotonic method retainAll!");
  }

  @Override
  public int size() {
    return size;
  }

  @Override
  public Entry<K, V>[] toArray() {
    throw new UnsupportedOperationException("No toArray() for MonotonicMulitMap (yet)");
  }

  @Override
  public <T> T[] toArray(T[] a) {
    throw new UnsupportedOperationException("No toArray() for MonotonicMulitMap (yet)");
  }

  /** Returns true when {@code k} has at least one associated value. */
  public boolean containsKey(K k) {
    if (map.containsKey(k)) {
      return !getValuesForKey(k).isEmpty();
    } else {
      return false;
    }
  }
}
| |
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.preference;
import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.TypedValue;
import android.view.ContextThemeWrapper;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RestrictTo;
import androidx.annotation.XmlRes;
import androidx.core.content.res.TypedArrayUtils;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
/**
* Shows a hierarchy of {@link Preference} objects as
* lists. These preferences will
* automatically save to {@link android.content.SharedPreferences} as the user interacts with
* them. To retrieve an instance of {@link android.content.SharedPreferences} that the
* preference hierarchy in this fragment will use, call
* {@link PreferenceManager#getDefaultSharedPreferences(android.content.Context)}
* with a context in the same package as this fragment.
* <p>
* Furthermore, the preferences shown will follow the visual style of system
* preferences. It is easy to create a hierarchy of preferences (that can be
* shown on multiple screens) via XML. For these reasons, it is recommended to
* use this fragment (as a superclass) to deal with preferences in applications.
* <p>
* A {@link PreferenceScreen} object should be at the top of the preference
* hierarchy. Furthermore, subsequent {@link PreferenceScreen} in the hierarchy
* denote a screen break--that is the preferences contained within subsequent
* {@link PreferenceScreen} should be shown on another screen. The preference
* framework handles this by calling {@link #onNavigateToScreen(PreferenceScreen)}.
* <p>
* The preference hierarchy can be formed in multiple ways:
* <li> From an XML file specifying the hierarchy
* <li> From different {@link android.app.Activity Activities} that each specify its own
* preferences in an XML file via {@link android.app.Activity} meta-data
* <li> From an object hierarchy rooted with {@link PreferenceScreen}
* <p>
* To inflate from XML, use the {@link #addPreferencesFromResource(int)}. The
* root element should be a {@link PreferenceScreen}. Subsequent elements can point
* to actual {@link Preference} subclasses. As mentioned above, subsequent
* {@link PreferenceScreen} in the hierarchy will result in the screen break.
* <p>
* To specify an object hierarchy rooted with {@link PreferenceScreen}, use
* {@link #setPreferenceScreen(PreferenceScreen)}.
* <p>
* As a convenience, this fragment implements a click listener for any
* preference in the current hierarchy, see
* {@link #onPreferenceTreeClick(Preference)}.
*
* <div class="special reference">
* <h3>Developer Guides</h3>
* <p>For information about using {@code PreferenceFragment},
* read the <a href="{@docRoot}guide/topics/ui/settings.html">Settings</a>
* guide.</p>
* </div>
*
* <a name="SampleCode"></a>
* <h3>Sample Code</h3>
*
* <p>The following sample code shows a simple preference fragment that is
* populated from a resource. The resource it loads is:</p>
*
* {@sample frameworks/support/samples/SupportPreferenceDemos/src/main/res/xml/preferences.xml preferences}
*
* <p>The fragment implementation itself simply populates the preferences
* when created. Note that the preferences framework takes care of loading
* the current values out of the app preferences and writing them when changed:</p>
*
* {@sample frameworks/support/samples/SupportPreferenceDemos/src/main/java/com/example/android/supportpreference/FragmentSupportPreferences.java
* support_fragment}
*
* @see Preference
* @see PreferenceScreen
*/
public abstract class PreferenceFragment extends Fragment implements
PreferenceManager.OnPreferenceTreeClickListener,
PreferenceManager.OnDisplayPreferenceDialogListener,
PreferenceManager.OnNavigateToScreenListener,
DialogPreference.TargetFragment {
/**
 * Fragment argument used to specify the tag of the desired root
 * {@link androidx.preference.PreferenceScreen} object.
 */
public static final String ARG_PREFERENCE_ROOT =
        "androidx.preference.PreferenceFragmentCompat.PREFERENCE_ROOT";

// NOTE(review): presumably the key used to save/restore the preference
// hierarchy state — usage is outside this chunk; confirm.
private static final String PREFERENCES_TAG = "android:preferences";

// Tag for the dialog fragment shown for a DialogPreference.
private static final String DIALOG_FRAGMENT_TAG =
        "androidx.preference.PreferenceFragment.DIALOG";

private PreferenceManager mPreferenceManager;
private RecyclerView mList;
private boolean mHavePrefs;
private boolean mInitDone;
// Context wrapped with the theme resolved from R.attr.preferenceTheme (see onCreate).
private Context mStyledContext;
private int mLayoutResId = androidx.preference.R.layout.preference_list_fragment;
private final DividerDecoration mDividerDecoration = new DividerDecoration();

private static final int MSG_BIND_PREFERENCES = 1;

// Defers preference binding to the main-thread message queue.
private final Handler mHandler = new Handler() {
    @Override
    public void handleMessage(Message msg) {
        switch (msg.what) {
            case MSG_BIND_PREFERENCES:
                bindPreferences();
                break;
        }
    }
};

// Posted after the list is created so it can take focus (see onCreateView).
private final Runnable mRequestFocus = new Runnable() {
    @Override
    public void run() {
        mList.focusableViewAvailable(mList);
    }
};

private Runnable mSelectPreferenceRunnable;

/**
 * Interface that PreferenceFragment's containing activity should
 * implement to be able to process preference items that wish to
 * switch to a specified fragment.
 */
public interface OnPreferenceStartFragmentCallback {
    /**
     * Called when the user has clicked on a Preference that has
     * a fragment class name associated with it. The implementation
     * should instantiate and switch to an instance of the given
     * fragment.
     * @param caller The fragment requesting navigation.
     * @param pref The preference requesting the fragment.
     * @return true if the fragment creation has been handled
     */
    boolean onPreferenceStartFragment(PreferenceFragment caller, Preference pref);
}

/**
 * Interface that PreferenceFragment's containing activity should
 * implement to be able to process preference items that wish to
 * switch to a new screen of preferences.
 */
public interface OnPreferenceStartScreenCallback {
    /**
     * Called when the user has clicked on a PreferenceScreen item in order to navigate to a new
     * screen of preferences.
     * @param caller The fragment requesting navigation.
     * @param pref The preference screen to navigate to.
     * @return true if the screen navigation has been handled
     */
    boolean onPreferenceStartScreen(PreferenceFragment caller, PreferenceScreen pref);
}

/**
 * Interface that PreferenceFragment's containing activity should implement
 * to handle the display of dialogs for preferences that request one.
 */
public interface OnPreferenceDisplayDialogCallback {
    /**
     *
     * @param caller The fragment containing the preference requesting the dialog.
     * @param pref The preference requesting the dialog.
     * @return true if the dialog creation has been handled.
     */
    boolean onPreferenceDisplayDialog(@NonNull PreferenceFragment caller, Preference pref);
}
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Resolve the mandatory preferenceTheme attribute from the host
    // activity's theme; the fragment cannot render without it.
    final TypedValue tv = new TypedValue();
    getActivity().getTheme().resolveAttribute(
            androidx.preference.R.attr.preferenceTheme, tv, true);
    final int theme = tv.resourceId;
    if (theme == 0) {
        throw new IllegalStateException("Must specify preferenceTheme in theme");
    }
    mStyledContext = new ContextThemeWrapper(getActivity(), theme);
    mPreferenceManager = new PreferenceManager(mStyledContext);
    mPreferenceManager.setOnNavigateToScreenListener(this);
    final Bundle args = getArguments();
    // Use the args reference we already fetched instead of calling
    // getArguments() a second time inside the null-guarded branch.
    final String rootKey = (args != null) ? args.getString(ARG_PREFERENCE_ROOT) : null;
    // Subclasses supply the preference hierarchy here.
    onCreatePreferences(savedInstanceState, rootKey);
}
/**
 * Called during {@link #onCreate(Bundle)} to supply the preferences for this fragment.
 * Subclasses are expected to call {@link #setPreferenceScreen(PreferenceScreen)} either
 * directly or via helper methods such as {@link #addPreferencesFromResource(int)}.
 *
 * @param savedInstanceState If the fragment is being re-created from
 *                           a previous saved state, this is the state.
 * @param rootKey If non-null, this preference fragment should be rooted at the
 *                {@link androidx.preference.PreferenceScreen} with this key.
 */
public abstract void onCreatePreferences(Bundle savedInstanceState, String rootKey);
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
        Bundle savedInstanceState) {
    // Read the fragment's styleable attributes (layout, divider drawable and
    // height, divider-after-last-item flag) from the themed context.
    TypedArray a = mStyledContext.obtainStyledAttributes(null,
            R.styleable.PreferenceFragment,
            TypedArrayUtils.getAttr(mStyledContext,
                    androidx.preference.R.attr.preferenceFragmentStyle,
                    AndroidResources.ANDROID_R_PREFERENCE_FRAGMENT_STYLE),
            0);
    mLayoutResId = a.getResourceId(R.styleable.PreferenceFragment_android_layout, mLayoutResId);
    final Drawable divider = a.getDrawable(R.styleable.PreferenceFragment_android_divider);
    final int dividerHeight = a.getDimensionPixelSize(
            R.styleable.PreferenceFragment_android_dividerHeight, -1);
    final boolean allowDividerAfterLastItem = a.getBoolean(
            R.styleable.PreferenceFragment_allowDividerAfterLastItem, true);
    a.recycle();
    // Need to theme the inflater to pick up the preferenceFragmentListStyle
    final TypedValue tv = new TypedValue();
    getActivity().getTheme().resolveAttribute(
            androidx.preference.R.attr.preferenceTheme, tv, true);
    final int theme = tv.resourceId;
    final Context themedContext = new ContextThemeWrapper(inflater.getContext(), theme);
    final LayoutInflater themedInflater = inflater.cloneInContext(themedContext);
    final View view = themedInflater.inflate(mLayoutResId, container, false);
    // The inflated layout must contain a ViewGroup with id
    // android.R.id.list_container to host the RecyclerView.
    final View rawListContainer = view.findViewById(AndroidResources.ANDROID_R_LIST_CONTAINER);
    if (!(rawListContainer instanceof ViewGroup)) {
        throw new RuntimeException("Content has view with id attribute "
                + "'android.R.id.list_container' that is not a ViewGroup class");
    }
    final ViewGroup listContainer = (ViewGroup) rawListContainer;
    final RecyclerView listView = onCreateRecyclerView(themedInflater, listContainer,
            savedInstanceState);
    if (listView == null) {
        throw new RuntimeException("Could not create RecyclerView");
    }
    mList = listView;
    // Apply the divider decoration configured from the attributes above.
    listView.addItemDecoration(mDividerDecoration);
    setDivider(divider);
    if (dividerHeight != -1) {
        setDividerHeight(dividerHeight);
    }
    mDividerDecoration.setAllowDividerAfterLastItem(allowDividerAfterLastItem);
    listContainer.addView(mList);
    // Let the list grab focus once it is attached.
    mHandler.post(mRequestFocus);
    return view;
}
/**
 * Sets the drawable that will be drawn between each item in the list.
 * <p>
 * <strong>Note:</strong> If the drawable does not have an intrinsic
 * height, you should also call {@link #setDividerHeight(int)}.
 *
 * @param divider the drawable to use, or null to draw no dividers
 * @attr ref R.styleable#PreferenceFragment_android_divider
 */
public void setDivider(Drawable divider) {
mDividerDecoration.setDivider(divider);
}
/**
 * Sets the height of the divider that will be drawn between each item in the list. Calling
 * this will override the intrinsic height as set by {@link #setDivider(Drawable)}
 *
 * @param height The new height of the divider in pixels.
 * @attr ref R.styleable#PreferenceFragment_android_dividerHeight
 */
public void setDividerHeight(int height) {
mDividerDecoration.setDividerHeight(height);
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
// If a PreferenceScreen was supplied before the view existed, attach its adapter
// now and run any scroll-to-preference request queued while mList was still null.
if (mHavePrefs) {
bindPreferences();
if (mSelectPreferenceRunnable != null) {
mSelectPreferenceRunnable.run();
mSelectPreferenceRunnable = null;
}
}
// From here on, setPreferenceScreen() may bind immediately via postBindPreferences().
mInitDone = true;
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    // Nothing to restore on a fresh start.
    if (savedInstanceState == null) {
        return;
    }
    final Bundle hierarchyState = savedInstanceState.getBundle(PREFERENCES_TAG);
    if (hierarchyState == null) {
        return;
    }
    // Re-apply the preference hierarchy state captured in onSaveInstanceState().
    final PreferenceScreen screen = getPreferenceScreen();
    if (screen != null) {
        screen.restoreHierarchyState(hierarchyState);
    }
}
@Override
public void onStart() {
super.onStart();
// Listen for tree clicks and dialog requests only while the fragment is started.
mPreferenceManager.setOnPreferenceTreeClickListener(this);
mPreferenceManager.setOnDisplayPreferenceDialogListener(this);
}
@Override
public void onStop() {
super.onStop();
// Mirror of onStart(): detach the listeners so no callbacks arrive while stopped.
mPreferenceManager.setOnPreferenceTreeClickListener(null);
mPreferenceManager.setOnDisplayPreferenceDialogListener(null);
}
@Override
public void onDestroyView() {
// Cancel pending focus/bind work that would otherwise touch the destroyed view.
mHandler.removeCallbacks(mRequestFocus);
mHandler.removeMessages(MSG_BIND_PREFERENCES);
if (mHavePrefs) {
unbindPreferences();
}
// Drop the RecyclerView reference so it can be collected with the view tree.
mList = null;
super.onDestroyView();
}
@Override
public void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    final PreferenceScreen screen = getPreferenceScreen();
    if (screen == null) {
        return;
    }
    // Capture the hierarchy state so onActivityCreated() can restore it after
    // a configuration change or process death.
    final Bundle hierarchyState = new Bundle();
    screen.saveHierarchyState(hierarchyState);
    outState.putBundle(PREFERENCES_TAG, hierarchyState);
}
/**
 * Returns the {@link PreferenceManager} used by this fragment.
 * @return The {@link PreferenceManager}, or null before {@link #onCreate(Bundle)} has run.
 */
public PreferenceManager getPreferenceManager() {
return mPreferenceManager;
}
/**
 * Sets the root of the preference hierarchy that this fragment is showing.
 *
 * @param preferenceScreen The root {@link PreferenceScreen} of the preference hierarchy.
 */
public void setPreferenceScreen(PreferenceScreen preferenceScreen) {
// setPreferences() reports whether the screen actually changed; redundant calls
// with the same screen are ignored.
if (mPreferenceManager.setPreferences(preferenceScreen) && preferenceScreen != null) {
onUnbindPreferences();
mHavePrefs = true;
// Binding is deferred until the view hierarchy exists (see onViewCreated()).
if (mInitDone) {
postBindPreferences();
}
}
}
/**
 * Gets the root of the preference hierarchy that this fragment is showing.
 *
 * @return The {@link PreferenceScreen} that is the root of the preference
 * hierarchy.
 */
public PreferenceScreen getPreferenceScreen() {
return mPreferenceManager.getPreferenceScreen();
}
/**
 * Inflates the given XML resource and adds the preference hierarchy to the current
 * preference hierarchy.
 *
 * @param preferencesResId The XML resource ID to inflate.
 */
public void addPreferencesFromResource(@XmlRes int preferencesResId) {
requirePreferenceManager();
// Passing the current screen as the root merges the inflated preferences into it
// rather than replacing the hierarchy (contrast with setPreferencesFromResource()).
setPreferenceScreen(mPreferenceManager.inflateFromResource(mStyledContext,
preferencesResId, getPreferenceScreen()));
}
/**
 * Inflates the given XML resource and replaces the current preference hierarchy (if any) with
 * the preference hierarchy rooted at {@code key}.
 *
 * @param preferencesResId The XML resource ID to inflate.
 * @param key The preference key of the {@link androidx.preference.PreferenceScreen}
 * to use as the root of the preference hierarchy, or null to use the root
 * {@link androidx.preference.PreferenceScreen}.
 * @throws IllegalArgumentException if {@code key} is non-null and either no preference
 * with that key exists in the inflated hierarchy or the preference found is not a
 * {@link PreferenceScreen}.
 */
public void setPreferencesFromResource(@XmlRes int preferencesResId, @Nullable String key) {
    requirePreferenceManager();
    final PreferenceScreen xmlRoot = mPreferenceManager.inflateFromResource(mStyledContext,
            preferencesResId, null);
    final Preference root;
    if (key != null) {
        root = xmlRoot.findPreference(key);
        // Distinguish "key not present" from "key present but wrong type"; previously a
        // missing key fell through to the misleading "is not a PreferenceScreen" message.
        if (root == null) {
            throw new IllegalArgumentException("Preference object with key " + key
                    + " was not found in the inflated hierarchy");
        }
        if (!(root instanceof PreferenceScreen)) {
            throw new IllegalArgumentException("Preference object with key " + key
                    + " is not a PreferenceScreen");
        }
    } else {
        root = xmlRoot;
    }
    setPreferenceScreen((PreferenceScreen) root);
}
/**
 * {@inheritDoc}
 */
@Override
public boolean onPreferenceTreeClick(Preference preference) {
    // Only preferences that declare a fragment to launch are handled here.
    if (preference.getFragment() == null) {
        return false;
    }
    // Offer the click to the callback fragment first, then to the host activity.
    final Fragment callbackFragment = getCallbackFragment();
    if (callbackFragment instanceof OnPreferenceStartFragmentCallback
            && ((OnPreferenceStartFragmentCallback) callbackFragment)
                    .onPreferenceStartFragment(this, preference)) {
        return true;
    }
    if (getActivity() instanceof OnPreferenceStartFragmentCallback) {
        return ((OnPreferenceStartFragmentCallback) getActivity())
                .onPreferenceStartFragment(this, preference);
    }
    return false;
}
/**
 * Called by
 * {@link androidx.preference.PreferenceScreen#onClick()} in order to navigate to a
 * new screen of preferences. Calls
 * {@link PreferenceFragment.OnPreferenceStartScreenCallback#onPreferenceStartScreen}
 * if the target fragment or containing activity implements
 * {@link PreferenceFragment.OnPreferenceStartScreenCallback}.
 * @param preferenceScreen The {@link androidx.preference.PreferenceScreen} to
 * navigate to.
 */
@Override
public void onNavigateToScreen(PreferenceScreen preferenceScreen) {
    boolean handled = false;
    // Offer the navigation to the callback fragment first, then to the host activity.
    if (getCallbackFragment() instanceof OnPreferenceStartScreenCallback) {
        handled = ((OnPreferenceStartScreenCallback) getCallbackFragment())
                .onPreferenceStartScreen(this, preferenceScreen);
    }
    // Record the activity's answer as well (the original discarded it), keeping this
    // dispatch consistent with onPreferenceTreeClick() and onDisplayPreferenceDialog().
    if (!handled && getActivity() instanceof OnPreferenceStartScreenCallback) {
        handled = ((OnPreferenceStartScreenCallback) getActivity())
                .onPreferenceStartScreen(this, preferenceScreen);
    }
}
/**
 * Finds a {@link Preference} based on its key.
 *
 * @param key The key of the preference to retrieve.
 * @return The {@link Preference} with the key, or null if it does not exist or the
 * {@link PreferenceManager} has not been created yet.
 * @see androidx.preference.PreferenceGroup#findPreference(CharSequence)
 */
@Override
public Preference findPreference(CharSequence key) {
    // Before onCreate() there is no manager, hence no hierarchy to search.
    return mPreferenceManager == null ? null : mPreferenceManager.findPreference(key);
}
/**
 * Ensures the {@link PreferenceManager} exists; it is created in {@code onCreate()}.
 *
 * @throws IllegalStateException if called before {@code super.onCreate()} has run.
 */
private void requirePreferenceManager() {
    if (mPreferenceManager == null) {
        // IllegalStateException (a RuntimeException subclass, so backward compatible)
        // is the idiomatic type for a lifecycle-ordering violation.
        throw new IllegalStateException("This should be called after super.onCreate.");
    }
}
/** Posts a bind request, coalescing multiple requests into one pending message. */
private void postBindPreferences() {
    if (mHandler.hasMessages(MSG_BIND_PREFERENCES)) return;
    mHandler.obtainMessage(MSG_BIND_PREFERENCES).sendToTarget();
}
// Attaches the adapter for the current PreferenceScreen to the list and notifies
// the screen that it is attached; requires the list view to exist.
private void bindPreferences() {
final PreferenceScreen preferenceScreen = getPreferenceScreen();
if (preferenceScreen != null) {
getListView().setAdapter(onCreateAdapter(preferenceScreen));
preferenceScreen.onAttached();
}
onBindPreferences();
}
// Counterpart of bindPreferences(): detaches the current screen, if any.
private void unbindPreferences() {
final PreferenceScreen preferenceScreen = getPreferenceScreen();
if (preferenceScreen != null) {
preferenceScreen.onDetached();
}
onUnbindPreferences();
}
/** Hook invoked after binding; no-op here, overridden by library subclasses. @hide */
@RestrictTo(LIBRARY_GROUP)
protected void onBindPreferences() {
}
/** Hook invoked after unbinding; no-op here, overridden by library subclasses. @hide */
@RestrictTo(LIBRARY_GROUP)
protected void onUnbindPreferences() {
}
/** Returns the {@link RecyclerView} showing the preferences, or null before onCreateView(). */
public final RecyclerView getListView() {
return mList;
}
/**
 * Creates the {@link RecyclerView} used to display the preferences.
 * Subclasses may override this to return a customized
 * {@link RecyclerView}.
 * @param inflater The LayoutInflater object that can be used to inflate the
 * {@link RecyclerView}.
 * @param parent The parent {@link android.view.View} that the RecyclerView will be attached to.
 * This method should not add the view itself, but this can be used to generate
 * the LayoutParams of the view.
 * @param savedInstanceState If non-null, this view is being re-constructed from a previous
 * saved state as given here
 * @return A new RecyclerView object to be placed into the view hierarchy
 */
public RecyclerView onCreateRecyclerView(LayoutInflater inflater, ViewGroup parent,
Bundle savedInstanceState) {
RecyclerView recyclerView = (RecyclerView) inflater
.inflate(androidx.preference.R.layout.preference_recyclerview,
parent, false);
recyclerView.setLayoutManager(onCreateLayoutManager());
// Accessibility delegate announces preference semantics to screen readers.
recyclerView.setAccessibilityDelegateCompat(
new PreferenceRecyclerViewAccessibilityDelegate(recyclerView));
return recyclerView;
}
/**
 * Called from {@link #onCreateRecyclerView} to create the
 * {@link RecyclerView.LayoutManager} for the created
 * {@link RecyclerView}.
 * @return A new {@link RecyclerView.LayoutManager} instance.
 */
public RecyclerView.LayoutManager onCreateLayoutManager() {
return new LinearLayoutManager(getActivity());
}
/**
 * Creates the root adapter.
 *
 * @param preferenceScreen Preference screen object to create the adapter for.
 * @return An adapter that contains the preferences contained in this {@link PreferenceScreen}.
 */
protected RecyclerView.Adapter onCreateAdapter(PreferenceScreen preferenceScreen) {
return new PreferenceGroupAdapter(preferenceScreen);
}
/**
 * Called when a preference in the tree requests to display a dialog. Subclasses should
 * override this method to display custom dialogs or to handle dialogs for custom preference
 * classes.
 *
 * @param preference The Preference object requesting the dialog.
 */
@Override
public void onDisplayPreferenceDialog(Preference preference) {
boolean handled = false;
// Give the callback fragment, then the host activity, the chance to show the dialog.
if (getCallbackFragment() instanceof OnPreferenceDisplayDialogCallback) {
handled = ((OnPreferenceDisplayDialogCallback) getCallbackFragment())
.onPreferenceDisplayDialog(this, preference);
}
if (!handled && getActivity() instanceof OnPreferenceDisplayDialogCallback) {
handled = ((OnPreferenceDisplayDialogCallback) getActivity())
.onPreferenceDisplayDialog(this, preference);
}
if (handled) {
return;
}
// check if dialog is already showing
if (getFragmentManager().findFragmentByTag(DIALOG_FRAGMENT_TAG) != null) {
return;
}
// Fall back to the built-in dialog fragments for the stock preference types.
final DialogFragment f;
if (preference instanceof EditTextPreference) {
f = EditTextPreferenceDialogFragment.newInstance(preference.getKey());
} else if (preference instanceof ListPreference) {
f = ListPreferenceDialogFragment.newInstance(preference.getKey());
} else if (preference instanceof MultiSelectListPreference) {
f = MultiSelectListPreferenceDialogFragment.newInstance(preference.getKey());
} else {
throw new IllegalArgumentException("Tried to display dialog for unknown "
+ "preference type. Did you forget to override onDisplayPreferenceDialog()?");
}
// The dialog fragment reports its result back to this fragment as its target.
f.setTargetFragment(this, 0);
f.show(getFragmentManager(), DIALOG_FRAGMENT_TAG);
}
/**
 * Basically a wrapper for getParentFragment which is v17+. Used by the leanback preference lib.
 * @return Fragment to possibly use as a callback; null in this base implementation.
 * @hide
 */
@RestrictTo(LIBRARY_GROUP)
public Fragment getCallbackFragment() {
return null;
}
/** Scrolls the list to the preference identified by {@code key} (possibly deferred). */
public void scrollToPreference(final String key) {
scrollToPreferenceInternal(null, key);
}
/** Scrolls the list to the given {@code preference} (possibly deferred). */
public void scrollToPreference(final Preference preference) {
scrollToPreferenceInternal(preference, null);
}
// Scrolls to a preference located either by object (preference != null) or by key.
// If the list view does not exist yet, the work is queued in mSelectPreferenceRunnable
// and executed from onViewCreated().
private void scrollToPreferenceInternal(final Preference preference, final String key) {
final Runnable r = new Runnable() {
@Override
public void run() {
final RecyclerView.Adapter adapter = mList.getAdapter();
if (!(adapter instanceof PreferenceGroup.PreferencePositionCallback)) {
if (adapter != null) {
throw new IllegalStateException("Adapter must implement "
+ "PreferencePositionCallback");
} else {
// Adapter was set to null, so don't scroll I guess?
return;
}
}
final int position;
// Resolve the adapter position by Preference object when given, otherwise by key.
if (preference != null) {
position = ((PreferenceGroup.PreferencePositionCallback) adapter)
.getPreferenceAdapterPosition(preference);
} else {
position = ((PreferenceGroup.PreferencePositionCallback) adapter)
.getPreferenceAdapterPosition(key);
}
if (position != RecyclerView.NO_POSITION) {
mList.scrollToPosition(position);
} else {
// Item not found, wait for an update and try again
adapter.registerAdapterDataObserver(
new ScrollToPreferenceObserver(adapter, mList, preference, key));
}
}
};
if (mList == null) {
mSelectPreferenceRunnable = r;
} else {
r.run();
}
}
/**
 * Adapter observer that waits for any data-set change, then scrolls the list to the
 * requested preference (identified by object or key) once and unregisters itself.
 */
private static class ScrollToPreferenceObserver extends RecyclerView.AdapterDataObserver {
private final RecyclerView.Adapter mAdapter;
private final RecyclerView mList;
// Exactly one of mPreference / mKey identifies the target (see scrollToPreferenceInternal).
private final Preference mPreference;
private final String mKey;
ScrollToPreferenceObserver(RecyclerView.Adapter adapter, RecyclerView list,
Preference preference, String key) {
mAdapter = adapter;
mList = list;
mPreference = preference;
mKey = key;
}
// One-shot: stop observing before attempting the scroll.
private void scrollToPreference() {
mAdapter.unregisterAdapterDataObserver(this);
final int position;
if (mPreference != null) {
position = ((PreferenceGroup.PreferencePositionCallback) mAdapter)
.getPreferenceAdapterPosition(mPreference);
} else {
position = ((PreferenceGroup.PreferencePositionCallback) mAdapter)
.getPreferenceAdapterPosition(mKey);
}
if (position != RecyclerView.NO_POSITION) {
mList.scrollToPosition(position);
}
}
// Any kind of data-set change may have introduced the target item; re-check on each.
@Override
public void onChanged() {
scrollToPreference();
}
@Override
public void onItemRangeChanged(int positionStart, int itemCount) {
scrollToPreference();
}
@Override
public void onItemRangeChanged(int positionStart, int itemCount, Object payload) {
scrollToPreference();
}
@Override
public void onItemRangeInserted(int positionStart, int itemCount) {
scrollToPreference();
}
@Override
public void onItemRangeRemoved(int positionStart, int itemCount) {
scrollToPreference();
}
@Override
public void onItemRangeMoved(int fromPosition, int toPosition, int itemCount) {
scrollToPreference();
}
}
/**
 * Item decoration that draws the configured divider drawable below preference rows
 * that permit it (see {@code PreferenceViewHolder.isDividerAllowedBelow()}).
 */
private class DividerDecoration extends RecyclerView.ItemDecoration {
private Drawable mDivider;
private int mDividerHeight;
private boolean mAllowDividerAfterLastItem = true;
@Override
public void onDrawOver(Canvas c, RecyclerView parent, RecyclerView.State state) {
if (mDivider == null) {
return;
}
final int childCount = parent.getChildCount();
final int width = parent.getWidth();
for (int childViewIndex = 0; childViewIndex < childCount; childViewIndex++) {
final View view = parent.getChildAt(childViewIndex);
if (shouldDrawDividerBelow(view, parent)) {
// Use getY() so in-flight item animations offset the divider as well.
int top = (int) view.getY() + view.getHeight();
mDivider.setBounds(0, top, width, top + mDividerHeight);
mDivider.draw(c);
}
}
}
@Override
public void getItemOffsets(Rect outRect, View view, RecyclerView parent,
RecyclerView.State state) {
// Reserve space below the row so the divider does not overlap content.
if (shouldDrawDividerBelow(view, parent)) {
outRect.bottom = mDividerHeight;
}
}
private boolean shouldDrawDividerBelow(View view, RecyclerView parent) {
final RecyclerView.ViewHolder holder = parent.getChildViewHolder(view);
final boolean dividerAllowedBelow = holder instanceof PreferenceViewHolder
&& ((PreferenceViewHolder) holder).isDividerAllowedBelow();
if (!dividerAllowedBelow) {
return false;
}
// For the last visible child fall back to the allowDividerAfterLastItem policy;
// otherwise the following row must also allow a divider above itself.
boolean nextAllowed = mAllowDividerAfterLastItem;
int index = parent.indexOfChild(view);
if (index < parent.getChildCount() - 1) {
final View nextView = parent.getChildAt(index + 1);
final RecyclerView.ViewHolder nextHolder = parent.getChildViewHolder(nextView);
nextAllowed = nextHolder instanceof PreferenceViewHolder
&& ((PreferenceViewHolder) nextHolder).isDividerAllowedAbove();
}
return nextAllowed;
}
public void setDivider(Drawable divider) {
// The drawable's intrinsic height is the default; setDividerHeight() may override it.
if (divider != null) {
mDividerHeight = divider.getIntrinsicHeight();
} else {
mDividerHeight = 0;
}
mDivider = divider;
mList.invalidateItemDecorations();
}
public void setDividerHeight(int dividerHeight) {
mDividerHeight = dividerHeight;
mList.invalidateItemDecorations();
}
public void setAllowDividerAfterLastItem(boolean allowDividerAfterLastItem) {
mAllowDividerAfterLastItem = allowDividerAfterLastItem;
}
}
}
| |
/* -*- mode: java; c-basic-offset: 8; indent-tabs-mode: t; tab-width: 8 -*- */
package vib;
import amira.AmiraParameters;
import ij.ImageListener;
import ij.ImagePlus;
import ij.gui.ImageCanvas;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Image;
import java.awt.Polygon;
import java.awt.Shape;
import java.awt.geom.AffineTransform;
import java.awt.geom.GeneralPath;
import java.util.ArrayList;
import java.util.Vector;
/**
 * An ImageJ {@link ImageCanvas} that overlays the material contours of a separate
 * label image (an Amira-style segmentation) on top of the displayed {@link ImagePlus},
 * optionally filling them with a translucent color. Contours are extracted lazily per
 * slice and cached; painting is double-buffered to avoid flicker.
 */
public class SegmentationViewerCanvas extends ImageCanvas {
final static int OUTLINE=1, FILL=2;
int mode=FILL;
int alpha=128; // if mode==FILL, use this transparency to fill
protected ImagePlus labels;
int w,h,d;
Color[] label_colors; // these are the up to 256 material colors
Vector[] contours; // each element is a vector of polygons
Vector[] colors; // these are the corresponding colors
Vector[] indices; // these are the corresponding material IDs
// NOTE(review): never read in this class.
private final boolean debug = false;
public SegmentationViewerCanvas(ImagePlus imp) {
super(imp);
label_colors = new Color[256];
w=imp.getWidth();
h=imp.getHeight();
d=imp.getStack().getSize();
contours=new Vector[d];
colors=new Vector[d];
indices=new Vector[d];
// Invalidate cached contours whenever the label image itself is updated.
ImagePlus.addImageListener(new ImageListener() {
public void imageOpened(ImagePlus imp) { }
public void imageClosed(ImagePlus imp) { }
public void imageUpdated(ImagePlus imp) {
if (imp == labels)
setLabels(labels);
}
});
}
public SegmentationViewerCanvas(ImagePlus imp,ImagePlus labels) {
this(imp);
setLabels(labels);
}
public ImagePlus getLabels() {
return labels;
}
// Replaces the label image, resets all cached per-slice contours, and re-reads the
// material colors from the image's Amira parameters. Materials beyond the declared
// count default to red.
public void setLabels(ImagePlus labels) {
this.labels=labels;
contours=new Vector[d];
colors=new Vector[d];
indices=new Vector[d];
if (labels == null)
return;
AmiraParameters parameters=new AmiraParameters(labels);
int count = parameters.getMaterialCount();
for(int i=0;i<label_colors.length;i++) {
if (i >= count) {
label_colors[i] = Color.RED;
continue;
}
double[] c=parameters.getMaterialColor(i);
int red=(int)(255*c[0]);
int green=(int)(255*c[1]);
int blue=(int)(255*c[2]);
label_colors[i]=new Color(red,green,blue);
}
// Only repaint once the back buffer exists (i.e. paint() has run at least once).
if (backBufferGraphics != null)
repaint();
}
// Discards and immediately recomputes the cached contours for one slice (1-based).
public void updateSlice(int slice){
synchronized(this) {
colors[slice-1] = null;
contours[slice-1] = null;
indices[slice-1] = null;
createContoursIfNotExist(slice);
}
}
// Returns the outline path of the given material on the given slice (1-based),
// or null if that material has no contour there.
public GeneralPath getOutline(int slice, int materialId){
synchronized(this) {
createContoursIfNotExist(slice);
for (int i = 0; i < indices[slice-1].size(); i++)
if (((Integer)indices[slice-1].get(i)).intValue()
== materialId)
return (GeneralPath)contours[slice-1].get(i);
return null;
}
}
/*
 * This class implements a Cartesian polygon in progress.
 * The edges are supposed to be of unit length, and parallel to
 * one axis.
 * It is implemented as a deque to be able to add points to both
 * sides.
 * The points should be added such that for each pair of consecutive
 * points, the inner part is on the left.
 */
static class Outline {
int[] x, y;
// Deque bounds: valid points live in [first, last); 'reserved' is the capacity.
int first, last, reserved;
final int GROW = 10;
public Outline() {
reserved = GROW;
x = new int[reserved];
y = new int[reserved];
first = last = GROW / 2;
}
// Grows the backing arrays (and shifts the contents right by 'offset') so that
// 'newCount' total slots fit and 'offset' slots are free at the front.
private void needs(int newCount, int offset) {
if (newCount > reserved || (offset > first)) {
if (newCount < reserved + GROW + 1)
newCount = reserved + GROW + 1;
int[] newX = new int[newCount];
int[] newY = new int[newCount];
System.arraycopy(x, 0, newX, offset, last);
System.arraycopy(y, 0, newY, offset, last);
x = newX;
y = newY;
first += offset;
last += offset;
reserved = newCount;
}
}
// Appends a point at the tail of the deque.
public Outline push(int x, int y) {
needs(last + 1, 0);
this.x[last] = x;
this.y[last] = y;
last++;
return this;
}
// Prepends a point at the head of the deque.
public Outline shift(int x, int y) {
needs(last + 1, GROW);
first--;
this.x[first] = x;
this.y[first] = y;
return this;
}
// Appends all points of another outline at the tail.
public Outline push(Outline o) {
int count = o.last - o.first;
needs(last + count, 0);
System.arraycopy(o.x, o.first, x, last, count);
System.arraycopy(o.y, o.first, y, last, count);
last += count;
return this;
}
// Prepends all points of another outline at the head.
public Outline shift(Outline o) {
int count = o.last - o.first;
needs(last + count + GROW, count + GROW);
first -= count;
System.arraycopy(o.x, o.first, x, first, count);
System.arraycopy(o.y, o.first, y, first, count);
return this;
}
public Polygon getPolygon() {
// TODO: optimize out long straight lines
int count = last - first;
int[] x1 = new int[count];
int[] y1 = new int[count];
System.arraycopy(x, first, x1, 0, count);
System.arraycopy(y, first, y1, 0, count);
return new Polygon(x1, y1, count);
}
public String toString() {
String res = "(first:" + first
+ ",last:" + last + ",reserved:" + reserved + ":";
if (last > x.length) System.err.println("ERROR!");
for (int i = first; i < last && i < x.length; i++)
res += "(" + x[i] + "," + y[i] + ")";
return res + ")";
}
}
// Extracts, in a single top-to-bottom sweep, one GeneralPath per material present
// on a slice, and stores the results into contours/colors/indices for that slice.
class ContourFinder {
int slice;
byte[] pixels;
GeneralPath[] paths;
Outline[] outline;
public ContourFinder(int slice) {
this.slice=slice;
pixels=(byte[])labels.getStack().getProcessor(slice+1).getPixels();
paths = new GeneralPath[255];
}
// no check!
final byte get(int x,int y) { return pixels[y * w + x]; }
/*
 * Construct all outlines simultaneously by traversing the rows
 * from top to bottom.
 *
 * The open ends of the polygons are stored in outline[]:
 * if the polygon ends at the left of the pixel at x in the
 * previous row, and the pixel is not contained in the polygon,
 * outline[2 * x] contains the partial outline;
 * if the polygon contains the pixel, outline[2 * x + 1] holds
 * the partial outline.
 */
public void initContours() {
contours[slice]=new Vector();
colors[slice]=new Vector();
indices[slice]=new Vector();
// actually find the outlines
// NOTE(review): 'polygons' is never used below — candidate for removal.
ArrayList polygons = new ArrayList();
outline = new Outline[2 * w + 2];
// y runs to h inclusive so the bottom edges of the last row get closed.
for (int y = 0; y <= h; y++)
for (int x = 0; x < w; x++)
handle(x, y);
// Material 0 is background; collect the paths of materials 1..254.
for (int i = 1; i < paths.length; i++) {
if (paths[i] != null) {
contours[slice].add(paths[i]);
colors[slice].add(label_colors[i]);
indices[slice].add(new Integer(i));
}
}
}
// Starts a fresh outline with the edge (x1,y)-(x2,y), registered at both slots.
final private Outline newOutline(int left, int right,
int x1, int x2, int y) {
outline[left] = outline[right] = new Outline();
outline[left].push(x1, y);
outline[left].push(x2, y);
return outline[left];
}
// Concatenates two distinct open outlines and redirects the slot that still
// pointed at 'right' to the merged result.
final private Outline mergeOutlines(Outline left, Outline right) {
left.push(right);
for (int k = 0; k < outline.length; k++)
if (outline[k] == right) {
outline[k] = left;
return outline[k];
}
throw new RuntimeException("assertion failed!");
}
// NOTE(review): appears to be unused in this class.
final private Outline moveOutline(int from, int to) {
outline[to] = outline[from];
outline[from] = null;
return outline[to];
}
// Closes a finished outline and appends it to the material's path.
private void closeOutline(byte material, Outline outline) {
int m = material & 0xff;
// NOTE(review): byte -1 (unsigned 255) is remapped to material 0; intent unclear
// in the original ("????? Tom") — verify before relying on material 255.
if(material == -1) m = 0;
if (paths[m] == null)
paths[m] = new GeneralPath(GeneralPath.WIND_EVEN_ODD);
paths[m].append(outline.getPolygon(), false);
}
// Processes one pixel boundary cell: emits/extends/merges/closes the partial
// outlines around pixel (x, y) based on its neighbors. Statement order is
// significant — do not reorder.
private void handle(int x, int y) {
byte m = (y < h ? get(x, y) : 0);
byte mPrev = (y > 0 ? get(x, y - 1) : 0);
byte mLeft = (x > 0 && y < h ? get(x - 1, y) : 0);
byte mRight = (x < w - 1 && y < h ? get(x + 1, y) : 0);
byte mPrevLeft = (x > 0 && y > 0 ? get(x - 1, y - 1) : 0);
byte mPrevRight = (x < w - 1 && y > 0 ? get(x + 1, y - 1) : 0);
Outline left1 = outline[2 * x];
Outline left2 = outline[2 * x + 1];
Outline right2 = outline[2 * x + 2];
Outline right1 = outline[2 * x + 3];
outline[2 * x] = outline[2 * x + 3] = null;
outline[2 * x + 1] = outline[2 * x + 2] = null;
if (mPrev != 0 && mPrev != m) {
// lower edge
// - both null: new outline
// - left == null: shift
// - right == null: push
// - right == left: close
// - right != left: push
int l = 2 * x, r = 2 * x + 3;
if (left2 == null && right2 == null)
newOutline(l, r, x, x + 1, y);
else if (left2 == null)
outline[l] = right2.shift(x, y);
else if (right2 == null)
outline[r] = left2.push(x + 1, y);
else if (left2 == right2)
closeOutline(mPrev, left2);
else
mergeOutlines(left2, right2);
left2 = right2 = null;
}
if (m != 0 && mPrev != m) {
// upper edge:
// - left and right are null: new outline
// - left null: push
// - right null: shift
// - left == right: close
// - left != right: merge
int l = 2 * x + 1, r = 2 * x + 2;
if (left1 != null && mLeft != m) {
outline[2 * x] = left1;
left1 = null;
}
if (right1 != null && (mRight != m || mPrevRight != m)) {
outline[2 * x + 3] = right1;
right1 = null;
}
if (left1 == null && right1 == null)
newOutline(l, r, x + 1, x, y);
else if (left1 == null)
outline[l] = right1.push(x, y);
else if(right1 == null)
outline[r] = left1.shift(x + 1, y);
else if (left1 == right1)
closeOutline(m, left1);
else
mergeOutlines(right1, left1);
left1 = right1 = null;
}
if (left1 != null)
outline[2 * x] = left1;
if (left2 != null)
outline[2 * x + 1] = left2;
if (right1 != null)
outline[2 * x + 3] = right1;
if (right2 != null)
outline[2 * x + 2] = right2;
if (m != 0 && mLeft != m) {
// left edge
int l = 2 * x + 1;
if (outline[l] == null)
outline[l] = left2;
outline[l].push(x, y + 1);
}
if (mLeft != 0 && mLeft != m) {
// right edge
int l = 2 * x + 0;
if (outline[l] == null)
outline[l] = left1;
outline[l].shift(x, y + 1);
}
}
}
// Lazily computes and caches the contours for a slice (1-based); no-op when the
// labels are absent or the slice was already processed.
public void createContoursIfNotExist(int slice) {
if (labels == null || contours[slice-1]!=null)
return;
ContourFinder finder=new ContourFinder(slice-1);
finder.initContours();
}
private int backBufferWidth;
private int backBufferHeight;
private Graphics backBufferGraphics;
private Image backBufferImage;
// (Re)allocates the off-screen buffer used for flicker-free painting.
private void resetBackBuffer() {
if(backBufferGraphics!=null){
backBufferGraphics.dispose();
backBufferGraphics=null;
}
if(backBufferImage!=null){
backBufferImage.flush();
backBufferImage=null;
}
backBufferWidth=getSize().width;
backBufferHeight=getSize().height;
backBufferImage=createImage(backBufferWidth,backBufferHeight);
backBufferGraphics=backBufferImage.getGraphics();
}
// Paints the base canvas plus the contour overlay into the back buffer, then
// blits the buffer to the screen in one call.
public void paint(Graphics g) {
if(backBufferWidth!=getSize().width ||
backBufferHeight!=getSize().height ||
backBufferImage==null ||
backBufferGraphics==null)
resetBackBuffer();
int slice = imp.getCurrentSlice();
synchronized(this) {
createContoursIfNotExist(slice);
super.paint(backBufferGraphics);
drawOverlay(backBufferGraphics,slice);
}
g.drawImage(backBufferImage,0,0,this);
}
// Draws the cached contours of one slice, scaled and offset to match the current
// zoom/scroll. Callers must ensure createContoursIfNotExist(slice) ran first.
void drawOverlay(Graphics g,int slice) {
if (labels == null)
return;
double magnification=getMagnification();
for(int i=0;i<contours[slice-1].size();i++) {
g.setColor((Color)colors[slice-1].get(i));
Shape poly = (Shape)contours[slice-1].get(i);
// take offset into account (magnification very high)
if(magnification!=1.0) {
AffineTransform trans = (((Graphics2D)g).getDeviceConfiguration()).getDefaultTransform();
trans.setTransform(magnification, 0,
0, magnification,
-srcRect.x * magnification,
-srcRect.y * magnification);
poly = trans.createTransformedShape(poly);
}
((Graphics2D)g).draw(poly);
if(mode==FILL) {
Color c=(Color)colors[slice-1].get(i);
Color c1=new Color(c.getRed(),c.getGreen(),c.getBlue(),alpha);
g.setColor(c1);
((Graphics2D)g).fill(poly);
}
}
}
}
| |
/*
* Copyright 2009 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.channel;
import java.net.SocketAddress;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.logging.InternalLogger;
import org.jboss.netty.logging.InternalLoggerFactory;
/**
* A {@link ChannelHandler} which provides an individual handler method
* for each event type. This handler down-casts the received upstream or
* downstream event into a more meaningful sub-type event and calls an
* appropriate handler method with the down-cast event. For an upstream
* event, the names of the methods are identical to the upstream event names,
* as introduced in the {@link ChannelEvent} documentation. For a
* downstream event, the names of the methods starts with the name of the
* operation and ends with {@code "Requested"}
* (e.g. {@link #writeRequested(ChannelHandlerContext, MessageEvent) writeRequested}.)
* <p>
* Please use {@link SimpleChannelUpstreamHandler} or
* {@link SimpleChannelDownstreamHandler} if you want to intercept only
* upstream or downstream events.
*
* <h3>Overriding the {@link #handleUpstream(ChannelHandlerContext, ChannelEvent) handleUpstream}
* and {@link #handleDownstream(ChannelHandlerContext, ChannelEvent) handleDownstream} method</h3>
* <p>
* You can override the {@link #handleUpstream(ChannelHandlerContext, ChannelEvent) handleUpstream}
* and {@link #handleDownstream(ChannelHandlerContext, ChannelEvent) handleDownstream}
* method just like overriding an ordinary Java method. Please make sure to
* call {@code super.handleUpstream()} or {@code super.handleDownstream()} so
* that other handler methods are invoked properly:
* </p>
* <pre>public class MyChannelHandler extends {@link SimpleChannelHandler} {
*
* {@code @Override}
* public void handleUpstream({@link ChannelHandlerContext} ctx, {@link ChannelEvent} e) throws Exception {
*
* // Log all channel state changes.
* if (e instanceof {@link ChannelStateEvent}) {
* logger.info("Channel state changed: " + e);
* }
*
* <strong>super.handleUpstream(ctx, e);</strong>
* }
*
* {@code @Override}
* public void handleDownstream({@link ChannelHandlerContext} ctx, {@link ChannelEvent} e) throws Exception {
*
* // Log all channel state changes.
* if (e instanceof {@link MessageEvent}) {
* logger.info("Writing:: " + e);
* }
*
* <strong>super.handleDownstream(ctx, e);</strong>
* }
* }</pre>
*
* @author <a href="http://www.jboss.org/netty/">The Netty Project</a>
* @author <a href="http://gleamynode.net/">Trustin Lee</a>
*
* @version $Rev$, $Date$
*/
public class SimpleChannelHandler implements ChannelUpstreamHandler, ChannelDownstreamHandler {
// Shared logger used by the unhandled-exception warning in exceptionCaught().
private static final InternalLogger logger =
InternalLoggerFactory.getInstance(SimpleChannelHandler.class.getName());
/**
 * Creates a new instance. The visible portion of this class declares no instance
 * state of its own.
 */
public SimpleChannelHandler() {
super();
}
/**
 * {@inheritDoc} Down-casts the received upstream event into more
 * meaningful sub-type event and calls an appropriate handler method with
 * the down-casted event.
 */
@Override
public void handleUpstream(
ChannelHandlerContext ctx, ChannelEvent e) throws Exception {
if (e instanceof MessageEvent) {
messageReceived(ctx, (MessageEvent) e);
} else if (e instanceof WriteCompletionEvent) {
WriteCompletionEvent evt = (WriteCompletionEvent) e;
writeComplete(ctx, evt);
} else if (e instanceof ChildChannelStateEvent) {
ChildChannelStateEvent evt = (ChildChannelStateEvent) e;
// A child channel event signals either an accepted (open) or closed child.
if (evt.getChildChannel().isOpen()) {
childChannelOpen(ctx, evt);
} else {
childChannelClosed(ctx, evt);
}
} else if (e instanceof ChannelStateEvent) {
ChannelStateEvent evt = (ChannelStateEvent) e;
switch (evt.getState()) {
case OPEN:
// TRUE value means the channel opened; anything else means closed.
if (Boolean.TRUE.equals(evt.getValue())) {
channelOpen(ctx, evt);
} else {
channelClosed(ctx, evt);
}
break;
case BOUND:
// A null value indicates the channel was unbound.
if (evt.getValue() != null) {
channelBound(ctx, evt);
} else {
channelUnbound(ctx, evt);
}
break;
case CONNECTED:
// A null value indicates the channel was disconnected.
if (evt.getValue() != null) {
channelConnected(ctx, evt);
} else {
channelDisconnected(ctx, evt);
}
break;
case INTEREST_OPS:
channelInterestChanged(ctx, evt);
break;
default:
// Unknown state: forward untouched so later handlers can see it.
ctx.sendUpstream(e);
}
} else if (e instanceof ExceptionEvent) {
exceptionCaught(ctx, (ExceptionEvent) e);
} else {
// Unrecognized event type: pass through unchanged.
ctx.sendUpstream(e);
}
}
/**
* Invoked when a message object (e.g: {@link ChannelBuffer}) was received
* from a remote peer.
*/
public void messageReceived(
ChannelHandlerContext ctx, MessageEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when an exception was raised by an I/O thread or a
* {@link ChannelHandler}.
*/
public void exceptionCaught(
ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
if (this == ctx.getPipeline().getLast()) {
logger.warn(
"EXCEPTION, please implement " + getClass().getName() +
".exceptionCaught() for proper handling.", e.getCause());
}
ctx.sendUpstream(e);
}
/**
* Invoked when a {@link Channel} is open, but not bound nor connected.
*/
public void channelOpen(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when a {@link Channel} is open and bound to a local address,
* but not connected.
*/
public void channelBound(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when a {@link Channel} is open, bound to a local address, and
* connected to a remote address.
*/
public void channelConnected(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when a {@link Channel}'s {@link Channel#getInterestOps() interestOps}
* was changed.
*/
public void channelInterestChanged(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when a {@link Channel} was disconnected from its remote peer.
*/
public void channelDisconnected(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when a {@link Channel} was unbound from the current local address.
*/
public void channelUnbound(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when a {@link Channel} was closed and all its related resources
* were released.
*/
public void channelClosed(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when something was written into a {@link Channel}.
*/
public void writeComplete(
ChannelHandlerContext ctx, WriteCompletionEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when a child {@link Channel} was open.
* (e.g. a server channel accepted a connection)
*/
public void childChannelOpen(
ChannelHandlerContext ctx, ChildChannelStateEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* Invoked when a child {@link Channel} was closed.
* (e.g. the accepted connection was closed)
*/
public void childChannelClosed(
ChannelHandlerContext ctx, ChildChannelStateEvent e) throws Exception {
ctx.sendUpstream(e);
}
/**
* {@inheritDoc} Down-casts the received downstream event into more
* meaningful sub-type event and calls an appropriate handler method with
* the down-casted event.
*/
@Override
public void handleDownstream(ChannelHandlerContext ctx, ChannelEvent e)
throws Exception {
if (e instanceof MessageEvent) {
writeRequested(ctx, (MessageEvent) e);
} else if (e instanceof ChannelStateEvent) {
ChannelStateEvent evt = (ChannelStateEvent) e;
switch (evt.getState()) {
case OPEN:
if (!Boolean.TRUE.equals(evt.getValue())) {
closeRequested(ctx, evt);
}
break;
case BOUND:
if (evt.getValue() != null) {
bindRequested(ctx, evt);
} else {
unbindRequested(ctx, evt);
}
break;
case CONNECTED:
if (evt.getValue() != null) {
connectRequested(ctx, evt);
} else {
disconnectRequested(ctx, evt);
}
break;
case INTEREST_OPS:
setInterestOpsRequested(ctx, evt);
break;
default:
ctx.sendDownstream(e);
}
} else {
ctx.sendDownstream(e);
}
}
/**
* Invoked when {@link Channel#write(Object)} is called.
*/
public void writeRequested(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
ctx.sendDownstream(e);
}
/**
* Invoked when {@link Channel#bind(SocketAddress)} was called.
*/
public void bindRequested(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendDownstream(e);
}
/**
* Invoked when {@link Channel#connect(SocketAddress)} was called.
*/
public void connectRequested(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendDownstream(e);
}
/**
* Invoked when {@link Channel#setInterestOps(int)} was called.
*/
public void setInterestOpsRequested(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendDownstream(e);
}
/**
* Invoked when {@link Channel#disconnect()} was called.
*/
public void disconnectRequested(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendDownstream(e);
}
/**
* Invoked when {@link Channel#unbind()} was called.
*/
public void unbindRequested(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendDownstream(e);
}
/**
* Invoked when {@link Channel#close()} was called.
*/
public void closeRequested(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
ctx.sendDownstream(e);
}
}
| |
package com.amazonaws.fps.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://fps.amazonaws.com/doc/2008-09-17/}GetOutstandingDebtBalanceResult" minOccurs="0"/>
* <element ref="{http://fps.amazonaws.com/doc/2008-09-17/}ResponseMetadata"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
* Generated by AWS Code Generator
* <p/>
* Tue Sep 29 03:25:23 PDT 2009
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"getOutstandingDebtBalanceResult",
"responseMetadata"
})
@XmlRootElement(name = "GetOutstandingDebtBalanceResponse")
public class GetOutstandingDebtBalanceResponse {
@XmlElement(name = "GetOutstandingDebtBalanceResult")
protected GetOutstandingDebtBalanceResult getOutstandingDebtBalanceResult;
@XmlElement(name = "ResponseMetadata", required = true)
protected ResponseMetadata responseMetadata;
/**
* Default constructor
*
*/
public GetOutstandingDebtBalanceResponse() {
super();
}
/**
* Value constructor
*
*/
public GetOutstandingDebtBalanceResponse(final GetOutstandingDebtBalanceResult getOutstandingDebtBalanceResult, final ResponseMetadata responseMetadata) {
this.getOutstandingDebtBalanceResult = getOutstandingDebtBalanceResult;
this.responseMetadata = responseMetadata;
}
/**
* Gets the value of the getOutstandingDebtBalanceResult property.
*
* @return
* possible object is
* {@link GetOutstandingDebtBalanceResult }
*
*/
public GetOutstandingDebtBalanceResult getGetOutstandingDebtBalanceResult() {
return getOutstandingDebtBalanceResult;
}
/**
* Sets the value of the getOutstandingDebtBalanceResult property.
*
* @param value
* allowed object is
* {@link GetOutstandingDebtBalanceResult }
*
*/
public void setGetOutstandingDebtBalanceResult(GetOutstandingDebtBalanceResult value) {
this.getOutstandingDebtBalanceResult = value;
}
public boolean isSetGetOutstandingDebtBalanceResult() {
return (this.getOutstandingDebtBalanceResult!= null);
}
/**
* Gets the value of the responseMetadata property.
*
* @return
* possible object is
* {@link ResponseMetadata }
*
*/
public ResponseMetadata getResponseMetadata() {
return responseMetadata;
}
/**
* Sets the value of the responseMetadata property.
*
* @param value
* allowed object is
* {@link ResponseMetadata }
*
*/
public void setResponseMetadata(ResponseMetadata value) {
this.responseMetadata = value;
}
public boolean isSetResponseMetadata() {
return (this.responseMetadata!= null);
}
/**
* Sets the value of the GetOutstandingDebtBalanceResult property.
*
* @param value
* @return
* this instance
*/
public GetOutstandingDebtBalanceResponse withGetOutstandingDebtBalanceResult(GetOutstandingDebtBalanceResult value) {
setGetOutstandingDebtBalanceResult(value);
return this;
}
/**
* Sets the value of the ResponseMetadata property.
*
* @param value
* @return
* this instance
*/
public GetOutstandingDebtBalanceResponse withResponseMetadata(ResponseMetadata value) {
setResponseMetadata(value);
return this;
}
/**
*
* XML string representation of this object
*
* @return XML String
*/
public String toXML() {
StringBuffer xml = new StringBuffer();
xml.append("<GetOutstandingDebtBalanceResponse xmlns=\"http://fps.amazonaws.com/doc/2008-09-17/\">");
if (isSetGetOutstandingDebtBalanceResult()) {
GetOutstandingDebtBalanceResult getOutstandingDebtBalanceResult = getGetOutstandingDebtBalanceResult();
xml.append("<GetOutstandingDebtBalanceResult>");
xml.append(getOutstandingDebtBalanceResult.toXMLFragment());
xml.append("</GetOutstandingDebtBalanceResult>");
}
if (isSetResponseMetadata()) {
ResponseMetadata responseMetadata = getResponseMetadata();
xml.append("<ResponseMetadata>");
xml.append(responseMetadata.toXMLFragment());
xml.append("</ResponseMetadata>");
}
xml.append("</GetOutstandingDebtBalanceResponse>");
return xml.toString();
}
/**
*
* Escape XML special characters
*/
private String escapeXML(String string) {
StringBuffer sb = new StringBuffer();
int length = string.length();
for (int i = 0; i < length; ++i) {
char c = string.charAt(i);
switch (c) {
case '&':
sb.append("&");
break;
case '<':
sb.append("<");
break;
case '>':
sb.append(">");
break;
case '\'':
sb.append("'");
break;
case '"':
sb.append(""");
break;
default:
sb.append(c);
}
}
return sb.toString();
}
/**
*
* JSON string representation of this object
*
* @return JSON String
*/
public String toJSON() {
StringBuffer json = new StringBuffer();
json.append("{\"GetOutstandingDebtBalanceResponse\" : {");
json.append(quoteJSON("@xmlns"));
json.append(" : ");
json.append(quoteJSON("http://fps.amazonaws.com/doc/2008-09-17/"));
boolean first = true;
json.append(", ");
if (isSetGetOutstandingDebtBalanceResult()) {
if (!first) json.append(", ");
json.append("\"GetOutstandingDebtBalanceResult\" : {");
GetOutstandingDebtBalanceResult getOutstandingDebtBalanceResult = getGetOutstandingDebtBalanceResult();
json.append(getOutstandingDebtBalanceResult.toJSONFragment());
json.append("}");
first = false;
}
if (isSetResponseMetadata()) {
if (!first) json.append(", ");
json.append("\"ResponseMetadata\" : {");
ResponseMetadata responseMetadata = getResponseMetadata();
json.append(responseMetadata.toJSONFragment());
json.append("}");
first = false;
}
json.append("}");
json.append("}");
return json.toString();
}
/**
*
* Quote JSON string
*/
private String quoteJSON(String string) {
StringBuffer sb = new StringBuffer();
sb.append("\"");
int length = string.length();
for (int i = 0; i < length; ++i) {
char c = string.charAt(i);
switch (c) {
case '"':
sb.append("\\\"");
break;
case '\\':
sb.append("\\\\");
break;
case '/':
sb.append("\\/");
break;
case '\b':
sb.append("\\b");
break;
case '\f':
sb.append("\\f");
break;
case '\n':
sb.append("\\n");
break;
case '\r':
sb.append("\\r");
break;
case '\t':
sb.append("\\t");
break;
default:
if (c < ' ') {
sb.append("\\u" + String.format("%03x", Integer.valueOf(c)));
} else {
sb.append(c);
}
}
}
sb.append("\"");
return sb.toString();
}
}
| |
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.indexing.common.task;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import com.metamx.common.ISE;
import com.metamx.common.guava.Comparators;
import com.metamx.common.logger.Logger;
import io.druid.data.input.Firehose;
import io.druid.data.input.FirehoseFactory;
import io.druid.data.input.InputRow;
import io.druid.data.input.Rows;
import io.druid.granularity.QueryGranularity;
import io.druid.indexing.common.TaskLock;
import io.druid.indexing.common.TaskStatus;
import io.druid.indexing.common.TaskToolbox;
import io.druid.indexing.common.index.YeOldePlumberSchool;
import io.druid.query.aggregation.hyperloglog.HyperLogLogCollector;
import io.druid.segment.IndexSpec;
import io.druid.segment.indexing.DataSchema;
import io.druid.segment.indexing.IOConfig;
import io.druid.segment.indexing.IngestionSpec;
import io.druid.segment.indexing.RealtimeTuningConfig;
import io.druid.segment.indexing.TuningConfig;
import io.druid.segment.indexing.granularity.GranularitySpec;
import io.druid.segment.loading.DataSegmentPusher;
import io.druid.segment.realtime.FireDepartmentMetrics;
import io.druid.segment.realtime.plumber.Plumber;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.HashBasedNumberedShardSpec;
import io.druid.timeline.partition.NoneShardSpec;
import io.druid.timeline.partition.ShardSpec;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.concurrent.CopyOnWriteArrayList;
public class IndexTask extends AbstractFixedIntervalTask
{
  private static final Logger log = new Logger(IndexTask.class);

  // Shared, immutable hashing configuration; declared final so it cannot be
  // reassigned (it was previously a mutable static field).
  private static final HashFunction hashFunction = Hashing.murmur3_128();

  /**
   * Should we index this inputRow? Decision is based on our interval and shardSpec.
   *
   * @param inputRow the row to check
   *
   * @return true or false
   */
  private static boolean shouldIndex(
      final ShardSpec shardSpec,
      final Interval interval,
      final InputRow inputRow,
      final QueryGranularity rollupGran
  )
  {
    return interval.contains(inputRow.getTimestampFromEpoch())
           && shardSpec.isInChunk(rollupGran.truncate(inputRow.getTimestampFromEpoch()), inputRow);
  }

  /** Returns {@code id} when supplied, otherwise derives a unique-ish id from the datasource and now. */
  private static String makeId(String id, IndexIngestionSpec ingestionSchema)
  {
    if (id == null) {
      return String.format("index_%s_%s", makeDataSource(ingestionSchema), new DateTime().toString());
    }
    return id;
  }

  private static String makeDataSource(IndexIngestionSpec ingestionSchema)
  {
    return ingestionSchema.getDataSchema().getDataSource();
  }

  /** Computes the overall interval spanned by the granularity spec's bucket intervals. */
  private static Interval makeInterval(IndexIngestionSpec ingestionSchema)
  {
    GranularitySpec spec = ingestionSchema.getDataSchema().getGranularitySpec();
    return new Interval(
        spec.bucketIntervals().get().first().getStart(),
        spec.bucketIntervals().get().last().getEnd()
    );
  }

  /** Adapts an {@link IndexTuningConfig} to the realtime tuning config the plumber expects. */
  static RealtimeTuningConfig convertTuningConfig(ShardSpec spec, IndexTuningConfig config)
  {
    return new RealtimeTuningConfig(
        config.getRowFlushBoundary(),
        null,
        null,
        null,
        null,
        null,
        null,
        spec,
        config.getIndexSpec(),
        null,
        null,
        null,
        null
    );
  }

  @JsonIgnore
  private final IndexIngestionSpec ingestionSchema;

  private final ObjectMapper jsonMapper;

  @JsonCreator
  public IndexTask(
      @JsonProperty("id") String id,
      @JsonProperty("spec") IndexIngestionSpec ingestionSchema,
      @JacksonInject ObjectMapper jsonMapper
  )
  {
    super(
        // _not_ the version, just something uniqueish
        makeId(id, ingestionSchema),
        makeDataSource(ingestionSchema),
        makeInterval(ingestionSchema)
    );
    this.ingestionSchema = ingestionSchema;
    this.jsonMapper = jsonMapper;
  }

  @Override
  public String getType()
  {
    return "index";
  }

  @JsonProperty("spec")
  public IndexIngestionSpec getIngestionSchema()
  {
    return ingestionSchema;
  }

  /**
   * Runs the indexing job: for each granularity bucket that contains data,
   * determines the shard specs, builds one segment per (bucket, shard) pair,
   * and publishes all resulting segments in a single call.
   */
  @Override
  public TaskStatus run(TaskToolbox toolbox) throws Exception
  {
    final GranularitySpec granularitySpec = ingestionSchema.getDataSchema().getGranularitySpec();
    final int targetPartitionSize = ingestionSchema.getTuningConfig().getTargetPartitionSize();
    final TaskLock myLock = Iterables.getOnlyElement(getTaskLocks(toolbox));
    final Set<DataSegment> segments = Sets.newHashSet();
    // Only index buckets that actually contain data.
    final Set<Interval> validIntervals = Sets.intersection(granularitySpec.bucketIntervals().get(), getDataIntervals());
    if (validIntervals.isEmpty()) {
      throw new ISE("No valid data intervals found. Check your configs!");
    }
    for (final Interval bucket : validIntervals) {
      final List<ShardSpec> shardSpecs;
      if (targetPartitionSize > 0) {
        // Shard count driven by estimated per-bucket cardinality.
        shardSpecs = determinePartitions(bucket, targetPartitionSize, granularitySpec.getQueryGranularity());
      } else {
        int numShards = ingestionSchema.getTuningConfig().getNumShards();
        if (numShards > 0) {
          // Explicit shard count requested by the tuning config.
          shardSpecs = Lists.newArrayList();
          for (int i = 0; i < numShards; i++) {
            shardSpecs.add(new HashBasedNumberedShardSpec(i, numShards, jsonMapper));
          }
        } else {
          shardSpecs = ImmutableList.<ShardSpec>of(new NoneShardSpec());
        }
      }
      for (final ShardSpec shardSpec : shardSpecs) {
        final DataSegment segment = generateSegment(
            toolbox,
            ingestionSchema.getDataSchema(),
            shardSpec,
            bucket,
            myLock.getVersion()
        );
        segments.add(segment);
      }
    }
    toolbox.pushSegments(segments);
    return TaskStatus.success(getId());
  }

  /**
   * Scans the firehose once and returns the sorted set of granularity buckets
   * that contain at least one event.
   */
  private SortedSet<Interval> getDataIntervals() throws IOException
  {
    final FirehoseFactory firehoseFactory = ingestionSchema.getIOConfig().getFirehoseFactory();
    final GranularitySpec granularitySpec = ingestionSchema.getDataSchema().getGranularitySpec();
    SortedSet<Interval> retVal = Sets.newTreeSet(Comparators.intervalsByStartThenEnd());
    int unparsed = 0;
    try (Firehose firehose = firehoseFactory.connect(ingestionSchema.getDataSchema().getParser())) {
      while (firehose.hasMore()) {
        final InputRow inputRow = firehose.nextRow();
        DateTime dt = new DateTime(inputRow.getTimestampFromEpoch());
        Optional<Interval> interval = granularitySpec.bucketInterval(dt);
        if (interval.isPresent()) {
          retVal.add(interval.get());
        } else {
          unparsed++;
        }
      }
    }
    if (unparsed > 0) {
      // (typo fixed: was "Unable to to find")
      log.warn("Unable to find a matching interval for [%,d] events", unparsed);
    }
    return retVal;
  }

  /**
   * Estimates the row cardinality of {@code interval} with HyperLogLog and
   * derives the hash-based shard specs needed to get roughly
   * {@code targetPartitionSize} rows per partition.
   */
  private List<ShardSpec> determinePartitions(
      final Interval interval,
      final int targetPartitionSize,
      final QueryGranularity queryGranularity
  ) throws IOException
  {
    log.info("Determining partitions for interval[%s] with targetPartitionSize[%d]", interval, targetPartitionSize);
    final FirehoseFactory firehoseFactory = ingestionSchema.getIOConfig().getFirehoseFactory();
    // The implementation of this determine partitions stuff is less than optimal. Should be done better.
    // Use HLL to estimate number of rows
    HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
    // Load data
    try (Firehose firehose = firehoseFactory.connect(ingestionSchema.getDataSchema().getParser())) {
      while (firehose.hasMore()) {
        final InputRow inputRow = firehose.nextRow();
        if (interval.contains(inputRow.getTimestampFromEpoch())) {
          final List<Object> groupKey = Rows.toGroupKey(
              queryGranularity.truncate(inputRow.getTimestampFromEpoch()),
              inputRow
          );
          collector.add(
              hashFunction.hashBytes(jsonMapper.writeValueAsBytes(groupKey)).asBytes()
          );
        }
      }
    }
    final double numRows = collector.estimateCardinality();
    log.info("Estimated approximately [%,f] rows of data.", numRows);
    int numberOfShards = (int) Math.ceil(numRows / targetPartitionSize);
    // Never create more shards than estimated rows.
    if ((double) numberOfShards > numRows) {
      numberOfShards = (int) numRows;
    }
    log.info("Will require [%,d] shard(s).", numberOfShards);
    // ShardSpecs we will return
    final List<ShardSpec> shardSpecs = Lists.newArrayList();
    if (numberOfShards == 1) {
      shardSpecs.add(new NoneShardSpec());
    } else {
      for (int i = 0; i < numberOfShards; ++i) {
        shardSpecs.add(new HashBasedNumberedShardSpec(i, numberOfShards, jsonMapper));
      }
    }
    return shardSpecs;
  }

  /**
   * Reads the firehose for {@code interval}, feeds matching rows to a plumber
   * for {@code shardSpec}, and returns the single segment that was pushed.
   */
  private DataSegment generateSegment(
      final TaskToolbox toolbox,
      final DataSchema schema,
      final ShardSpec shardSpec,
      final Interval interval,
      final String version
  ) throws IOException
  {
    // Set up temporary directory.
    final File tmpDir = new File(
        toolbox.getTaskWorkDir(),
        String.format(
            "%s_%s_%s_%s_%s",
            this.getDataSource(),
            interval.getStart(),
            interval.getEnd(),
            version,
            shardSpec.getPartitionNum()
        )
    );
    final FirehoseFactory firehoseFactory = ingestionSchema.getIOConfig().getFirehoseFactory();
    final int rowFlushBoundary = ingestionSchema.getTuningConfig().getRowFlushBoundary();
    // We need to track published segments.
    final List<DataSegment> pushedSegments = new CopyOnWriteArrayList<DataSegment>();
    final DataSegmentPusher wrappedDataSegmentPusher = new DataSegmentPusher()
    {
      @Override
      public String getPathForHadoop(String dataSource)
      {
        return toolbox.getSegmentPusher().getPathForHadoop(dataSource);
      }

      @Override
      public DataSegment push(File file, DataSegment segment) throws IOException
      {
        final DataSegment pushedSegment = toolbox.getSegmentPusher().push(file, segment);
        pushedSegments.add(pushedSegment);
        return pushedSegment;
      }
    };
    // Create firehose + plumber
    final FireDepartmentMetrics metrics = new FireDepartmentMetrics();
    final Firehose firehose = firehoseFactory.connect(ingestionSchema.getDataSchema().getParser());
    final Plumber plumber = new YeOldePlumberSchool(
        interval,
        version,
        wrappedDataSegmentPusher,
        tmpDir
    ).findPlumber(
        schema,
        convertTuningConfig(shardSpec, ingestionSchema.getTuningConfig()),
        metrics
    );
    // rowFlushBoundary for this job
    final int myRowFlushBoundary = rowFlushBoundary > 0
                                   ? rowFlushBoundary
                                   : toolbox.getConfig().getDefaultRowFlushBoundary();
    final QueryGranularity rollupGran = ingestionSchema.getDataSchema().getGranularitySpec().getQueryGranularity();
    try {
      plumber.startJob();
      while (firehose.hasMore()) {
        final InputRow inputRow = firehose.nextRow();
        if (shouldIndex(shardSpec, interval, inputRow, rollupGran)) {
          int numRows = plumber.add(inputRow);
          if (numRows == -1) {
            throw new ISE(
                String.format(
                    "Was expecting non-null sink for timestamp[%s]",
                    new DateTime(inputRow.getTimestampFromEpoch())
                )
            );
          }
          metrics.incrementProcessed();
          if (numRows >= myRowFlushBoundary) {
            plumber.persist(firehose.commit());
          }
        } else {
          metrics.incrementThrownAway();
        }
      }
    }
    finally {
      firehose.close();
    }
    // NOTE(review): firehose.commit() is invoked after the firehose was closed in
    // the finally block above -- confirm the firehose implementations in use
    // tolerate commit-after-close, or consider moving this persist into the try.
    plumber.persist(firehose.commit());
    try {
      plumber.finishJob();
    }
    finally {
      log.info(
          "Task[%s] interval[%s] partition[%d] took in %,d rows (%,d processed, %,d unparseable, %,d thrown away)"
          + " and output %,d rows",
          getId(),
          interval,
          shardSpec.getPartitionNum(),
          metrics.processed() + metrics.unparseable() + metrics.thrownAway(),
          metrics.processed(),
          metrics.unparseable(),
          metrics.thrownAway(),
          metrics.rowOutput()
      );
    }
    // We expect a single segment to have been created.
    return Iterables.getOnlyElement(pushedSegments);
  }

  /** Combined dataSchema / ioConfig / tuningConfig payload for an index task. */
  public static class IndexIngestionSpec extends IngestionSpec<IndexIOConfig, IndexTuningConfig>
  {
    private final DataSchema dataSchema;
    private final IndexIOConfig ioConfig;
    private final IndexTuningConfig tuningConfig;

    @JsonCreator
    public IndexIngestionSpec(
        @JsonProperty("dataSchema") DataSchema dataSchema,
        @JsonProperty("ioConfig") IndexIOConfig ioConfig,
        @JsonProperty("tuningConfig") IndexTuningConfig tuningConfig
    )
    {
      super(dataSchema, ioConfig, tuningConfig);
      this.dataSchema = dataSchema;
      this.ioConfig = ioConfig;
      // Fall back to an all-defaults tuning config when none was supplied.
      this.tuningConfig = tuningConfig == null ? new IndexTuningConfig(0, 0, null, null) : tuningConfig;
    }

    @Override
    @JsonProperty("dataSchema")
    public DataSchema getDataSchema()
    {
      return dataSchema;
    }

    @Override
    @JsonProperty("ioConfig")
    public IndexIOConfig getIOConfig()
    {
      return ioConfig;
    }

    @Override
    @JsonProperty("tuningConfig")
    public IndexTuningConfig getTuningConfig()
    {
      return tuningConfig;
    }
  }

  /** I/O configuration: just the firehose that supplies the input rows. */
  @JsonTypeName("index")
  public static class IndexIOConfig implements IOConfig
  {
    private final FirehoseFactory firehoseFactory;

    @JsonCreator
    public IndexIOConfig(
        @JsonProperty("firehose") FirehoseFactory firehoseFactory
    )
    {
      this.firehoseFactory = firehoseFactory;
    }

    @JsonProperty("firehose")
    public FirehoseFactory getFirehoseFactory()
    {
      return firehoseFactory;
    }
  }

  /** Tuning knobs; zero values are replaced by the documented defaults. */
  @JsonTypeName("index")
  public static class IndexTuningConfig implements TuningConfig
  {
    private static final int DEFAULT_TARGET_PARTITION_SIZE = 5000000;
    private static final int DEFAULT_ROW_FLUSH_BOUNDARY = 500000;
    private static final IndexSpec DEFAULT_INDEX_SPEC = new IndexSpec();

    private final int targetPartitionSize;
    private final int rowFlushBoundary;
    private final int numShards;
    private final IndexSpec indexSpec;

    @JsonCreator
    public IndexTuningConfig(
        @JsonProperty("targetPartitionSize") int targetPartitionSize,
        @JsonProperty("rowFlushBoundary") int rowFlushBoundary,
        @JsonProperty("numShards") @Nullable Integer numShards,
        @JsonProperty("indexSpec") @Nullable IndexSpec indexSpec
    )
    {
      this.targetPartitionSize = targetPartitionSize == 0 ? DEFAULT_TARGET_PARTITION_SIZE : targetPartitionSize;
      Preconditions.checkArgument(rowFlushBoundary >= 0, "rowFlushBoundary should be positive or zero");
      this.rowFlushBoundary = rowFlushBoundary == 0 ? DEFAULT_ROW_FLUSH_BOUNDARY : rowFlushBoundary;
      this.numShards = numShards == null ? -1 : numShards;
      this.indexSpec = indexSpec == null ? DEFAULT_INDEX_SPEC : indexSpec;
      // -1 acts as "unset": the caller may set at most one of the two knobs.
      Preconditions.checkArgument(
          this.targetPartitionSize == -1 || this.numShards == -1,
          "targetPartitionsSize and shardCount both cannot be set"
      );
    }

    @JsonProperty
    public int getTargetPartitionSize()
    {
      return targetPartitionSize;
    }

    @JsonProperty
    public int getRowFlushBoundary()
    {
      return rowFlushBoundary;
    }

    @JsonProperty
    public int getNumShards()
    {
      return numShards;
    }

    @JsonProperty
    public IndexSpec getIndexSpec()
    {
      return indexSpec;
    }
  }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.skyframe.SkyFunction.Environment;
import com.google.devtools.build.skyframe.SkyFunctionException.Transience;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nullable;
/**
* A helper class to create graphs and run skyframe tests over these graphs.
*
* <p>There are two types of values, computing values, which may not be set to a constant value,
* and leaf values, which must be set to a constant value and may not have any dependencies.
*
* <p>Note that the value builder looks into the test values created here to determine how to
* behave. However, skyframe will only re-evaluate the value and call the value builder if any of
* its dependencies has changed. That means in order to change the set of dependencies of a value,
* you need to also change one of its previous dependencies to force re-evaluation. Changing a
* computing value does not mark it as modified.
*/
public class GraphTester {
public static final SkyFunctionName NODE_TYPE = SkyFunctionName.FOR_TESTING;
private final ImmutableMap<SkyFunctionName, ? extends SkyFunction> functionMap =
ImmutableMap.of(GraphTester.NODE_TYPE, new DelegatingFunction());
private final Map<SkyKey, TestFunction> values = new HashMap<>();
private final Set<SkyKey> modifiedValues = new LinkedHashSet<>();
public TestFunction getOrCreate(String name) {
return getOrCreate(skyKey(name));
}
public TestFunction getOrCreate(SkyKey key) {
return getOrCreate(key, false);
}
public TestFunction getOrCreate(SkyKey key, boolean markAsModified) {
TestFunction result = values.get(key);
if (result == null) {
result = new TestFunction();
values.put(key, result);
} else if (markAsModified) {
modifiedValues.add(key);
}
return result;
}
public TestFunction set(String key, SkyValue value) {
return set(skyKey(key), value);
}
public TestFunction set(SkyKey key, SkyValue value) {
return getOrCreate(key, true).setConstantValue(value);
}
public ImmutableSet<SkyKey> getModifiedValues() {
return ImmutableSet.copyOf(modifiedValues);
}
public void clearModifiedValues() {
modifiedValues.clear();
}
  /**
   * Returns a {@link SkyFunction} that evaluates any requested key by looking up its
   * {@link TestFunction} in {@link #values} and replaying the configured behavior:
   * delegate builder, warning/progress events, dependencies, errors, constant value,
   * or computed value, in that order.
   */
  public SkyFunction getFunction() {
    return new SkyFunction() {
      @Override
      public SkyValue compute(SkyKey key, Environment env)
          throws SkyFunctionException, InterruptedException {
        TestFunction builder = values.get(key);
        Preconditions.checkState(builder != null, "No TestFunction for " + key);
        // A delegate builder, when configured, takes over the whole evaluation.
        if (builder.builder != null) {
          return builder.builder.compute(key, env);
        }
        if (builder.warning != null) {
          env.getListener().handle(Event.warn(builder.warning));
        }
        if (builder.progress != null) {
          env.getListener().handle(Event.progress(builder.progress));
        }
        // Request every configured dependency. A dep paired with a non-null
        // alternative value swallows SomeErrorException and substitutes the
        // alternative instead of propagating the error.
        Map<SkyKey, SkyValue> deps = new LinkedHashMap<>();
        boolean oneMissing = false;
        for (Pair<SkyKey, SkyValue> dep : builder.deps) {
          SkyValue value;
          if (dep.second == null) {
            value = env.getValue(dep.first);
          } else {
            try {
              value = env.getValueOrThrow(dep.first, SomeErrorException.class);
            } catch (SomeErrorException e) {
              value = dep.second;
            }
          }
          if (value == null) {
            oneMissing = true;
          } else {
            deps.put(dep.first, value);
          }
          // Sanity check: the environment must report missing values iff we saw one.
          Preconditions.checkState(oneMissing == env.valuesMissing());
        }
        if (env.valuesMissing()) {
          // Skyframe will re-invoke this function once the missing deps are done.
          return null;
        }
        if (builder.hasTransientError) {
          throw new GenericFunctionException(new SomeErrorException(key.toString()),
              Transience.TRANSIENT);
        }
        if (builder.hasError) {
          throw new GenericFunctionException(new SomeErrorException(key.toString()),
              Transience.PERSISTENT);
        }
        // A constant value wins over a computer.
        if (builder.value != null) {
          return builder.value;
        }
        if (Thread.currentThread().isInterrupted()) {
          throw new InterruptedException(key.toString());
        }
        return builder.computer.compute(deps, env);
      }

      @Nullable
      @Override
      public String extractTag(SkyKey skyKey) {
        return values.get(skyKey).tag;
      }
    };
  }
/** Wraps a plain string into a SkyKey of this tester's NODE_TYPE. */
public static SkyKey skyKey(String key) {
  return new SkyKey(NODE_TYPE, key);
}
/**
 * A value in the testing graph that is constructed in the tester.
 *
 * <p>Configured through the fluent setters below; the anonymous function built by
 * {@code getFunction()} reads these fields directly when evaluating the key.
 */
public class TestFunction {
  // TODO(bazel-team): We could use a multiset here to simulate multi-pass dependency discovery.
  // Declared dependencies; a non-null paired SkyValue is the fallback used when
  // fetching that dependency throws SomeErrorException.
  private final Set<Pair<SkyKey, SkyValue>> deps = new LinkedHashSet<>();
  // Constant result; mutually exclusive with computer (enforced in the setters).
  private SkyValue value;
  // Callback-computed result; mutually exclusive with value.
  private ValueComputer computer;
  // When non-null, overrides every other configured behavior.
  private SkyFunction builder = null;
  private boolean hasTransientError;
  private boolean hasError;
  private String warning;
  private String progress;
  private String tag;

  /** Declares a dependency on the node named {@code name}. */
  public TestFunction addDependency(String name) {
    return addDependency(skyKey(name));
  }

  /** Declares a dependency on {@code key}. */
  public TestFunction addDependency(SkyKey key) {
    deps.add(Pair.<SkyKey, SkyValue>of(key, null));
    return this;
  }

  /** Removes a previously declared (non-error) dependency by name. */
  public TestFunction removeDependency(String name) {
    return removeDependency(skyKey(name));
  }

  /** Removes a previously declared (non-error) dependency on {@code key}. */
  public TestFunction removeDependency(SkyKey key) {
    deps.remove(Pair.<SkyKey, SkyValue>of(key, null));
    return this;
  }

  /** Declares a dependency whose error, if thrown, is replaced by {@code altValue}. */
  public TestFunction addErrorDependency(String name, SkyValue altValue) {
    return addErrorDependency(skyKey(name), altValue);
  }

  /** Declares a dependency on {@code key} with {@code altValue} as its error fallback. */
  public TestFunction addErrorDependency(SkyKey key, SkyValue altValue) {
    deps.add(Pair.of(key, altValue));
    return this;
  }

  /** Sets a constant result; illegal if a computed value was already configured. */
  public TestFunction setConstantValue(SkyValue value) {
    Preconditions.checkState(this.computer == null);
    this.value = value;
    return this;
  }

  /** Sets a computed result; illegal if a constant value was already configured. */
  public TestFunction setComputedValue(ValueComputer computer) {
    Preconditions.checkState(this.value == null);
    this.computer = computer;
    return this;
  }

  /**
   * Installs a full custom SkyFunction. Only legal while this TestFunction is
   * otherwise completely unconfigured, since the builder supersedes everything.
   */
  public TestFunction setBuilder(SkyFunction builder) {
    Preconditions.checkState(this.value == null);
    Preconditions.checkState(this.computer == null);
    Preconditions.checkState(deps.isEmpty());
    Preconditions.checkState(!hasTransientError);
    Preconditions.checkState(!hasError);
    Preconditions.checkState(warning == null);
    Preconditions.checkState(progress == null);
    this.builder = builder;
    return this;
  }

  /** Makes evaluation throw a TRANSIENT error. */
  public TestFunction setHasTransientError(boolean hasError) {
    this.hasTransientError = hasError;
    return this;
  }

  /** Makes evaluation throw a PERSISTENT error. */
  public TestFunction setHasError(boolean hasError) {
    // TODO(bazel-team): switch to an enum for hasError.
    this.hasError = hasError;
    return this;
  }

  /** Emits {@code warning} as a warning event before dependencies are requested. */
  public TestFunction setWarning(String warning) {
    this.warning = warning;
    return this;
  }

  /** Emits {@code info} as a progress event before dependencies are requested. */
  public TestFunction setProgress(String info) {
    this.progress = info;
    return this;
  }

  /** Sets the tag returned by the function's extractTag. */
  public TestFunction setTag(String tag) {
    this.tag = tag;
    return this;
  }
}
/** Converts each name into a SkyKey of GraphTester.NODE_TYPE, preserving order. */
public static SkyKey[] toSkyKeys(String... names) {
  SkyKey[] keys = new SkyKey[names.length];
  int index = 0;
  for (String name : names) {
    keys[index++] = new SkyKey(GraphTester.NODE_TYPE, name);
  }
  return keys;
}
/** Converts a single name into a SkyKey of GraphTester.NODE_TYPE. */
public static SkyKey toSkyKey(String name) {
  SkyKey[] single = toSkyKeys(name);
  return single[0];
}
/**
 * SkyFunction that forwards to whatever {@link #getFunction()} currently returns.
 * Note: getFunction() constructs a fresh anonymous function on every call, so each
 * compute/extractTag invocation goes through a new delegate instance.
 */
private class DelegatingFunction implements SkyFunction {
  @Override
  public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException,
      InterruptedException {
    return getFunction().compute(skyKey, env);
  }

  @Nullable
  @Override
  public String extractTag(SkyKey skyKey) {
    return getFunction().extractTag(skyKey);
  }
}
/** Returns the function map this tester exposes to the evaluator. */
public ImmutableMap<SkyFunctionName, ? extends SkyFunction> getSkyFunctionMap() {
  return functionMap;
}
/**
 * Simple value class that stores strings.
 */
public static class StringValue implements SkyValue {

  protected final String value;

  public StringValue(String value) {
    this.value = value;
  }

  /** Returns the wrapped string. */
  public String getValue() {
    return value;
  }

  @Override
  public boolean equals(Object o) {
    // Equal iff the other object is a StringValue wrapping an equal string.
    return (o instanceof StringValue) && value.equals(((StringValue) o).value);
  }

  @Override
  public int hashCode() {
    return value.hashCode();
  }

  @Override
  public String toString() {
    return "StringValue: " + getValue();
  }

  /** Static factory wrapping {@code string}. */
  public static StringValue of(String string) {
    return new StringValue(string);
  }

  /** Downcasts {@code skyValue}, asserting that it is actually a StringValue. */
  public static StringValue from(SkyValue skyValue) {
    assertThat(skyValue).isInstanceOf(StringValue.class);
    return (StringValue) skyValue;
  }
}
/** A StringValue that is also a NotComparableSkyValue. */
public static class NotComparableStringValue extends StringValue
    implements NotComparableSkyValue {

  public NotComparableStringValue(String value) {
    super(value);
  }

  @Override
  public boolean equals(Object o) {
    // Deliberately unsupported: this value must never be compared.
    String message = value + " is incomparable - what are you doing?";
    throw new UnsupportedOperationException(message);
  }

  @Override
  public int hashCode() {
    String message = value + " is incomparable - what are you doing?";
    throw new UnsupportedOperationException(message);
  }
}
/**
 * A callback interface to provide the value computation.
 */
public interface ValueComputer {
  /**
   * This is called when all the declared dependencies exist. It may request new dependencies.
   *
   * @param deps the declared dependencies, keyed in declaration order
   * @param env the Skyframe environment, usable for requesting further dependencies
   */
  SkyValue compute(Map<SkyKey, SkyValue> deps, SkyFunction.Environment env)
      throws InterruptedException;
}
/** Computer that passes through its sole dependency's value unchanged. */
public static final ValueComputer COPY = new ValueComputer() {
  @Override
  public SkyValue compute(Map<SkyKey, SkyValue> deps, SkyFunction.Environment env) {
    // getOnlyElement throws unless deps holds exactly one value.
    return Iterables.getOnlyElement(deps.values());
  }
};
/** Computer that concatenates all dependency strings in declaration order. */
public static final ValueComputer CONCATENATE = new ValueComputer() {
  @Override
  public SkyValue compute(Map<SkyKey, SkyValue> deps, SkyFunction.Environment env) {
    StringBuilder joined = new StringBuilder();
    // deps is a LinkedHashMap, so entry order matches declaration order.
    for (Map.Entry<SkyKey, SkyValue> entry : deps.entrySet()) {
      joined.append(((StringValue) entry.getValue()).value);
    }
    return new StringValue(joined.toString());
  }
};
/**
 * Returns a computer that formats the dependency value stored under {@code key},
 * passing its string as the single argument to {@link String#format}.
 */
public static ValueComputer formatter(final SkyKey key, final String format) {
  return new ValueComputer() {
    @Override
    public SkyValue compute(Map<SkyKey, SkyValue> deps, Environment env)
        throws InterruptedException {
      return StringValue.of(String.format(format, StringValue.from(deps.get(key)).getValue()));
    }
  };
}
}
| |
/*
* Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazon.aws.samplecode.travellog.util;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
import com.amazon.aws.samplecode.travellog.aws.S3PhotoUtil;
import com.amazon.aws.samplecode.travellog.aws.S3StorageManager;
import com.amazon.aws.samplecode.travellog.aws.TravelLogStorageObject;
import com.amazon.aws.samplecode.travellog.dao.TravelLogDAO;
import com.amazon.aws.samplecode.travellog.entity.Comment;
import com.amazon.aws.samplecode.travellog.entity.Commenter;
import com.amazon.aws.samplecode.travellog.entity.Entry;
import com.amazon.aws.samplecode.travellog.entity.Journal;
import com.amazon.aws.samplecode.travellog.entity.Photo;
/**
* This class is responsible for calling out to S3 and downloading prepackaged data
* for loading into the travellog system. The zip bundles that are downloaded are
* just a collection of properties files that map to the entities we store in the
* database.
*/
public class DataLoader implements Runnable {

    private final String bucketName;
    private final String storagePath;

    // Maps the numeric id parsed from "entry.<id>" file names to the persisted
    // Entry, so photos and comments can be attached to the right entry.
    private Map<Integer, Entry> entryMap = new LinkedHashMap<Integer, Entry>();

    private Journal journal;
    private SimpleDateFormat formatter = new SimpleDateFormat("MM/dd/yyyy");
    private SimpleDateFormat hourFormatter = new SimpleDateFormat("MM/dd/yyyy h:mm:ss a z");
    private TravelLogDAO dao;

    private static Logger logger = Logger.getLogger(DataLoader.class.getName());

    /**
     * Basic constructor for the data loader, setting up where to retrieve the zip bundle from
     * @param bucketName bucket for the zip bundle
     * @param storagePath the storage path within the bucket that points to the zip bundle
     * @param dao DAO to write the data that we're importing
     */
    public DataLoader (String bucketName, String storagePath, TravelLogDAO dao) {
        this.bucketName=bucketName;
        this.storagePath=storagePath;
        this.dao = dao;
    }

    /**
     * Downloads the zip bundle from S3, extracts it to a temp directory, purges
     * any existing data, and loads the extracted data into the journal.
     */
    public void run() {
        InputStream input = null;
        try {
            //Create S3 storage object to use in request for bundle
            TravelLogStorageObject obj = new TravelLogStorageObject();
            obj.setBucketName(bucketName);
            obj.setStoragePath(storagePath);

            // Make request to load from S3 storage. We use a client with the
            // default (global) endpoint, since we may be accessing buckets from
            // a region different than the one we're working in.
            S3StorageManager manager = new S3StorageManager(S3StorageManager.createClient());
            input = manager.loadInputStream(obj);

            //Create temporary directory
            File tmpDir = File.createTempFile("travellog", "");
            tmpDir.delete(); //Wipe out temporary file to replace with a directory
            tmpDir.mkdirs();

            try {
                //Extract downloaded data to the temporary directory
                TravelLogFileUtil.extractZipToDirectory(tmpDir, input);

                //Clear out any previously existing data
                purgeData();

                //Load the new data
                loadData(tmpDir);
            } finally {
                // FIX: File.delete() cannot remove a non-empty directory, so the
                // extracted files were previously left behind. Delete recursively.
                FileUtils.deleteQuietly(tmpDir);
            }
        }
        catch (Exception e) {
            logger.log(Level.SEVERE,e.getMessage(),e);
        }
        finally {
            // FIX: the S3 input stream was previously never closed.
            if (input != null) {
                try {
                    input.close();
                } catch (IOException e) {
                    logger.log(Level.WARNING, "Unable to close S3 input stream", e);
                }
            }
        }
    }

    /**
     * This method will go into a specified directory and load all the data
     * into the journal. The file structure is made up of a series of
     * property files to provide a simple name/value pair matching that can
     * then be loaded through our SimpleJPA objects.
     *
     * The journal object is in a file "journal" and then entries are in sequential
     * order like this:
     *
     * <ul>
     * <li>entry.1</li>
     * <li>entry.2</li>
     * <li>entry.3</li>
     * <ul>
     *
     * Photo metadata is stored with a prefix like this:
     * <ul>
     * <li>photo.[entry #].[sequence].txt</li>
     * </ul>
     *
     * So for example, photos associated with entry #2 would be as follows:
     * <ul>
     * <li>photo.1.1</li>
     * <li>photo.1.2</li>
     * </ul>
     *
     * Photos will be loaded in the order of the sequence id. The txt file contains
     * a property "file" that points to the actual image that should be loaded.
     *
     * Comments use the same nomenclature as photos.
     *
     * @param directory the directory where the imported data has been extracted to
     * @throws IOException
     * @throws ParseException
     */
    private void loadData (File directory) throws IOException, ParseException {
        //Load journal
        Properties journalProps = loadProperties(new File(directory, "journal"));
        journal = buildJournalFromProps(journalProps);
        dao.saveJournal(journal);

        //Load entries, remembering each one by its numeric id for later linking
        for (File entryFile : listFiles(directory, new EntryFilter())) {
            Properties entryProps = loadProperties(entryFile);
            Entry entry = buildEntryFromProps(entryProps);
            dao.saveEntry(entry);
            entryMap.put(parseEntryId(entryFile), entry);
        }

        //Load photos and attach each to its entry
        for (File photoFile : listFiles(directory, new PhotoFilter())) {
            Properties photoProps = loadProperties(photoFile);
            Photo photo = buildPhotoFromProps(photoProps);
            Entry entry = entryMap.get(parseEntryId(photoFile));
            photo.setEntry(entry);
            dao.savePhoto(photo);

            //Load the image file referenced by the "file" property and push it to S3
            String fileName = photoProps.getProperty("file");
            File file = new File(directory,fileName);
            byte [] photoData = FileUtils.readFileToByteArray(file);
            photo = S3PhotoUtil.storePhoto(photo, photoData);
            dao.savePhoto(photo);
        }

        //Load comments and attach each to its entry
        for (File commentFile : listFiles(directory, new CommentFilter())) {
            Properties commentProps = loadProperties(commentFile);
            Comment comment = buildCommentFromProps(commentProps);
            Entry entry = entryMap.get(parseEntryId(commentFile));
            comment.setEntry(entry);
            dao.saveCommenter(comment.getCommenter());
            dao.saveComment(comment);
        }
    }

    /**
     * Loads a properties file, always closing the underlying stream.
     * (Previously the FileInputStreams were opened inline and leaked.)
     */
    private Properties loadProperties (File file) throws IOException {
        Properties props = new Properties();
        FileInputStream in = new FileInputStream(file);
        try {
            props.load(in);
        } finally {
            in.close();
        }
        return props;
    }

    /** Null-safe wrapper around File.listFiles: returns an empty array on failure. */
    private static File[] listFiles (File directory, FileFilter filter) {
        File[] files = directory.listFiles(filter);
        return (files == null) ? new File[0] : files;
    }

    /** Parses the entry id from file names of the form "prefix.<entryId>[.sequence]". */
    private static int parseEntryId (File file) {
        String [] nameSplit = file.getName().split("\\.");
        return Integer.parseInt(nameSplit[1]);
    }

    /** Deletes every journal, along with all of its entries, photos, and comments. */
    private void purgeData () {
        List<Journal> journals = dao.getJournals();
        for (Journal journal:journals) {
            List<Entry> entries = dao.getEntries(journal);
            for (Entry entry:entries) {
                List<Photo> photos = dao.getPhotos(entry);
                for (Photo photo: photos) {
                    dao.deletePhoto(photo);
                }
                List<Comment> comments = dao.getComments(entry);
                for (Comment comment: comments) {
                    dao.deleteCommenter(comment.getCommenter());
                    dao.deleteComment(comment);
                }
                dao.deleteEntry(entry);
            }
            dao.deleteJournal(journal);
        }
    }

    /*
     * The remaining methods are used to convert a property file into the
     * entity we need.
     */

    /** Builds a Journal from its properties file ("title", "description", dates). */
    private Journal buildJournalFromProps (Properties props) throws ParseException {
        Journal journal = new Journal();
        journal.setTitle(props.getProperty("title"));
        journal.setDescription(props.getProperty("description"));
        journal.setStartDate(formatter.parse(props.getProperty("start_date")));
        journal.setEndDate(formatter.parse(props.getProperty("end_date")));
        return journal;
    }

    /** Builds an Entry from its properties file; it is linked to the current journal. */
    private Entry buildEntryFromProps (Properties props) throws ParseException {
        Entry entry = new Entry();
        entry.setTitle(props.getProperty("title"));
        entry.setEntryText(props.getProperty("entry_text"));
        entry.setDestination(props.getProperty("destination"));
        entry.setJournal(journal);
        entry.setDate(formatter.parse(props.getProperty("date")));
        return entry;
    }

    /** Builds a Photo (metadata only; image bytes are loaded separately). */
    private Photo buildPhotoFromProps (Properties props) throws ParseException {
        Photo photo = new Photo();
        photo.setTitle(props.getProperty("title"));
        photo.setDate(formatter.parse(props.getProperty("date")));
        photo.setDescription(props.getProperty("description"));
        photo.setSubject(props.getProperty("subject"));
        return photo;
    }

    /** Builds a Comment and its Commenter from a properties file. */
    private Comment buildCommentFromProps (Properties props) throws ParseException {
        Comment comment = new Comment();
        comment.setBody(props.getProperty("body"));
        comment.setDate(hourFormatter.parse(props.getProperty("date")));

        Commenter commenter = new Commenter();
        commenter.setEmail(props.getProperty("commenter.email"));
        commenter.setName(props.getProperty("commenter.name"));
        comment.setCommenter(commenter);
        return comment;
    }
}
//FileFilter classes used for searching for photo, entry, and comment files
/** Accepts only files whose name begins with the "entry" prefix. */
class EntryFilter implements FileFilter {
    public boolean accept(File pathname) {
        String name = pathname.getName();
        return name.startsWith("entry");
    }
}
/** Accepts only files whose name begins with the "photo" prefix. */
class PhotoFilter implements FileFilter {
    public boolean accept(File pathname) {
        String name = pathname.getName();
        return name.startsWith("photo");
    }
}
/** Accepts only files whose name begins with the "comment" prefix. */
class CommentFilter implements FileFilter {
    public boolean accept(File pathname) {
        String name = pathname.getName();
        return name.startsWith("comment");
    }
}
| |
package com.makeramen;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.LayerDrawable;
import android.net.Uri;
import android.util.AttributeSet;
import android.util.Log;
import android.widget.ImageView;
@SuppressWarnings("UnusedDeclaration")
public class RoundedImageView extends ImageView {

  public static final String TAG = "RoundedImageView";
  public static final int DEFAULT_RADIUS = 0;
  public static final int DEFAULT_BORDER_WIDTH = 0;

  // Indexed by the android:scaleType XML attribute value; order must match the
  // framework's attribute enum.
  private static final ScaleType[] SCALE_TYPES = {
      ScaleType.MATRIX,
      ScaleType.FIT_XY,
      ScaleType.FIT_START,
      ScaleType.FIT_CENTER,
      ScaleType.FIT_END,
      ScaleType.CENTER,
      ScaleType.CENTER_CROP,
      ScaleType.CENTER_INSIDE
  };

  private int mCornerRadius = DEFAULT_RADIUS;
  private int mBorderWidth = DEFAULT_BORDER_WIDTH;
  private ColorStateList mBorderColor =
      ColorStateList.valueOf(RoundedDrawable.DEFAULT_BORDER_COLOR);
  private boolean mOval = false;
  private boolean mRoundBackground = false;

  private int mResource;
  private Drawable mDrawable;
  private Drawable mBackgroundDrawable;
  // Tracked locally because super is always given FIT_XY for the scale types
  // that RoundedDrawable implements itself (see setScaleType).
  private ScaleType mScaleType;

  public RoundedImageView(Context context) {
    super(context);
  }

  public RoundedImageView(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public RoundedImageView(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);

    TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.RoundedImageView, defStyle, 0);

    int index = a.getInt(R.styleable.RoundedImageView_android_scaleType, -1);
    if (index >= 0) {
      setScaleType(SCALE_TYPES[index]);
    } else {
      // default scaletype to FIT_CENTER
      setScaleType(ScaleType.FIT_CENTER);
    }

    mCornerRadius = a.getDimensionPixelSize(R.styleable.RoundedImageView_corner_radius, -1);
    mBorderWidth = a.getDimensionPixelSize(R.styleable.RoundedImageView_border_width, -1);

    // don't allow negative values for radius and border
    if (mCornerRadius < 0) {
      mCornerRadius = DEFAULT_RADIUS;
    }
    if (mBorderWidth < 0) {
      mBorderWidth = DEFAULT_BORDER_WIDTH;
    }

    mBorderColor = a.getColorStateList(R.styleable.RoundedImageView_border_color);
    if (mBorderColor == null) {
      mBorderColor = ColorStateList.valueOf(RoundedDrawable.DEFAULT_BORDER_COLOR);
    }

    mRoundBackground = a.getBoolean(R.styleable.RoundedImageView_round_background, false);
    mOval = a.getBoolean(R.styleable.RoundedImageView_is_oval, false);

    updateDrawableAttrs();
    updateBackgroundDrawableAttrs();

    a.recycle();
  }

  @Override
  protected void drawableStateChanged() {
    super.drawableStateChanged();
    // Redraw so state-dependent border colors take effect.
    invalidate();
  }

  /**
   * Return the current scale type in use by this ImageView.
   *
   * @attr ref android.R.styleable#ImageView_scaleType
   * @see android.widget.ImageView.ScaleType
   */
  @Override
  public ScaleType getScaleType() {
    return mScaleType;
  }

  /**
   * Controls how the image should be resized or moved to match the size
   * of this ImageView.
   *
   * @param scaleType The desired scaling mode.
   * @attr ref android.R.styleable#ImageView_scaleType
   */
  @Override
  public void setScaleType(ScaleType scaleType) {
    if (scaleType == null) {
      throw new NullPointerException();
    }

    if (mScaleType != scaleType) {
      mScaleType = scaleType;

      switch (scaleType) {
        case CENTER:
        case CENTER_CROP:
        case CENTER_INSIDE:
        case FIT_CENTER:
        case FIT_START:
        case FIT_END:
        case FIT_XY:
          // RoundedDrawable handles these itself; the view just fills its bounds.
          super.setScaleType(ScaleType.FIT_XY);
          break;
        default:
          super.setScaleType(scaleType);
          break;
      }

      updateDrawableAttrs();
      updateBackgroundDrawableAttrs();
      invalidate();
    }
  }

  @Override
  public void setImageDrawable(Drawable drawable) {
    mResource = 0;
    mDrawable = RoundedDrawable.fromDrawable(drawable);
    updateDrawableAttrs();
    super.setImageDrawable(mDrawable);
  }

  @Override
  public void setImageBitmap(Bitmap bm) {
    mResource = 0;
    mDrawable = RoundedDrawable.fromBitmap(bm);
    updateDrawableAttrs();
    super.setImageDrawable(mDrawable);
  }

  @Override
  public void setImageResource(int resId) {
    if (mResource != resId) {
      mResource = resId;
      mDrawable = resolveResource();
      updateDrawableAttrs();
      super.setImageDrawable(mDrawable);
    }
  }

  @Override public void setImageURI(Uri uri) {
    super.setImageURI(uri);
    // Re-wrap whatever drawable the framework resolved from the URI.
    setImageDrawable(getDrawable());
  }

  /** Resolves mResource to a rounded drawable, clearing mResource on failure. */
  private Drawable resolveResource() {
    Resources rsrc = getResources();
    if (rsrc == null) {
      return null;
    }

    Drawable d = null;

    if (mResource != 0) {
      try {
        d = rsrc.getDrawable(mResource);
      } catch (Exception e) {
        Log.w(TAG, "Unable to find resource: " + mResource, e);
        // Don't try again.
        mResource = 0;
      }
    }
    return RoundedDrawable.fromDrawable(d);
  }

  @Override
  public void setBackground(Drawable background) {
    setBackgroundDrawable(background);
  }

  private void updateDrawableAttrs() {
    updateAttrs(mDrawable, false);
  }

  private void updateBackgroundDrawableAttrs() {
    updateAttrs(mBackgroundDrawable, true);
  }

  /**
   * Pushes the view's corner/border/oval settings into a rounded drawable;
   * recurses through LayerDrawable layers. Background drawables only get
   * rounding when mRoundBackground is set.
   */
  private void updateAttrs(Drawable drawable, boolean background) {
    if (drawable == null) {
      return;
    }

    if (drawable instanceof RoundedDrawable) {
      ((RoundedDrawable) drawable)
          .setScaleType(mScaleType)
          .setCornerRadius(background && !mRoundBackground ? 0 : mCornerRadius)
          .setBorderWidth(background && !mRoundBackground ? 0 : mBorderWidth)
          .setBorderColors(mBorderColor)
          .setOval(mOval);
    } else if (drawable instanceof LayerDrawable) {
      // loop through layers to and set drawable attrs
      LayerDrawable ld = ((LayerDrawable) drawable);
      int layers = ld.getNumberOfLayers();
      for (int i = 0; i < layers; i++) {
        updateAttrs(ld.getDrawable(i), background);
      }
    }
  }

  @Override
  @Deprecated
  public void setBackgroundDrawable(Drawable background) {
    mBackgroundDrawable = RoundedDrawable.fromDrawable(background);
    updateBackgroundDrawableAttrs();
    super.setBackgroundDrawable(mBackgroundDrawable);
  }

  public int getCornerRadius() {
    return mCornerRadius;
  }

  public void setCornerRadius(int radius) {
    if (mCornerRadius == radius) {
      return;
    }

    mCornerRadius = radius;
    updateDrawableAttrs();
    updateBackgroundDrawableAttrs();
    // FIX: redraw after the radius changes, matching the other setters
    // (setBorderWidth/setOval/setRoundBackground); previously the new radius
    // was not visible until something else triggered an invalidate.
    invalidate();
  }

  public int getBorderWidth() {
    return mBorderWidth;
  }

  public void setBorderWidth(int width) {
    if (mBorderWidth == width) {
      return;
    }

    mBorderWidth = width;
    updateDrawableAttrs();
    updateBackgroundDrawableAttrs();
    invalidate();
  }

  public int getBorderColor() {
    return mBorderColor.getDefaultColor();
  }

  public void setBorderColor(int color) {
    setBorderColors(ColorStateList.valueOf(color));
  }

  public ColorStateList getBorderColors() {
    return mBorderColor;
  }

  public void setBorderColors(ColorStateList colors) {
    if (mBorderColor.equals(colors)) {
      return;
    }

    mBorderColor =
        (colors != null) ? colors : ColorStateList.valueOf(RoundedDrawable.DEFAULT_BORDER_COLOR);
    updateDrawableAttrs();
    updateBackgroundDrawableAttrs();
    // Only redraw if a border is actually drawn.
    if (mBorderWidth > 0) {
      invalidate();
    }
  }

  public boolean isOval() {
    return mOval;
  }

  public void setOval(boolean oval) {
    mOval = oval;
    updateDrawableAttrs();
    updateBackgroundDrawableAttrs();
    invalidate();
  }

  public boolean isRoundBackground() {
    return mRoundBackground;
  }

  public void setRoundBackground(boolean roundBackground) {
    if (mRoundBackground == roundBackground) {
      return;
    }

    mRoundBackground = roundBackground;
    updateBackgroundDrawableAttrs();
    invalidate();
  }
}
| |
/*
* Copyright 2008-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaleidofoundry.core.context;
import static org.kaleidofoundry.core.cache.CacheContextBuilder.CacheManagerRef;
import static org.kaleidofoundry.core.cache.CacheContextBuilder.CacheName;
import static org.kaleidofoundry.core.cache.CacheManagerContextBuilder.ProviderCode;
import static org.kaleidofoundry.core.config.ConfigurationContextBuilder.FileStoreUri;
import static org.kaleidofoundry.core.i18n.I18nContextBuilder.BaseName;
import static org.kaleidofoundry.core.store.FileStoreContextBuilder.BaseUri;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceUnit;
import org.kaleidofoundry.core.cache.Cache;
import org.kaleidofoundry.core.cache.CacheManager;
import org.kaleidofoundry.core.config.Configuration;
import org.kaleidofoundry.core.i18n.I18nMessages;
import org.kaleidofoundry.core.naming.NamingService;
import org.kaleidofoundry.core.store.FileStore;
/**
* @author jraduget
*/
@Singleton
public class MyServiceJavaEE implements MyService {

   /** JPA entity manager bound to the "kaleido" persistence unit. */
   @Inject
   @PersistenceContext(unitName = "kaleido")
   private EntityManager entityManager;

   /** JPA entity manager factory for the "kaleido" persistence unit. */
   @Inject
   @PersistenceUnit(unitName = "kaleido")
   private EntityManagerFactory entityManagerFactory;

   /** Default (unnamed) runtime context injection. */
   @Inject
   @Context
   private RuntimeContext<?> myContext;

   /** Runtime context resolved by the explicit name "namedCtx". */
   @Inject
   @Context("namedCtx")
   private RuntimeContext<?> myNamedContext;

   /** File store named "myStoreCtx", rooted at the classpath:/store base URI. */
   @Inject
   @Context(value = "myStoreCtx", parameters = { @Parameter(name = BaseUri, value = "classpath:/store") })
   private FileStore myStore;

   /** Configuration backed by the classpath properties file given in the parameter. */
   @Inject
   @Context(parameters = { @Parameter(name = FileStoreUri, value = "classpath:/config/myConfig.properties") })
   private Configuration myConfig;

   /** Cache manager using the default provider. */
   @Inject
   @Context
   private CacheManager myDefaultCacheManager;

   /** Cache manager explicitly using the "infinispan" provider. */
   @Inject
   @Context(parameters = { @Parameter(name = ProviderCode, value = "infinispan") })
   private CacheManager myCustomCacheManager;

   /** Cache obtained from the default cache manager. */
   @Inject
   @Context
   private Cache<Integer, String> myDefaultCache;

   /** Named cache "myNamedCache" resolved through the custom cache manager above. */
   @Inject
   @Context(parameters = { @Parameter(name = CacheName, value = "myNamedCache"), @Parameter(name = CacheManagerRef, value = "myCustomCacheManager") })
   private Cache<Integer, String> myCustomCache;

   /** I18n message bundle with the default base name. */
   @Inject
   @Context
   private I18nMessages myDefaultMessages;

   /** I18n message bundle with base name "i18n/messages". */
   @Inject
   @Context(parameters = { @Parameter(name = BaseName, value = "i18n/messages") })
   private I18nMessages myBaseMessages;

   /** Default naming (JNDI) service. */
   @Inject
   @Context
   private NamingService myNamingService;

   /** {@inheritDoc} */
   @Override
   public RuntimeContext<?> getMyContext() {
	return myContext;
   }

   /** {@inheritDoc} */
   @Override
   public RuntimeContext<?> getMyNamedContext() {
	return myNamedContext;
   }

   /** {@inheritDoc} */
   @Override
   public FileStore getMyStore() {
	return myStore;
   }

   /** {@inheritDoc} */
   @Override
   public Configuration getMyConfig() {
	return myConfig;
   }

   /** {@inheritDoc} */
   @Override
   public CacheManager getMyDefaultCacheManager() {
	return myDefaultCacheManager;
   }

   /** {@inheritDoc} */
   @Override
   public CacheManager getMyCustomCacheManager() {
	return myCustomCacheManager;
   }

   /** {@inheritDoc} */
   @Override
   public Cache<Integer, String> getMyDefaultCache() {
	return myDefaultCache;
   }

   /** {@inheritDoc} */
   @Override
   public Cache<Integer, String> getMyCustomCache() {
	return myCustomCache;
   }

   /** {@inheritDoc} */
   @Override
   public I18nMessages getMyDefaultMessages() {
	return myDefaultMessages;
   }

   /** {@inheritDoc} */
   @Override
   public I18nMessages getMyBaseMessages() {
	return myBaseMessages;
   }

   /** {@inheritDoc} */
   @Override
   public NamingService getMyNamingService() {
	return myNamingService;
   }

   /** {@inheritDoc} */
   @Override
   public EntityManagerFactory getEntityManagerFactory() {
	return entityManagerFactory;
   }

   /** {@inheritDoc} */
   @Override
   public EntityManager getEntityManager() {
	return entityManager;
   }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.jaas;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.Principal;
import java.text.MessageFormat;
import java.util.*;
import javax.naming.*;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;
import javax.naming.directory.SearchControls;
import javax.naming.directory.SearchResult;
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.FailedLoginException;
import javax.security.auth.login.LoginException;
import javax.security.auth.spi.LoginModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @version $Rev: $ $Date: $
*/
public class LDAPLoginModule implements LoginModule {
private static final String INITIAL_CONTEXT_FACTORY = "initialContextFactory";
private static final String CONNECTION_URL = "connectionURL";
private static final String CONNECTION_USERNAME = "connectionUsername";
private static final String CONNECTION_PASSWORD = "connectionPassword";
private static final String CONNECTION_PROTOCOL = "connectionProtocol";
private static final String AUTHENTICATION = "authentication";
private static final String USER_BASE = "userBase";
private static final String USER_SEARCH_MATCHING = "userSearchMatching";
private static final String USER_SEARCH_SUBTREE = "userSearchSubtree";
private static final String ROLE_BASE = "roleBase";
private static final String ROLE_NAME = "roleName";
private static final String ROLE_SEARCH_MATCHING = "roleSearchMatching";
private static final String ROLE_SEARCH_SUBTREE = "roleSearchSubtree";
private static final String USER_ROLE_NAME = "userRoleName";
private static final String EXPAND_ROLES = "expandRoles";
private static final String EXPAND_ROLES_MATCHING = "expandRolesMatching";
private static Logger log = LoggerFactory.getLogger(LDAPLoginModule.class);
protected DirContext context;
private Subject subject;
private CallbackHandler handler;
private LDAPLoginProperty [] config;
private String username;
private Set<GroupPrincipal> groups = new HashSet<GroupPrincipal>();
/**
 * Captures the subject and callback handler, and snapshots every supported
 * LDAP option from {@code options} into the {@code config} property array.
 * Options that were not supplied produce properties with a null value.
 */
@Override
public void initialize(Subject subject, CallbackHandler callbackHandler, Map sharedState, Map options) {
    this.subject = subject;
    this.handler = callbackHandler;

    config = new LDAPLoginProperty [] {
        new LDAPLoginProperty (INITIAL_CONTEXT_FACTORY, (String)options.get(INITIAL_CONTEXT_FACTORY)),
        new LDAPLoginProperty (CONNECTION_URL, (String)options.get(CONNECTION_URL)),
        new LDAPLoginProperty (CONNECTION_USERNAME, (String)options.get(CONNECTION_USERNAME)),
        new LDAPLoginProperty (CONNECTION_PASSWORD, (String)options.get(CONNECTION_PASSWORD)),
        new LDAPLoginProperty (CONNECTION_PROTOCOL, (String)options.get(CONNECTION_PROTOCOL)),
        new LDAPLoginProperty (AUTHENTICATION, (String)options.get(AUTHENTICATION)),
        new LDAPLoginProperty (USER_BASE, (String)options.get(USER_BASE)),
        new LDAPLoginProperty (USER_SEARCH_MATCHING, (String)options.get(USER_SEARCH_MATCHING)),
        new LDAPLoginProperty (USER_SEARCH_SUBTREE, (String)options.get(USER_SEARCH_SUBTREE)),
        new LDAPLoginProperty (ROLE_BASE, (String)options.get(ROLE_BASE)),
        new LDAPLoginProperty (ROLE_NAME, (String)options.get(ROLE_NAME)),
        new LDAPLoginProperty (ROLE_SEARCH_MATCHING, (String)options.get(ROLE_SEARCH_MATCHING)),
        new LDAPLoginProperty (ROLE_SEARCH_SUBTREE, (String)options.get(ROLE_SEARCH_SUBTREE)),
        new LDAPLoginProperty (USER_ROLE_NAME, (String)options.get(USER_ROLE_NAME)),
        new LDAPLoginProperty (EXPAND_ROLES, (String) options.get(EXPAND_ROLES)),
        new LDAPLoginProperty (EXPAND_ROLES_MATCHING, (String) options.get(EXPAND_ROLES_MATCHING)),
    };
}
/**
 * Collects the user name and password from the callback handler and attempts
 * LDAP authentication. Returns false when no user name was supplied; throws
 * LoginException when authentication fails.
 */
@Override
public boolean login() throws LoginException {
    NameCallback nameCallback = new NameCallback("User name");
    PasswordCallback passwordCallback = new PasswordCallback("Password", false);
    Callback[] callbacks = new Callback[] {nameCallback, passwordCallback};

    try {
        handler.handle(callbacks);
    } catch (IOException ioe) {
        throw (LoginException)new LoginException().initCause(ioe);
    } catch (UnsupportedCallbackException uce) {
        throw (LoginException)new LoginException().initCause(uce);
    }

    username = nameCallback.getName();
    if (username == null) {
        return false;
    }

    // A missing password is treated as an empty one.
    char[] rawPassword = passwordCallback.getPassword();
    String password = (rawPassword != null) ? new String(rawPassword) : "";

    // authenticate will throw LoginException
    // in case of failed authentication
    authenticate(username, password);
    return true;
}
/**
 * Ends the session for the currently authenticated user.
 *
 * Bug fix: previously only the cached username was cleared, so group
 * principals resolved during a prior login leaked into any subsequent
 * login performed through the same module instance. The cached groups
 * are now discarded as well.
 */
@Override
public boolean logout() throws LoginException {
    username = null;
    groups.clear();
    return true;
}
/**
 * Populates the subject with the authenticated user principal plus every
 * group principal resolved during login.
 */
@Override
public boolean commit() throws LoginException {
    Set<Principal> principals = subject.getPrincipals();
    principals.add(new UserPrincipal(username));
    principals.addAll(groups);
    return true;
}
/**
 * Aborts a partially completed login, discarding state gathered so far.
 *
 * Bug fix: cached group principals are now discarded too; previously they
 * survived an aborted login and would have been committed for the next
 * user authenticated through this module instance.
 */
@Override
public boolean abort() throws LoginException {
    username = null;
    groups.clear();
    return true;
}
/**
 * Quietly closes the given LDAP context, logging (but never propagating)
 * any failure.
 *
 * @param context the directory context to close
 */
protected void close(DirContext context) {
    try {
        context.close();
    } catch (Exception e) {
        // Log the full throwable: the previous e.toString() dropped the stack trace.
        log.error("Error closing LDAP directory context", e);
    }
}
/**
 * Authenticates the given credentials against LDAP: resolves the user's DN
 * via the configured search filter, optionally collects roles stored on the
 * user entry, verifies the password by re-binding as that DN, and finally
 * resolves the user's group roles via getRoles().
 *
 * @param username login name supplied through the callback handler
 * @param password clear-text password to verify
 * @return true on success; false when no user search filter is configured
 *         (the module is then effectively disabled)
 * @throws LoginException (as FailedLoginException) on LDAP errors, unknown
 *         user, or password mismatch
 */
protected boolean authenticate(String username, String password) throws LoginException {
    MessageFormat userSearchMatchingFormat;
    boolean userSearchSubtreeBool;
    // Local shadows the instance field of the same name; open() assigns the field too.
    DirContext context = null;
    if (log.isDebugEnabled()) {
        log.debug("Create the LDAP initial context.");
    }
    try {
        context = open();
    } catch (NamingException ne) {
        FailedLoginException ex = new FailedLoginException("Error opening LDAP connection");
        ex.initCause(ne);
        throw ex;
    }
    // Without a user search filter this module cannot authenticate anyone.
    // NOTE(review): the context opened above is not closed on this path -- confirm.
    if (!isLoginPropertySet(USER_SEARCH_MATCHING))
        return false;
    userSearchMatchingFormat = new MessageFormat(getLDAPPropertyValue(USER_SEARCH_MATCHING));
    userSearchSubtreeBool = Boolean.valueOf(getLDAPPropertyValue(USER_SEARCH_SUBTREE)).booleanValue();
    try {
        // Escape the username so it cannot inject LDAP filter syntax.
        String filter = userSearchMatchingFormat.format(new String[] {
            doRFC2254Encoding(username)
        });
        SearchControls constraints = new SearchControls();
        if (userSearchSubtreeBool) {
            constraints.setSearchScope(SearchControls.SUBTREE_SCOPE);
        } else {
            constraints.setSearchScope(SearchControls.ONELEVEL_SCOPE);
        }
        // setup attributes: only request the user-role attribute when configured.
        List<String> list = new ArrayList<String>();
        if (isLoginPropertySet(USER_ROLE_NAME)) {
            list.add(getLDAPPropertyValue(USER_ROLE_NAME));
        }
        String[] attribs = new String[list.size()];
        list.toArray(attribs);
        constraints.setReturningAttributes(attribs);
        if (log.isDebugEnabled()) {
            log.debug("Get the user DN.");
            log.debug("Looking for the user in LDAP with ");
            log.debug(" base DN: " + getLDAPPropertyValue(USER_BASE));
            log.debug(" filter: " + filter);
        }
        NamingEnumeration<SearchResult> results = context.search(getLDAPPropertyValue(USER_BASE), filter, constraints);
        if (results == null || !results.hasMore()) {
            log.warn("User " + username + " not found in LDAP.");
            // NOTE(review): the context is not closed on this failure path -- confirm.
            throw new FailedLoginException("User " + username + " not found in LDAP.");
        }
        // Only the first match is used; additional matches are tolerated.
        SearchResult result = results.next();
        if (results.hasMore()) {
            // ignore for now
        }
        String dn;
        if (result.isRelative()) {
            // Relative result: rebuild the full DN as
            // <namespace root> + <user base> + <entry name>.
            log.debug("LDAP returned a relative name: {}", result.getName());
            NameParser parser = context.getNameParser("");
            Name contextName = parser.parse(context.getNameInNamespace());
            Name baseName = parser.parse(getLDAPPropertyValue(USER_BASE));
            Name entryName = parser.parse(result.getName());
            Name name = contextName.addAll(baseName);
            name = name.addAll(entryName);
            dn = name.toString();
        } else {
            // Absolute result (e.g. a referral URL): the DN is the URL's path component.
            log.debug("LDAP returned an absolute name: {}", result.getName());
            try {
                URI uri = new URI(result.getName());
                String path = uri.getPath();
                if (path.startsWith("/")) {
                    dn = path.substring(1);
                } else {
                    dn = path;
                }
            } catch (URISyntaxException e) {
                if (context != null) {
                    close(context);
                }
                FailedLoginException ex = new FailedLoginException("Error parsing absolute name as URI.");
                ex.initCause(e);
                throw ex;
            }
        }
        if (log.isDebugEnabled()) {
            log.debug("Using DN [" + dn + "] for binding.");
        }
        Attributes attrs = result.getAttributes();
        if (attrs == null) {
            throw new FailedLoginException("User found, but LDAP entry malformed: " + username);
        }
        List<String> roles = null;
        // Roles stored directly on the user entry (the configured userRoleName attribute).
        if (isLoginPropertySet(USER_ROLE_NAME)) {
            roles = addAttributeValues(getLDAPPropertyValue(USER_ROLE_NAME), attrs, roles);
        }
        // check the credentials by binding to server
        if (bindUser(context, dn, password)) {
            // if authenticated add more roles
            roles = getRoles(context, dn, username, roles);
            if (log.isDebugEnabled()) {
                log.debug("Roles " + roles + " for user " + username);
            }
            for (int i = 0; i < roles.size(); i++) {
                groups.add(new GroupPrincipal(roles.get(i)));
            }
        } else {
            throw new FailedLoginException("Password does not match for user: " + username);
        }
    } catch (CommunicationException e) {
        // NOTE(review): unlike the NamingException branch below, the context is
        // not closed here -- confirm whether that is intentional.
        FailedLoginException ex = new FailedLoginException("Error contacting LDAP");
        ex.initCause(e);
        throw ex;
    } catch (NamingException e) {
        if (context != null) {
            close(context);
        }
        FailedLoginException ex = new FailedLoginException("Error contacting LDAP");
        ex.initCause(e);
        throw ex;
    }
    return true;
}
/**
 * Resolves the roles (groups) of the authenticated user by searching the
 * role base with the configured filter. When role expansion is enabled,
 * nested groups are resolved with a breadth-first traversal, using
 * {@code haveSeenNames} to guard against membership cycles.
 *
 * @param context      open LDAP context to search with
 * @param dn           distinguished name of the authenticated user
 * @param username     login name of the authenticated user
 * @param currentRoles roles collected so far; may be null
 * @return the accumulated list of role names (never null)
 */
protected List<String> getRoles(DirContext context, String dn, String username, List<String> currentRoles) throws NamingException {
    List<String> list = currentRoles;
    MessageFormat roleSearchMatchingFormat;
    boolean roleSearchSubtreeBool;
    boolean expandRolesBool;
    roleSearchMatchingFormat = new MessageFormat(getLDAPPropertyValue(ROLE_SEARCH_MATCHING));
    roleSearchSubtreeBool = Boolean.valueOf(getLDAPPropertyValue(ROLE_SEARCH_SUBTREE)).booleanValue();
    expandRolesBool = Boolean.valueOf(getLDAPPropertyValue(EXPAND_ROLES)).booleanValue();
    if (list == null) {
        list = new ArrayList<String>();
    }
    // Without a role-name attribute there is nothing to extract from matches.
    if (!isLoginPropertySet(ROLE_NAME)) {
        return list;
    }
    // Both the DN and the username are escaped before being placed in the filter.
    String filter = roleSearchMatchingFormat.format(new String[] {
        doRFC2254Encoding(dn), doRFC2254Encoding(username)
    });
    SearchControls constraints = new SearchControls();
    if (roleSearchSubtreeBool) {
        constraints.setSearchScope(SearchControls.SUBTREE_SCOPE);
    } else {
        constraints.setSearchScope(SearchControls.ONELEVEL_SCOPE);
    }
    if (log.isDebugEnabled()) {
        log.debug("Get user roles.");
        log.debug("Looking for the user roles in LDAP with ");
        log.debug(" base DN: " + getLDAPPropertyValue(ROLE_BASE));
        log.debug(" filter: " + filter);
    }
    HashSet<String> haveSeenNames = new HashSet<String>();
    Queue<String> pendingNameExpansion = new LinkedList<String>();
    NamingEnumeration<SearchResult> results = context.search(getLDAPPropertyValue(ROLE_BASE), filter, constraints);
    // First pass: direct role memberships; seed the expansion queue.
    while (results.hasMore()) {
        SearchResult result = results.next();
        Attributes attrs = result.getAttributes();
        if (expandRolesBool) {
            haveSeenNames.add(result.getNameInNamespace());
            pendingNameExpansion.add(result.getNameInNamespace());
        }
        if (attrs == null) {
            continue;
        }
        list = addAttributeValues(getLDAPPropertyValue(ROLE_NAME), attrs, list);
    }
    // Second pass: BFS over nested group memberships.
    if (expandRolesBool) {
        MessageFormat expandRolesMatchingFormat = new MessageFormat(getLDAPPropertyValue(EXPAND_ROLES_MATCHING));
        while (!pendingNameExpansion.isEmpty()) {
            String name = pendingNameExpansion.remove();
            // NOTE(review): name comes from the directory itself and is inserted
            // into the filter without RFC 2254 escaping -- confirm this is safe
            // for DNs containing filter metacharacters.
            filter = expandRolesMatchingFormat.format(new String[]{name});
            results = context.search(getLDAPPropertyValue(ROLE_BASE), filter, constraints);
            while (results.hasMore()) {
                SearchResult result = results.next();
                name = result.getNameInNamespace();
                if (!haveSeenNames.contains(name)) {
                    Attributes attrs = result.getAttributes();
                    list = addAttributeValues(getLDAPPropertyValue(ROLE_NAME), attrs, list);
                    haveSeenNames.add(name);
                    pendingNameExpansion.add(name);
                }
            }
        }
    }
    return list;
}
/**
 * Escapes the characters that are special in an LDAP search filter
 * (RFC 2254 / RFC 4515): backslash, asterisk, parentheses and NUL.
 * All other characters pass through unchanged.
 *
 * @param inputString raw value to embed in a filter; must not be null
 * @return the filter-safe encoding of {@code inputString}
 */
protected String doRFC2254Encoding(String inputString) {
    // StringBuilder instead of StringBuffer: no synchronization is needed here.
    StringBuilder buf = new StringBuilder(inputString.length());
    for (int i = 0; i < inputString.length(); i++) {
        char c = inputString.charAt(i);
        switch (c) {
        case '\\':
            buf.append("\\5c");
            break;
        case '*':
            buf.append("\\2a");
            break;
        case '(':
            buf.append("\\28");
            break;
        case ')':
            buf.append("\\29");
            break;
        case '\0':
            buf.append("\\00");
            break;
        default:
            buf.append(c);
            break;
        }
    }
    return buf.toString();
}
/**
 * Verifies the supplied credentials by re-binding the shared context as the
 * given DN, then restores the configured connection credentials so later
 * operations run with the service account again.
 *
 * Security fix: an empty password is now rejected outright. RFC 4513
 * (section 5.1.2) allows servers to treat a bind with a DN but no password
 * as an anonymous/unauthenticated bind that SUCCEEDS, which would let any
 * known user name log in without a password (cf. CVE-2014-3612 in a module
 * with this exact shape).
 *
 * @param context  open LDAP context whose environment is temporarily rebound
 * @param dn       distinguished name to bind as
 * @param password clear-text password to verify
 * @return true when the server accepted the bind, false otherwise
 */
protected boolean bindUser(DirContext context, String dn, String password) throws NamingException {
    if (password == null || password.length() == 0) {
        if (log.isDebugEnabled()) {
            log.debug("Rejecting empty password for dn=" + dn);
        }
        return false;
    }
    boolean isValid = false;
    if (log.isDebugEnabled()) {
        log.debug("Binding the user.");
    }
    context.addToEnvironment(Context.SECURITY_PRINCIPAL, dn);
    context.addToEnvironment(Context.SECURITY_CREDENTIALS, password);
    try {
        // An attribute read forces the re-bind with the user's credentials.
        context.getAttributes("", null);
        isValid = true;
        if (log.isDebugEnabled()) {
            log.debug("User " + dn + " successfully bound.");
        }
    } catch (AuthenticationException e) {
        isValid = false;
        if (log.isDebugEnabled()) {
            log.debug("Authentication failed for dn=" + dn);
        }
    }
    // Restore the configured connection credentials on the shared context.
    if (isLoginPropertySet(CONNECTION_USERNAME)) {
        context.addToEnvironment(Context.SECURITY_PRINCIPAL, getLDAPPropertyValue(CONNECTION_USERNAME));
    } else {
        context.removeFromEnvironment(Context.SECURITY_PRINCIPAL);
    }
    if (isLoginPropertySet(CONNECTION_PASSWORD)) {
        context.addToEnvironment(Context.SECURITY_CREDENTIALS, getLDAPPropertyValue(CONNECTION_PASSWORD));
    } else {
        context.removeFromEnvironment(Context.SECURITY_CREDENTIALS);
    }
    return isValid;
}
/**
 * Appends every value of the named attribute to the given list, creating the
 * list on first use.
 *
 * @param attrId attribute to read; may be null (nothing is added)
 * @param attrs  entry attributes; may be null (nothing is added)
 * @param values list to append to; may be null, in which case a new list is
 *               created (even when the attribute turns out to be absent)
 * @return the (possibly newly created) list, or the input when attrId/attrs is null
 */
private List<String> addAttributeValues(String attrId, Attributes attrs, List<String> values) throws NamingException {
    if (attrId == null || attrs == null) {
        return values;
    }
    List<String> result = (values == null) ? new ArrayList<String>() : values;
    Attribute attribute = attrs.get(attrId);
    if (attribute == null) {
        return result;
    }
    for (NamingEnumeration<?> e = attribute.getAll(); e.hasMore();) {
        result.add((String) e.next());
    }
    return result;
}
/**
 * Opens (and caches in the {@code context} field) an LDAP directory context
 * using the configured connection properties.
 *
 * Bug fix: the security protocol and authentication mode are optional, but
 * an unset value was previously passed straight to Hashtable.put, which
 * throws NullPointerException on null values. They are now only added to
 * the environment when actually configured.
 *
 * @return the opened directory context (also stored in the instance field)
 * @throws NamingException when mandatory credentials are missing or the
 *         connection cannot be established
 */
protected DirContext open() throws NamingException {
    try {
        Hashtable<String, String> env = new Hashtable<String, String>();
        env.put(Context.INITIAL_CONTEXT_FACTORY, getLDAPPropertyValue(INITIAL_CONTEXT_FACTORY));
        if (isLoginPropertySet(CONNECTION_USERNAME)) {
            env.put(Context.SECURITY_PRINCIPAL, getLDAPPropertyValue(CONNECTION_USERNAME));
        } else {
            throw new NamingException("Empty username is not allowed");
        }
        if (isLoginPropertySet(CONNECTION_PASSWORD)) {
            env.put(Context.SECURITY_CREDENTIALS, getLDAPPropertyValue(CONNECTION_PASSWORD));
        } else {
            throw new NamingException("Empty password is not allowed");
        }
        // Optional settings: a null value would make Hashtable.put throw.
        if (isLoginPropertySet(CONNECTION_PROTOCOL)) {
            env.put(Context.SECURITY_PROTOCOL, getLDAPPropertyValue(CONNECTION_PROTOCOL));
        }
        env.put(Context.PROVIDER_URL, getLDAPPropertyValue(CONNECTION_URL));
        if (isLoginPropertySet(AUTHENTICATION)) {
            env.put(Context.SECURITY_AUTHENTICATION, getLDAPPropertyValue(AUTHENTICATION));
        }
        context = new InitialDirContext(env);
    } catch (NamingException e) {
        // Keep the stack trace in the log before propagating.
        log.error("Failed to open LDAP connection", e);
        throw e;
    }
    return context;
}
/**
 * Looks up the configured value for the given property name.
 *
 * Bug fix: names are now compared with equals() instead of the reference
 * comparison ==, which only worked by accident for interned string
 * constants and silently failed for equal but non-identical strings.
 *
 * @return the configured value, or null when the property is unknown or unset
 */
private String getLDAPPropertyValue(String propertyName) {
    for (LDAPLoginProperty property : config) {
        if (property.getPropertyName().equals(propertyName)) {
            return property.getPropertyValue();
        }
    }
    return null;
}
/**
 * Tells whether the given property was configured with a non-empty value.
 *
 * Bug fix: the property name is compared with equals() instead of the
 * reference comparison ==, which relied on string interning.
 */
private boolean isLoginPropertySet(String propertyName) {
    for (LDAPLoginProperty property : config) {
        if (property.getPropertyName().equals(propertyName)
                && property.getPropertyValue() != null
                && !"".equals(property.getPropertyValue())) {
            return true;
        }
    }
    return false;
}
}
| |
package com.sequenceiq.freeipa.service.stack;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Stream;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.sequenceiq.authorization.service.EnvironmentPropertyProvider;
import com.sequenceiq.cloudbreak.auth.ThreadBasedUserCrnProvider;
import com.sequenceiq.cloudbreak.auth.crn.Crn;
import com.sequenceiq.cloudbreak.common.event.PayloadContext;
import com.sequenceiq.cloudbreak.common.exception.BadRequestException;
import com.sequenceiq.cloudbreak.common.exception.NotFoundException;
import com.sequenceiq.cloudbreak.logger.MDCBuilder;
import com.sequenceiq.cloudbreak.quartz.model.JobResource;
import com.sequenceiq.flow.core.PayloadContextProvider;
import com.sequenceiq.freeipa.api.v1.freeipa.stack.model.common.DetailedStackStatus;
import com.sequenceiq.freeipa.api.v1.freeipa.stack.model.common.Status;
import com.sequenceiq.freeipa.dto.StackIdWithStatus;
import com.sequenceiq.freeipa.entity.ImageEntity;
import com.sequenceiq.freeipa.entity.Stack;
import com.sequenceiq.freeipa.repository.StackRepository;
/**
 * Service layer around {@link StackRepository} for reading and persisting
 * FreeIPA {@link Stack} entities. Lookups keyed by an environment CRN fall
 * back to the parent environment's stack (via {@link ChildEnvironmentService})
 * when the direct repository query finds nothing.
 */
@Service
public class StackService implements EnvironmentPropertyProvider, PayloadContextProvider {

    private static final Logger LOGGER = LoggerFactory.getLogger(StackService.class);

    // Time source, replaceable in tests for deterministic date arithmetic.
    @VisibleForTesting
    Supplier<LocalDateTime> nowSupplier = LocalDateTime::now;

    @Inject
    private StackRepository stackRepository;

    @Inject
    private ChildEnvironmentService childEnvironmentService;

    /** @return every stack the repository reports as running */
    public List<Stack> findAllRunning() {
        return stackRepository.findAllRunning();
    }

    /** @return job resources for running stacks in a status eligible for auto-sync */
    public List<JobResource> findAllForAutoSync() {
        List<Status> syncableStatuses = List.of(
                Status.AVAILABLE,
                Status.UPDATE_FAILED,
                Status.START_FAILED,
                Status.STOP_FAILED,
                Status.UNREACHABLE,
                Status.UNHEALTHY,
                Status.UNKNOWN,
                Status.STOPPED,
                Status.START_IN_PROGRESS,
                Status.STOP_IN_PROGRESS,
                Status.STOP_REQUESTED,
                Status.START_REQUESTED);
        return stackRepository.findAllRunningAndStatusIn(syncableStatuses);
    }

    /** @throws NotFoundException when no stack exists for the id */
    public Stack getByIdWithListsInTransaction(Long id) {
        return stackRepository.findOneWithLists(id)
                .orElseThrow(() -> stackNotFound(id));
    }

    /** @throws NotFoundException when no stack exists for the id */
    public Stack getStackById(Long id) {
        return stackRepository.findById(id)
                .orElseThrow(() -> stackNotFound(id));
    }

    @Override
    public PayloadContext getPayloadContext(Long resourceId) {
        return stackRepository.getStackAsPayloadContextById(resourceId).orElse(null);
    }

    /** Same as {@link #getByEnvironmentCrnAndAccountIdWithLists} but also sets up the MDC logging context. */
    public Stack getByEnvironmentCrnAndAccountIdWithListsAndMdcContext(String environmentCrn, String accountId) {
        Stack stack = getByEnvironmentCrnAndAccountIdWithLists(environmentCrn, accountId);
        MDCBuilder.buildMdcContext(stack);
        return stack;
    }

    public Stack save(Stack stack) {
        return stackRepository.save(stack);
    }

    /** @throws NotFoundException when neither the environment nor its parent owns a stack */
    public Stack getByEnvironmentCrnAndAccountId(String environmentCrn, String accountId) {
        return findByEnvironmentCrnAndAccountId(environmentCrn, accountId)
                .or(() -> childEnvironmentService.findParentByEnvironmentCrnAndAccountId(environmentCrn, accountId))
                .orElseThrow(() -> environmentNotFound(environmentCrn));
    }

    /** Terminated stacks are included in this lookup. */
    public Stack getByCrnAndAccountIdEvenIfTerminated(String environmentCrn, String accountId, String crn) {
        return findByCrnAndAccountIdWithListsEvenIfTerminated(environmentCrn, accountId, crn)
                .orElseThrow(() -> new NotFoundException(
                        String.format("FreeIPA stack by environment [%s] with CRN [%s] not found", environmentCrn, crn)));
    }

    public Optional<Stack> findByEnvironmentCrnAndAccountId(String environmentCrn, String accountId) {
        return stackRepository.findByEnvironmentCrnAndAccountId(environmentCrn, accountId)
                .or(() -> childEnvironmentService.findParentByEnvironmentCrnAndAccountId(environmentCrn, accountId));
    }

    public Optional<Stack> findByCrnAndAccountIdWithListsEvenIfTerminated(String environmentCrn, String accountId, String crn) {
        return stackRepository.findByAccountIdEnvironmentCrnAndCrnWithListsEvenIfTerminated(environmentCrn, accountId, crn)
                .or(() -> childEnvironmentService.findParentStackByChildEnvironmentCrnAndCrnWithListsEvenIfTerminated(environmentCrn, accountId, crn));
    }

    public List<Stack> findMultipleByEnvironmentCrnAndAccountIdEvenIfTerminated(String environmentCrn, String accountId) {
        List<Stack> stacks = stackRepository.findMultipleByEnvironmentCrnAndAccountIdEvenIfTerminated(environmentCrn, accountId);
        if (!stacks.isEmpty()) {
            return stacks;
        }
        // Fall back to stacks of the parent environment.
        return childEnvironmentService.findMultipleParentStackByChildEnvironmentCrnEvenIfTerminated(environmentCrn, accountId);
    }

    public List<Stack> findMultipleByEnvironmentCrnAndAccountIdEvenIfTerminatedWithList(String environmentCrn, String accountId) {
        List<Stack> stacks = stackRepository.findMultipleByEnvironmentCrnAndAccountIdEvenIfTerminatedWithList(environmentCrn, accountId);
        if (!stacks.isEmpty()) {
            return stacks;
        }
        // Fall back to stacks of the parent environment.
        return childEnvironmentService.findMultipleParentStackByChildEnvironmentCrnEvenIfTerminatedWithList(environmentCrn, accountId);
    }

    /** An empty CRN collection means "every stack in the account". */
    public List<Stack> getMultipleByEnvironmentCrnOrChildEnvironmantCrnAndAccountId(Collection<String> environmentCrns, String accountId) {
        if (environmentCrns.isEmpty()) {
            return Lists.newArrayList(getAllByAccountId(accountId));
        }
        return stackRepository.findMultipleByEnvironmentCrnOrChildEnvironmentCrnAndAccountId(environmentCrns, accountId);
    }

    public List<Stack> findAllByEnvironmentCrnAndAccountId(String environmentCrn, String accountId) {
        return stackRepository.findAllByEnvironmentCrnAndAccountId(environmentCrn, accountId);
    }

    public List<Long> findAllIdByEnvironmentCrnAndAccountId(String environmentCrn, String accountId) {
        return stackRepository.findAllIdByEnvironmentCrnAndAccountId(environmentCrn, accountId);
    }

    /**
     * @throws NotFoundException   when no stack exists for the environment
     * @throws BadRequestException when more than one stack matches
     */
    public Long getIdByEnvironmentCrnAndAccountId(String environmentCrn, String accountId) {
        List<Long> stackIds = stackRepository.findAllIdByEnvironmentCrnAndAccountId(environmentCrn, accountId);
        if (stackIds.isEmpty()) {
            throw environmentNotFound(environmentCrn);
        }
        if (stackIds.size() > 1) {
            throw new BadRequestException(String.format("Multiple FreeIPA stack by environment [%s] found", environmentCrn));
        }
        return stackIds.get(0);
    }

    /** Resolves through the child environment when the direct lookup finds nothing. */
    public Stack getByEnvironmentCrnAndAccountIdWithLists(String environmentCrn, String accountId) {
        return stackRepository.findByEnvironmentCrnAndAccountIdWithList(environmentCrn, accountId)
                .or(() -> stackRepository.findByChildEnvironmentCrnAndAccountIdWithList(environmentCrn, accountId))
                .orElseThrow(() -> environmentNotFound(environmentCrn));
    }

    /** Unlike {@link #getByEnvironmentCrnAndAccountIdWithLists}, does NOT fall back to child environments. */
    public Stack getByOwnEnvironmentCrnAndAccountIdWithLists(String environmentCrn, String accountId) {
        return stackRepository.findByEnvironmentCrnAndAccountIdWithList(environmentCrn, accountId)
                .orElseThrow(() -> environmentNotFound(environmentCrn));
    }

    public Set<Stack> getAllByAccountId(String accountId) {
        return stackRepository.findByAccountId(accountId);
    }

    public List<StackIdWithStatus> getStatuses(Set<Long> stackIds) {
        return stackRepository.findStackStatusesWithoutAuth(stackIds);
    }

    public List<Stack> findAllWithStatuses(Collection<Status> statuses) {
        return stackRepository.findAllWithStatuses(statuses);
    }

    public List<Stack> findAllWithDetailedStackStatuses(Collection<DetailedStackStatus> detailedStackStatuses) {
        return stackRepository.findAllWithDetailedStackStatuses(detailedStackStatuses);
    }

    public List<Stack> findAllByAccountIdWithStatuses(String accountId, Collection<Status> statuses) {
        return stackRepository.findByAccountIdWithStatuses(accountId, statuses);
    }

    /** An empty CRN collection means "every stack in the account with one of the statuses". */
    public List<Stack> findMultipleByEnvironmentCrnAndAccountIdWithStatuses(Collection<String> environmentCrns, String accountId, Collection<Status> statuses) {
        if (environmentCrns.isEmpty()) {
            return findAllByAccountIdWithStatuses(accountId, statuses);
        }
        return stackRepository.findMultipleByEnvironmentCrnAndAccountIdWithStatuses(environmentCrns, accountId, statuses);
    }

    @Override
    public Map<String, Optional<String>> getNamesByCrnsForMessage(Collection<String> crns) {
        Map<String, Optional<String>> namesByCrn = new HashMap<>();
        String accountId = ThreadBasedUserCrnProvider.getAccountId();
        // Decide from the first CRN whether we were handed environment CRNs or resource CRNs.
        boolean environmentCrns = Optional.ofNullable(crns)
                .map(Collection::stream)
                .flatMap(Stream::findFirst)
                .map(Crn::fromString)
                .map(Crn::getResourceType)
                .map(type -> type == Crn.ResourceType.ENVIRONMENT)
                .orElse(Boolean.FALSE);
        if (environmentCrns) {
            stackRepository.findNamesByEnvironmentCrnAndAccountId(crns, accountId)
                    .forEach(nameAndCrn -> namesByCrn.put(nameAndCrn.getCrn(), Optional.ofNullable(nameAndCrn.getName())));
        } else {
            stackRepository.findNamesByResourceCrnAndAccountId(crns, accountId)
                    .forEach(nameAndCrn -> namesByCrn.put(nameAndCrn.getCrn(), Optional.ofNullable(nameAndCrn.getName())));
        }
        return namesByCrn;
    }

    @Override
    public EnumSet<Crn.ResourceType> getSupportedCrnResourceTypes() {
        return EnumSet.of(Crn.ResourceType.FREEIPA, Crn.ResourceType.ENVIRONMENT);
    }

    /**
     * @param thresholdInDays how far back a stack may have been alive; null means no look-back
     * @return images of stacks alive within the threshold
     */
    public List<ImageEntity> getImagesOfAliveStacks(Integer thresholdInDays) {
        int lookBackDays = Optional.ofNullable(thresholdInDays).orElse(0);
        LocalDateTime thresholdDate = nowSupplier.get().minusDays(lookBackDays);
        long thresholdTimestamp = Timestamp.valueOf(thresholdDate).getTime();
        return stackRepository.findImagesOfAliveStacks(thresholdTimestamp);
    }

    /** Fetches the stack for the environment and sets up the MDC logging context. */
    public Stack getFreeIpaStackWithMdcContext(String envCrn, String accountId) {
        LOGGER.debug("Looking for stack using env:{} and accountId:{}", envCrn, accountId);
        Stack stack = getByEnvironmentCrnAndAccountId(envCrn, accountId);
        MDCBuilder.buildMdcContext(stack);
        LOGGER.debug("Stack is fetched for env:{} and accountId:{} ", envCrn, accountId);
        return stack;
    }

    /** Builds the standard "stack by id" not-found error. */
    private NotFoundException stackNotFound(Long id) {
        return new NotFoundException(String.format("FreeIPA stack [%s] not found", id));
    }

    /** Builds the standard "stack by environment" not-found error. */
    private NotFoundException environmentNotFound(String environmentCrn) {
        return new NotFoundException(String.format("FreeIPA stack by environment [%s] not found", environmentCrn));
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.bpmn.behavior;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.flowable.bpmn.model.Activity;
import org.flowable.bpmn.model.BoundaryEvent;
import org.flowable.bpmn.model.CallActivity;
import org.flowable.bpmn.model.CompensateEventDefinition;
import org.flowable.bpmn.model.FlowElement;
import org.flowable.bpmn.model.SubProcess;
import org.flowable.bpmn.model.Transaction;
import org.flowable.engine.common.api.FlowableIllegalArgumentException;
import org.flowable.engine.common.impl.interceptor.CommandContext;
import org.flowable.engine.common.impl.util.CollectionUtil;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.impl.bpmn.helper.ScopeUtil;
import org.flowable.engine.impl.delegate.ActivityBehavior;
import org.flowable.engine.impl.persistence.entity.ExecutionEntity;
import org.flowable.engine.impl.persistence.entity.ExecutionEntityManager;
import org.flowable.engine.impl.util.CommandContextUtil;
/**
 * Multi-instance behavior for parallel activities: all instance executions
 * are created up front and then executed; the activity is left once every
 * instance has completed or the completion condition evaluates to true.
 *
 * @author Joram Barrez
 * @author Tijs Rademakers
 */
public class ParallelMultiInstanceBehavior extends MultiInstanceActivityBehavior {

    private static final long serialVersionUID = 1L;

    public ParallelMultiInstanceBehavior(Activity activity, AbstractBpmnActivityBehavior originalActivityBehavior) {
        super(activity, originalActivityBehavior);
    }

    /**
     * Handles the parallel case of spawning the instances. Will create child executions accordingly for every instance needed.
     *
     * @return the number of instances created
     * @throws FlowableIllegalArgumentException when the resolved instance count is negative
     */
    @Override
    protected int createInstances(DelegateExecution multiInstanceRootExecution) {
        int nrOfInstances = resolveNrOfInstances(multiInstanceRootExecution);
        if (nrOfInstances < 0) {
            throw new FlowableIllegalArgumentException("Invalid number of instances: must be non-negative integer value" + ", but was " + nrOfInstances);
        }
        // Bookkeeping loop variables stored on the multi-instance root execution.
        setLoopVariable(multiInstanceRootExecution, NUMBER_OF_INSTANCES, nrOfInstances);
        setLoopVariable(multiInstanceRootExecution, NUMBER_OF_COMPLETED_INSTANCES, 0);
        setLoopVariable(multiInstanceRootExecution, NUMBER_OF_ACTIVE_INSTANCES, nrOfInstances);
        List<ExecutionEntity> concurrentExecutions = new ArrayList<>();
        for (int loopCounter = 0; loopCounter < nrOfInstances; loopCounter++) {
            ExecutionEntity concurrentExecution = CommandContextUtil.getExecutionEntityManager()
                    .createChildExecution((ExecutionEntity) multiInstanceRootExecution);
            concurrentExecution.setCurrentFlowElement(activity);
            concurrentExecution.setActive(true);
            concurrentExecution.setScope(false);
            concurrentExecutions.add(concurrentExecution);
            logLoopDetails(concurrentExecution, "initialized", loopCounter, 0, nrOfInstances, nrOfInstances);
            //CommandContextUtil.getHistoryManager().recordActivityStart(concurrentExecution);
        }
        // Before the activities are executed, all executions MUST be created up front
        // Do not try to merge this loop with the previous one, as it will lead
        // to bugs, due to possible child execution pruning.
        for (int loopCounter = 0; loopCounter < nrOfInstances; loopCounter++) {
            ExecutionEntity concurrentExecution = concurrentExecutions.get(loopCounter);
            // executions can be inactive, if instances are all automatics
            // (no-waitstate) and completionCondition has been met in the meantime
            if (concurrentExecution.isActive()
                    && !concurrentExecution.isEnded()
                    && !concurrentExecution.getParent().isEnded()) {
                executeOriginalBehavior(concurrentExecution, loopCounter);
            }
        }
        // See ACT-1586: ExecutionQuery returns wrong results when using multi
        // instance on a receive task The parent execution must be set to false, so it wouldn't show up in
        // the execution query when using .activityId(something). Do not we cannot nullify the
        // activityId (that would have been a better solution), as it would break boundary event behavior.
        if (!concurrentExecutions.isEmpty()) {
            multiInstanceRootExecution.setActive(false);
        }
        return nrOfInstances;
    }

    /**
     * Called when the wrapped {@link ActivityBehavior} calls the {@link AbstractBpmnActivityBehavior#leave(DelegateExecution)} method. Handles the completion of one of the parallel instances
     */
    @Override
    public void leave(DelegateExecution execution) {
        boolean zeroNrOfInstances = false;
        if (resolveNrOfInstances(execution) == 0) {
            // Empty collection, just leave.
            zeroNrOfInstances = true;
            super.leave(execution); // Plan the default leave
        }
        int loopCounter = getLoopVariable(execution, getCollectionElementIndexVariable());
        int nrOfInstances = getLoopVariable(execution, NUMBER_OF_INSTANCES);
        int nrOfCompletedInstances = getLoopVariable(execution, NUMBER_OF_COMPLETED_INSTANCES) + 1;
        int nrOfActiveInstances = getLoopVariable(execution, NUMBER_OF_ACTIVE_INSTANCES) - 1;
        CommandContextUtil.getHistoryManager().recordActivityEnd((ExecutionEntity) execution, null);
        callActivityEndListeners(execution);
        if (zeroNrOfInstances) {
            return;
        }
        DelegateExecution miRootExecution = getMultiInstanceRootExecution(execution);
        if (miRootExecution != null) { // will be null in case of empty collection
            setLoopVariable(miRootExecution, NUMBER_OF_COMPLETED_INSTANCES, nrOfCompletedInstances);
            setLoopVariable(miRootExecution, NUMBER_OF_ACTIVE_INSTANCES, nrOfActiveInstances);
        }
        logLoopDetails(execution, "instance completed", loopCounter, nrOfCompletedInstances, nrOfActiveInstances, nrOfInstances);
        ExecutionEntity executionEntity = (ExecutionEntity) execution;
        if (executionEntity.getParent() != null) {
            executionEntity.inactivate();
            lockFirstParentScope(executionEntity);
            boolean isCompletionConditionSatisfied = completionConditionSatisfied(execution.getParent());
            // All instances done, or the completion condition cuts things short.
            if (nrOfCompletedInstances >= nrOfInstances || isCompletionConditionSatisfied) {
                ExecutionEntity leavingExecution = null;
                if (nrOfInstances > 0) {
                    leavingExecution = executionEntity.getParent();
                } else {
                    // NOTE(review): recordActivityEnd was already called above for this
                    // execution; this second call looks redundant -- confirm intent.
                    CommandContextUtil.getHistoryManager().recordActivityEnd((ExecutionEntity) execution, null);
                    leavingExecution = executionEntity;
                }
                Activity activity = (Activity) execution.getCurrentFlowElement();
                verifyCompensation(execution, leavingExecution, activity);
                verifyCallActivity(leavingExecution, activity);
                if (isCompletionConditionSatisfied) {
                    // Inactivate every not-yet-flushed child so no further work happens on them.
                    // NOTE(review): miRootExecution may be null here (see the null check
                    // above); confirm the completion condition cannot be satisfied in that case.
                    LinkedList<DelegateExecution> toVerify = new LinkedList<>(miRootExecution.getExecutions());
                    while (!toVerify.isEmpty()) {
                        DelegateExecution childExecution = toVerify.pop();
                        if (((ExecutionEntity) childExecution).isInserted()) {
                            childExecution.inactivate();
                        }
                        List<DelegateExecution> childExecutions = (List<DelegateExecution>) childExecution.getExecutions();
                        if (childExecutions != null && !childExecutions.isEmpty()) {
                            toVerify.addAll(childExecutions);
                        }
                    }
                    sendCompletedWithConditionEvent(leavingExecution);
                } else {
                    sendCompletedEvent(leavingExecution);
                }
                // Clean up execution that resulted in the mult-instance finishing so that cancelled events aren't sent for it.
                ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager();
                executionEntityManager.deleteChildExecutions(executionEntity, DELETE_REASON_END, false);
                executionEntityManager.deleteExecutionAndRelatedData(executionEntity, DELETE_REASON_END);
                super.leave(leavingExecution);
            }
        } else {
            sendCompletedEvent(execution);
            super.leave(execution);
        }
    }

    /**
     * When the activity (transaction, or sub-process containing a boundary
     * compensation event) participates in compensation, snapshots the
     * sub-process execution so it can be compensated later.
     *
     * @return the activity that was passed in, unchanged
     */
    protected Activity verifyCompensation(DelegateExecution execution, ExecutionEntity executionToUse, Activity activity) {
        boolean hasCompensation = false;
        if (activity instanceof Transaction) {
            hasCompensation = true;
        } else if (activity instanceof SubProcess) {
            // A sub-process compensates when any nested activity carries a
            // compensate boundary event (only the first event definition is checked).
            SubProcess subProcess = (SubProcess) activity;
            for (FlowElement subElement : subProcess.getFlowElements()) {
                if (subElement instanceof Activity) {
                    Activity subActivity = (Activity) subElement;
                    if (CollectionUtil.isNotEmpty(subActivity.getBoundaryEvents())) {
                        for (BoundaryEvent boundaryEvent : subActivity.getBoundaryEvents()) {
                            if (CollectionUtil.isNotEmpty(boundaryEvent.getEventDefinitions()) &&
                                    boundaryEvent.getEventDefinitions().get(0) instanceof CompensateEventDefinition) {
                                hasCompensation = true;
                                break;
                            }
                        }
                    }
                }
            }
        }
        if (hasCompensation) {
            ScopeUtil.createCopyOfSubProcessExecutionForCompensation(executionToUse);
        }
        return activity;
    }

    /**
     * When the multi-instance activity is a call activity, deletes the called
     * process instances spawned by the (now finished) instance executions so
     * they do not linger after the completion condition was met.
     */
    protected void verifyCallActivity(ExecutionEntity executionToUse, Activity activity) {
        if (activity instanceof CallActivity) {
            ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager();
            if (executionToUse != null) {
                List<String> callActivityExecutionIds = new ArrayList<>();
                // Find all execution entities that are at the call activity
                List<ExecutionEntity> childExecutions = executionEntityManager.collectChildren(executionToUse);
                if (childExecutions != null) {
                    for (ExecutionEntity childExecution : childExecutions) {
                        if (activity.getId().equals(childExecution.getCurrentActivityId())) {
                            callActivityExecutionIds.add(childExecution.getId());
                        }
                    }
                    // Now all call activity executions have been collected, loop again and check which should be removed
                    for (int i = childExecutions.size() - 1; i >= 0; i--) {
                        ExecutionEntity childExecution = childExecutions.get(i);
                        if (StringUtils.isNotEmpty(childExecution.getSuperExecutionId())
                                && callActivityExecutionIds.contains(childExecution.getSuperExecutionId())) {
                            executionEntityManager.deleteProcessInstanceExecutionEntity(childExecution.getId(), activity.getId(),
                                    "call activity completion condition met", true, false, true);
                        }
                    }
                }
            }
        }
    }

    /**
     * Walks up the parent chain to the first scope execution and forces an
     * update on it, serializing concurrent instance completions on that row.
     */
    protected void lockFirstParentScope(DelegateExecution execution) {
        ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager();
        boolean found = false;
        ExecutionEntity parentScopeExecution = null;
        ExecutionEntity currentExecution = (ExecutionEntity) execution;
        while (!found && currentExecution != null && currentExecution.getParentId() != null) {
            parentScopeExecution = executionEntityManager.findById(currentExecution.getParentId());
            if (parentScopeExecution != null && parentScopeExecution.isScope()) {
                found = true;
            }
            currentExecution = parentScopeExecution;
        }
        // NOTE(review): if no scope ancestor exists, parentScopeExecution can be
        // null here and this throws an NPE -- confirm callers guarantee a scope parent.
        parentScopeExecution.forceUpdate();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.hbase;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.apache.camel.Exchange;
import org.apache.camel.component.hbase.filters.ModelAwareFilter;
import org.apache.camel.component.hbase.mapping.CellMappingStrategy;
import org.apache.camel.component.hbase.mapping.CellMappingStrategyFactory;
import org.apache.camel.component.hbase.model.HBaseCell;
import org.apache.camel.component.hbase.model.HBaseData;
import org.apache.camel.component.hbase.model.HBaseRow;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.util.ObjectHelper;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * The HBase producer.
 * <p>
 * Supports the PUT, GET, DELETE and SCAN operations; the operation is resolved from the
 * {@link HBaseConstants#OPERATION} header (falling back to the endpoint configuration,
 * defaulting to PUT — see {@link #updateHeaders(Exchange)}).
 */
public class HBaseProducer extends DefaultProducer {

    private final HBaseEndpoint endpoint;
    private final HBaseRow rowModel;

    public HBaseProducer(HBaseEndpoint endpoint) {
        super(endpoint);
        this.endpoint = endpoint;
        this.rowModel = endpoint.getRowModel();
    }

    /**
     * Resolves the operation and row model from the exchange, executes the corresponding
     * HBase operation and, for GET/SCAN, maps the results back onto the out message.
     */
    public void process(Exchange exchange) throws Exception {
        try (Table table = endpoint.getTable()) {
            updateHeaders(exchange);
            String operation = (String) exchange.getIn().getHeader(HBaseConstants.OPERATION);
            Integer maxScanResult = exchange.getIn().getHeader(HBaseConstants.HBASE_MAX_SCAN_RESULTS, Integer.class);
            String fromRowId = (String) exchange.getIn().getHeader(HBaseConstants.FROM_ROW);
            String stopRowId = (String) exchange.getIn().getHeader(HBaseConstants.STOP_ROW);
            CellMappingStrategy mappingStrategy = endpoint.getCellMappingStrategyFactory().getStrategy(exchange.getIn());
            HBaseData data = mappingStrategy.resolveModel(exchange.getIn());

            List<Put> putOperations = new LinkedList<>();
            List<Delete> deleteOperations = new LinkedList<>();
            List<HBaseRow> getOperationResult = new LinkedList<>();
            List<HBaseRow> scanOperationResult = new LinkedList<>();

            for (HBaseRow hRow : data.getRows()) {
                // Fill gaps in the row definition from the endpoint-level row model.
                hRow.apply(rowModel);
                if (HBaseConstants.PUT.equals(operation)) {
                    putOperations.add(createPut(hRow));
                } else if (HBaseConstants.GET.equals(operation)) {
                    getOperationResult.add(getCells(table, hRow));
                } else if (HBaseConstants.DELETE.equals(operation)) {
                    deleteOperations.add(createDeleteRow(hRow));
                } else if (HBaseConstants.SCAN.equals(operation)) {
                    scanOperationResult = scanCells(table, hRow, fromRowId, stopRowId, maxScanResult, endpoint.getFilters());
                }
            }

            // Check if we have something to add.
            if (!putOperations.isEmpty()) {
                table.put(putOperations);
            } else if (!deleteOperations.isEmpty()) {
                table.delete(deleteOperations);
            } else if (!getOperationResult.isEmpty()) {
                mappingStrategy.applyGetResults(exchange.getOut(), new HBaseData(getOperationResult));
            } else if (!scanOperationResult.isEmpty()) {
                mappingStrategy.applyScanResults(exchange.getOut(), new HBaseData(scanOperationResult));
            }
        }
    }

    /**
     * Creates an HBase {@link Put} on a specific row, using a collection of values (family/column/value pairs).
     *
     * @param hRow the model row: id plus a set of cells with family, qualifier and value
     * @return the populated {@link Put}
     * @throws Exception if the row, its id, or any cell family/column is missing
     */
    private Put createPut(HBaseRow hRow) throws Exception {
        ObjectHelper.notNull(hRow, "HBase row");
        ObjectHelper.notNull(hRow.getId(), "HBase row id");
        ObjectHelper.notNull(hRow.getCells(), "HBase cells");
        Put put = new Put(endpoint.getCamelContext().getTypeConverter().convertTo(byte[].class, hRow.getId()));
        Set<HBaseCell> cells = hRow.getCells();
        for (HBaseCell cell : cells) {
            String family = cell.getFamily();
            String column = cell.getQualifier();
            Object value = cell.getValue();
            ObjectHelper.notNull(family, "HBase column family", cell);
            ObjectHelper.notNull(column, "HBase column", cell);
            put.addColumn(
                    HBaseHelper.getHBaseFieldAsBytes(family),
                    HBaseHelper.getHBaseFieldAsBytes(column),
                    endpoint.getCamelContext().getTypeConverter().convertTo(byte[].class, value)
            );
        }
        return put;
    }

    /**
     * Performs an HBase {@link Get} on a specific row, using a collection of values (family/column/value pairs).
     * The result is <p>the most recent entry</p> for each column.
     */
    private HBaseRow getCells(Table table, HBaseRow hRow) throws Exception {
        HBaseRow resultRow = new HBaseRow();
        ObjectHelper.notNull(hRow, "HBase row");
        ObjectHelper.notNull(hRow.getId(), "HBase row id");
        ObjectHelper.notNull(hRow.getCells(), "HBase cells");
        resultRow.setId(hRow.getId());

        Get get = new Get(endpoint.getCamelContext().getTypeConverter().convertTo(byte[].class, hRow.getId()));
        Set<HBaseCell> cellModels = hRow.getCells();
        for (HBaseCell cellModel : cellModels) {
            String family = cellModel.getFamily();
            String column = cellModel.getQualifier();
            ObjectHelper.notNull(family, "HBase column family", cellModel);
            ObjectHelper.notNull(column, "HBase column", cellModel);
            get.addColumn(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column));
        }

        Result result = table.get(get);
        if (!result.isEmpty()) {
            resultRow.setTimestamp(result.rawCells()[0].getTimestamp());
        }
        for (HBaseCell cellModel : cellModels) {
            HBaseCell resultCell = new HBaseCell();
            String family = cellModel.getFamily();
            String column = cellModel.getQualifier();
            resultCell.setFamily(family);
            resultCell.setQualifier(column);
            List<Cell> kvs = result.getColumnCells(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column));
            if (kvs != null && !kvs.isEmpty()) {
                // Return the most recent entry; getColumnCells sorts newest first.
                resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(cellModel.getValueType(), CellUtil.cloneValue(kvs.get(0))));
                resultCell.setTimestamp(kvs.get(0).getTimestamp());
            }
            resultRow.getCells().add(resultCell);
        }
        return resultRow;
    }

    /**
     * Creates an HBase {@link Delete} for the whole row identified by the model row id.
     */
    private Delete createDeleteRow(HBaseRow hRow) throws Exception {
        ObjectHelper.notNull(hRow, "HBase row");
        ObjectHelper.notNull(hRow.getId(), "HBase row id");
        return new Delete(endpoint.getCamelContext().getTypeConverter().convertTo(byte[].class, hRow.getId()));
    }

    /**
     * Performs an HBase {@link Scan} between the optional start/stop rows, applying all
     * configured model-aware filters, and returns up to {@code maxRowScan} rows
     * (unlimited when {@code maxRowScan} is {@code null}).
     * The value of each returned cell is <p>the most recent entry</p> for that column.
     */
    private List<HBaseRow> scanCells(Table table, HBaseRow model, String start, String stop, Integer maxRowScan, List<Filter> filters)
            throws Exception {
        List<HBaseRow> rowSet = new LinkedList<>();
        Scan scan;
        if (start != null) {
            scan = new Scan(Bytes.toBytes(start));
        } else {
            scan = new Scan();
        }
        if (ObjectHelper.isNotEmpty(stop)) {
            scan.setStopRow(Bytes.toBytes(stop));
        }
        if (filters != null && !filters.isEmpty()) {
            // Combine ALL configured filters into a single MUST_PASS_ALL list.
            // (Calling scan.setFilter(...) inside the loop would overwrite previous
            // filters, leaving only the last one active.)
            FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
            for (Filter filter : filters) {
                ModelAwareFilter<?> modelAwareFilter = (ModelAwareFilter<?>) filter;
                modelAwareFilter.apply(endpoint.getCamelContext(), model);
                filterList.addFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, modelAwareFilter.getFilteredList()));
            }
            scan.setFilter(filterList);
        }
        Set<HBaseCell> cellModels = model.getCells();
        for (HBaseCell cellModel : cellModels) {
            String family = cellModel.getFamily();
            String column = cellModel.getQualifier();
            if (ObjectHelper.isNotEmpty(family) && ObjectHelper.isNotEmpty(column)) {
                scan.addColumn(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column));
            }
        }
        // ResultScanner holds server-side resources; close it even on failure.
        try (ResultScanner resultScanner = table.getScanner(scan)) {
            int count = 0;
            Result result = resultScanner.next();
            // A null maxRowScan means "no limit" (the header is only populated when
            // the endpoint's maxResults is non-zero).
            while (result != null && (maxRowScan == null || count < maxRowScan)) {
                HBaseRow resultRow = new HBaseRow();
                resultRow.setId(endpoint.getCamelContext().getTypeConverter().convertTo(model.getRowType(), result.getRow()));
                resultRow.setTimestamp(result.rawCells()[0].getTimestamp());
                for (HBaseCell modelCell : model.getCells()) {
                    HBaseCell resultCell = new HBaseCell();
                    String family = modelCell.getFamily();
                    String column = modelCell.getQualifier();
                    resultCell.setFamily(family);
                    resultCell.setQualifier(column);
                    resultCell.setValue(endpoint.getCamelContext().getTypeConverter().convertTo(
                            modelCell.getValueType(),
                            result.getValue(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column)))
                    );
                    Cell cell = result.getColumnLatestCell(HBaseHelper.getHBaseFieldAsBytes(family), HBaseHelper.getHBaseFieldAsBytes(column));
                    if (cell != null) {
                        resultCell.setTimestamp(cell.getTimestamp());
                    }
                    resultRow.getCells().add(resultCell);
                }
                rowSet.add(resultRow);
                count++;
                result = resultScanner.next();
            }
        }
        return rowSet;
    }

    /**
     * This method fills possible gaps in the {@link Exchange} headers, with values passed from the Endpoint.
     */
    private void updateHeaders(Exchange exchange) {
        if (exchange != null && exchange.getIn() != null) {
            if (endpoint.getMaxResults() != 0 && exchange.getIn().getHeader(HBaseConstants.HBASE_MAX_SCAN_RESULTS) == null) {
                exchange.getIn().setHeader(HBaseConstants.HBASE_MAX_SCAN_RESULTS, endpoint.getMaxResults());
            }
            if (endpoint.getMappingStrategyName() != null && exchange.getIn().getHeader(CellMappingStrategyFactory.STRATEGY) == null) {
                exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY, endpoint.getMappingStrategyName());
            }
            if (endpoint.getMappingStrategyName() != null
                    && exchange.getIn().getHeader(CellMappingStrategyFactory.STRATEGY_CLASS_NAME) == null) {
                exchange.getIn().setHeader(CellMappingStrategyFactory.STRATEGY_CLASS_NAME, endpoint.getMappingStrategyClassName());
            }
            // Operation: header wins, then endpoint configuration, then the PUT default.
            if (exchange.getIn().getHeader(HBaseConstants.OPERATION) == null) {
                String operation = endpoint.getOperation() != null ? endpoint.getOperation() : HBaseConstants.PUT;
                exchange.getIn().setHeader(HBaseConstants.OPERATION, operation);
            }
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.conversion.impl;
import com.intellij.conversion.*;
import com.intellij.openapi.components.StorageScheme;
import java.io.File;
import java.util.*;
/**
* @author nik
*/
public class ConversionRunner {
  private final ConverterProvider myProvider;
  private final ConversionContextImpl myContext;
  private final ConversionProcessor<ModuleSettings> myModuleFileConverter;
  private final ConversionProcessor<ProjectSettings> myProjectFileConverter;
  private final ConversionProcessor<WorkspaceSettings> myWorkspaceConverter;
  private boolean myProcessProjectFile;
  private boolean myProcessWorkspaceFile;
  private boolean myProcessRunConfigurations;
  private boolean myProcessProjectLibraries;
  private boolean myArtifacts;
  private final List<File> myModulesFilesToProcess = new ArrayList<>();
  private final ProjectConverter myConverter;
  private final ConversionProcessor<RunManagerSettings> myRunConfigurationsConverter;
  private final ConversionProcessor<ProjectLibrariesSettings> myProjectLibrariesConverter;
  private final ConversionProcessor<ArtifactsSettings> myArtifactsConverter;

  /** The three stages every converter is driven through, in order. */
  private enum Phase {PRE, MAIN, POST}

  public ConversionRunner(ConverterProvider provider, ConversionContextImpl context) {
    myProvider = provider;
    myContext = context;
    myConverter = provider.createConverter(context);
    myModuleFileConverter = myConverter.createModuleFileConverter();
    myProjectFileConverter = myConverter.createProjectFileConverter();
    myWorkspaceConverter = myConverter.createWorkspaceFileConverter();
    myRunConfigurationsConverter = myConverter.createRunConfigurationsConverter();
    myProjectLibrariesConverter = myConverter.createProjectLibrariesConverter();
    myArtifactsConverter = myConverter.createArtifactsConverter();
  }

  /**
   * Determines which parts of the project need conversion and caches the result in the
   * {@code myProcess*} flags and {@code myModulesFilesToProcess}. Must be called before
   * {@link #preProcess()}/{@link #process()}/{@link #postProcess()}.
   *
   * @return {@code true} if at least one converter reports work to do
   */
  public boolean isConversionNeeded() throws CannotConvertException {
    if (myContext.isConversionAlreadyPerformed(myProvider)) return false;
    // The project file only exists as a single file in the DEFAULT (file-based) storage scheme.
    myProcessProjectFile = myContext.getStorageScheme() == StorageScheme.DEFAULT && myProjectFileConverter != null
                           && myProjectFileConverter.isConversionNeeded(myContext.getProjectSettings());
    myProcessWorkspaceFile = myWorkspaceConverter != null && myContext.getWorkspaceFile().exists()
                             && myWorkspaceConverter.isConversionNeeded(myContext.getWorkspaceSettings());
    myModulesFilesToProcess.clear();
    if (myModuleFileConverter != null) {
      for (File moduleFile : myContext.getModuleFiles()) {
        if (moduleFile.exists() && myModuleFileConverter.isConversionNeeded(myContext.getModuleSettings(moduleFile))) {
          myModulesFilesToProcess.add(moduleFile);
        }
      }
    }
    myProcessRunConfigurations = myRunConfigurationsConverter != null
                                 && myRunConfigurationsConverter.isConversionNeeded(myContext.getRunManagerSettings());
    myProcessProjectLibraries = myProjectLibrariesConverter != null
                                && myProjectLibrariesConverter.isConversionNeeded(myContext.getProjectLibrariesSettings());
    myArtifacts = myArtifactsConverter != null
                  && myArtifactsConverter.isConversionNeeded(myContext.getArtifactsSettings());
    return myProcessProjectFile ||
           myProcessWorkspaceFile ||
           myProcessRunConfigurations ||
           myProcessProjectLibraries ||
           !myModulesFilesToProcess.isEmpty() ||
           myConverter.isConversionNeeded();
  }

  public boolean isModuleConversionNeeded(File moduleFile) throws CannotConvertException {
    return myModuleFileConverter != null && myModuleFileConverter.isConversionNeeded(myContext.getModuleSettings(moduleFile));
  }

  public Collection<File> getCreatedFiles() {
    return myConverter.getCreatedFiles();
  }

  /**
   * Collects every file this runner may touch: the project/workspace files, module files,
   * run configuration/library/artifact storage files, the project-file-version marker (when
   * the provider can't detect a prior conversion from project files) and converter extras.
   */
  public Set<File> getAffectedFiles() {
    Set<File> affectedFiles = new HashSet<>();
    if (myProcessProjectFile) {
      affectedFiles.add(myContext.getProjectFile());
    }
    if (myProcessWorkspaceFile) {
      affectedFiles.add(myContext.getWorkspaceFile());
    }
    affectedFiles.addAll(myModulesFilesToProcess);
    try {
      if (myProcessRunConfigurations) {
        affectedFiles.addAll(myContext.getRunManagerSettings().getAffectedFiles());
      }
      if (myProcessProjectLibraries) {
        affectedFiles.addAll(myContext.getProjectLibrariesSettings().getAffectedFiles());
      }
      if (myArtifacts) {
        affectedFiles.addAll(myContext.getArtifactsSettings().getAffectedFiles());
      }
    }
    catch (CannotConvertException ignored) {
      // Best effort: settings that cannot be loaded simply contribute no files.
    }
    if (!myProvider.canDetermineIfConversionAlreadyPerformedByProjectFiles()) {
      final ComponentManagerSettings settings = myContext.getProjectFileVersionSettings();
      if (settings != null) {
        affectedFiles.add(settings.getFile());
      }
    }
    affectedFiles.addAll(myConverter.getAdditionalAffectedFiles());
    return affectedFiles;
  }

  /**
   * Invokes the given phase's method on {@code processor}. Extracted so the dispatch list in
   * {@link #runConverters(Phase)} is written only once instead of per phase.
   */
  private static <S> void runPhase(Phase phase, ConversionProcessor<S> processor, S settings) throws CannotConvertException {
    switch (phase) {
      case PRE:
        processor.preProcess(settings);
        break;
      case MAIN:
        processor.process(settings);
        break;
      case POST:
        processor.postProcess(settings);
        break;
    }
  }

  /**
   * Runs one phase over every enabled converter, in the fixed order:
   * project file, workspace file, module files, run configurations, project libraries, artifacts.
   */
  private void runConverters(Phase phase) throws CannotConvertException {
    if (myProcessProjectFile) {
      runPhase(phase, myProjectFileConverter, myContext.getProjectSettings());
    }
    if (myProcessWorkspaceFile) {
      runPhase(phase, myWorkspaceConverter, myContext.getWorkspaceSettings());
    }
    for (File moduleFile : myModulesFilesToProcess) {
      runPhase(phase, myModuleFileConverter, myContext.getModuleSettings(moduleFile));
    }
    if (myProcessRunConfigurations) {
      runPhase(phase, myRunConfigurationsConverter, myContext.getRunManagerSettings());
    }
    if (myProcessProjectLibraries) {
      runPhase(phase, myProjectLibrariesConverter, myContext.getProjectLibrariesSettings());
    }
    if (myArtifacts) {
      runPhase(phase, myArtifactsConverter, myContext.getArtifactsSettings());
    }
  }

  public void preProcess() throws CannotConvertException {
    runConverters(Phase.PRE);
    myConverter.preProcessingFinished();
  }

  public void process() throws CannotConvertException {
    runConverters(Phase.MAIN);
    myConverter.processingFinished();
  }

  public void postProcess() throws CannotConvertException {
    runConverters(Phase.POST);
    myConverter.postProcessingFinished();
  }

  public ConverterProvider getProvider() {
    return myProvider;
  }

  /** Returns the subset of {@code affectedFiles} that is not writable (e.g. under VCS lock). */
  public static List<File> getReadOnlyFiles(final Collection<? extends File> affectedFiles) {
    List<File> result = new ArrayList<>();
    for (File file : affectedFiles) {
      if (!file.canWrite()) {
        result.add(file);
      }
    }
    return result;
  }

  /** Runs all three phases of the module converter on a single module file. */
  public void convertModule(File moduleFile) throws CannotConvertException {
    final ModuleSettings settings = myContext.getModuleSettings(moduleFile);
    myModuleFileConverter.preProcess(settings);
    myModuleFileConverter.process(settings);
    myModuleFileConverter.postProcess(settings);
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.hdfs.blobstore;
import java.io.ByteArrayOutputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.security.auth.Subject;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.storm.Config;
import org.apache.storm.blobstore.AtomicOutputStream;
import org.apache.storm.blobstore.BlobStore;
import org.apache.storm.blobstore.BlobStoreAclHandler;
import org.apache.storm.blobstore.BlobStoreFile;
import org.apache.storm.blobstore.InputStreamWithMeta;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.KeyAlreadyExistsException;
import org.apache.storm.generated.KeyNotFoundException;
import org.apache.storm.generated.ReadableBlobMeta;
import org.apache.storm.generated.SettableBlobMeta;
import org.apache.storm.nimbus.NimbusInfo;
import org.apache.storm.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.storm.blobstore.BlobStoreAclHandler.ADMIN;
import static org.apache.storm.blobstore.BlobStoreAclHandler.READ;
import static org.apache.storm.blobstore.BlobStoreAclHandler.WRITE;
/**
* Provides a HDFS file system backed blob store implementation.
* Note that this provides an api for having HDFS be the backing store for the blobstore,
* it is not a service/daemon.
*
* We currently have NIMBUS_ADMINS and SUPERVISOR_ADMINS configuration. NIMBUS_ADMINS are given READ, WRITE and ADMIN
* access whereas the SUPERVISOR_ADMINS are given READ access in order to read and download the blobs form the nimbus.
*
* The ACLs for the blob store are validated against whether the subject is a NIMBUS_ADMIN, SUPERVISOR_ADMIN or USER
* who has read, write or admin privileges in order to perform respective operations on the blob.
*
* For hdfs blob store
* 1. The USER interacts with nimbus to upload and access blobs through NimbusBlobStore Client API. Here, unlike
* local blob store which stores the blobs locally, the nimbus talks to HDFS to upload the blobs.
* 2. The USER sets the ACLs, and the blob access is validated against these ACLs.
* 3. The SUPERVISOR interacts with nimbus through HdfsClientBlobStore to download the blobs. Here, unlike local
* blob store the supervisor interacts with HDFS directly to download the blobs. The call to HdfsBlobStore is made as a "null"
* subject. The blobstore gets the hadoop user and validates permissions for the supervisor.
*/
public class HdfsBlobStore extends BlobStore {
    public static final Logger LOG = LoggerFactory.getLogger(HdfsBlobStore.class);
    private static final String DATA_PREFIX = "data_";
    private static final String META_PREFIX = "meta_";
    // Cache of keytab logins keyed by "principal from keytab" so repeated prepare()
    // calls in the same JVM reuse the already-logged-in subject.
    private static final HashMap<String, Subject> alreadyLoggedInUsers = new HashMap<>();
    private BlobStoreAclHandler aclHandler;
    private HdfsBlobStoreImpl hbs;
    private Subject localSubject;
    private Map<String, Object> conf;

    /**
     * Get the subject from Hadoop so we can use it to validate the acls. There is no direct
     * interface from UserGroupInformation to get the subject, so do a doAs and get the context.
     * We could probably run everything in the doAs but for now just grab the subject.
     */
    private Subject getHadoopUser() {
        Subject subj;
        try {
            subj = UserGroupInformation.getCurrentUser().doAs(
                    new PrivilegedAction<Subject>() {
                        @Override
                        public Subject run() {
                            return Subject.getSubject(AccessController.getContext());
                        }
                    });
        } catch (IOException e) {
            throw new RuntimeException("Error creating subject and logging user in!", e);
        }
        return subj;
    }

    /**
     * If who is null then we want to use the user hadoop says we are.
     * Required for the supervisor to call these routines as its not
     * logged in as anyone.
     */
    private Subject checkAndGetSubject(Subject who) {
        if (who == null) {
            return localSubject;
        }
        return who;
    }

    @Override
    public void prepare(Map<String, Object> conf, String overrideBase, NimbusInfo nimbusInfo) {
        this.conf = conf;
        prepareInternal(conf, overrideBase, null);
    }

    /**
     * Allow a Hadoop Configuration to be passed for testing. If it's null then the hadoop configs
     * must be in your classpath.
     *
     * @param conf         storm configuration
     * @param overrideBase blobstore base directory; falls back to {@link Config#BLOBSTORE_DIR}
     * @param hadoopConf   optional Hadoop configuration for tests
     */
    protected void prepareInternal(Map<String, Object> conf, String overrideBase, Configuration hadoopConf) {
        this.conf = conf;
        if (overrideBase == null) {
            overrideBase = (String) conf.get(Config.BLOBSTORE_DIR);
        }
        if (overrideBase == null) {
            throw new RuntimeException("You must specify a blobstore directory for HDFS to use!");
        }
        LOG.debug("directory is: {}", overrideBase);
        try {
            // if a HDFS keytab/principal have been supplied login, otherwise assume they are
            // logged in already or running insecure HDFS.
            String principal = (String) conf.get(Config.BLOBSTORE_HDFS_PRINCIPAL);
            String keyTab = (String) conf.get(Config.BLOBSTORE_HDFS_KEYTAB);
            if (principal != null && keyTab != null) {
                String combinedKey = principal + " from " + keyTab;
                synchronized (alreadyLoggedInUsers) {
                    localSubject = alreadyLoggedInUsers.get(combinedKey);
                    if (localSubject == null) {
                        UserGroupInformation.loginUserFromKeytab(principal, keyTab);
                        localSubject = getHadoopUser();
                        alreadyLoggedInUsers.put(combinedKey, localSubject);
                    }
                }
            } else {
                // Supplying only one of principal/keytab is a configuration error.
                if (principal == null && keyTab != null) {
                    throw new RuntimeException("You must specify an HDFS principal to go with the keytab!");
                } else {
                    if (principal != null && keyTab == null) {
                        throw new RuntimeException("You must specify HDFS keytab go with the principal!");
                    }
                }
                localSubject = getHadoopUser();
            }
        } catch (IOException e) {
            throw new RuntimeException("Error logging in from keytab!", e);
        }
        aclHandler = new BlobStoreAclHandler(conf);
        Path baseDir = new Path(overrideBase, BASE_BLOBS_DIR_NAME);
        try {
            if (hadoopConf != null) {
                hbs = new HdfsBlobStoreImpl(baseDir, conf, hadoopConf);
            } else {
                hbs = new HdfsBlobStoreImpl(baseDir, conf);
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Creates a new blob: writes the metadata file first, then returns an output stream
     * for the caller to write the data file.
     *
     * @throws KeyAlreadyExistsException if a data file for {@code key} already exists
     */
    @Override
    public AtomicOutputStream createBlob(String key, SettableBlobMeta meta, Subject who)
            throws AuthorizationException, KeyAlreadyExistsException {
        if (meta.get_replication_factor() <= 0) {
            meta.set_replication_factor((int) conf.get(Config.STORM_BLOBSTORE_REPLICATION_FACTOR));
        }
        who = checkAndGetSubject(who);
        validateKey(key);
        aclHandler.normalizeSettableBlobMeta(key, meta, who, READ | WRITE | ADMIN);
        BlobStoreAclHandler.validateSettableACLs(key, meta.get_acl());
        aclHandler.hasPermissions(meta.get_acl(), READ | WRITE | ADMIN, who, key);
        if (hbs.exists(DATA_PREFIX + key)) {
            throw new KeyAlreadyExistsException(key);
        }
        BlobStoreFileOutputStream mOut = null;
        try {
            BlobStoreFile metaFile = hbs.write(META_PREFIX + key, true);
            metaFile.setMetadata(meta);
            mOut = new BlobStoreFileOutputStream(metaFile);
            mOut.write(Utils.thriftSerialize(meta));
            mOut.close();
            mOut = null;
            BlobStoreFile dataFile = hbs.write(DATA_PREFIX + key, true);
            dataFile.setMetadata(meta);
            return new BlobStoreFileOutputStream(dataFile);
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            // Cancel the half-written metadata file if anything failed before close().
            if (mOut != null) {
                try {
                    mOut.cancel();
                } catch (IOException e) {
                    //Ignored
                }
            }
        }
    }

    /**
     * Opens the data file of an existing blob for overwriting; requires WRITE permission.
     */
    @Override
    public AtomicOutputStream updateBlob(String key, Subject who)
            throws AuthorizationException, KeyNotFoundException {
        who = checkAndGetSubject(who);
        // Validate the key before touching storage, consistent with the other operations.
        validateKey(key);
        SettableBlobMeta meta = getStoredBlobMeta(key);
        aclHandler.hasPermissions(meta.get_acl(), WRITE, who, key);
        try {
            BlobStoreFile dataFile = hbs.write(DATA_PREFIX + key, false);
            dataFile.setMetadata(meta);
            return new BlobStoreFileOutputStream(dataFile);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Reads and deserializes the stored metadata file for {@code key}.
     *
     * @throws KeyNotFoundException if no metadata file exists for the key
     */
    private SettableBlobMeta getStoredBlobMeta(String key) throws KeyNotFoundException {
        InputStream in = null;
        try {
            BlobStoreFile pf = hbs.read(META_PREFIX + key);
            try {
                in = pf.getInputStream();
            } catch (FileNotFoundException fnf) {
                throw new KeyNotFoundException(key);
            }
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            byte[] buffer = new byte[2048];
            int len;
            while ((len = in.read(buffer)) > 0) {
                out.write(buffer, 0, len);
            }
            in.close();
            in = null;
            return Utils.thriftDeserialize(SettableBlobMeta.class, out.toByteArray());
        } catch (IOException e) {
            throw new RuntimeException(e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    //Ignored
                }
            }
        }
    }

    /**
     * Returns the blob's metadata plus a version derived from the data file's
     * modification time; requires read access to the metadata.
     */
    @Override
    public ReadableBlobMeta getBlobMeta(String key, Subject who)
            throws AuthorizationException, KeyNotFoundException {
        who = checkAndGetSubject(who);
        validateKey(key);
        SettableBlobMeta meta = getStoredBlobMeta(key);
        aclHandler.validateUserCanReadMeta(meta.get_acl(), who, key);
        ReadableBlobMeta rbm = new ReadableBlobMeta();
        rbm.set_settable(meta);
        try {
            BlobStoreFile pf = hbs.read(DATA_PREFIX + key);
            rbm.set_version(pf.getModTime());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return rbm;
    }

    /**
     * Replaces the blob's metadata; requires ADMIN permission on the existing ACLs.
     */
    @Override
    public void setBlobMeta(String key, SettableBlobMeta meta, Subject who)
            throws AuthorizationException, KeyNotFoundException {
        if (meta.get_replication_factor() <= 0) {
            meta.set_replication_factor((int) conf.get(Config.STORM_BLOBSTORE_REPLICATION_FACTOR));
        }
        who = checkAndGetSubject(who);
        validateKey(key);
        aclHandler.normalizeSettableBlobMeta(key, meta, who, ADMIN);
        BlobStoreAclHandler.validateSettableACLs(key, meta.get_acl());
        SettableBlobMeta orig = getStoredBlobMeta(key);
        aclHandler.hasPermissions(orig.get_acl(), ADMIN, who, key);
        writeMetadata(key, meta);
    }

    /**
     * Deletes both the data and metadata files of a blob; requires WRITE permission.
     */
    @Override
    public void deleteBlob(String key, Subject who)
            throws AuthorizationException, KeyNotFoundException {
        who = checkAndGetSubject(who);
        validateKey(key);
        SettableBlobMeta meta = getStoredBlobMeta(key);
        aclHandler.hasPermissions(meta.get_acl(), WRITE, who, key);
        try {
            hbs.deleteKey(DATA_PREFIX + key);
            hbs.deleteKey(META_PREFIX + key);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Opens the blob's data file for reading; requires READ permission.
     */
    @Override
    public InputStreamWithMeta getBlob(String key, Subject who)
            throws AuthorizationException, KeyNotFoundException {
        who = checkAndGetSubject(who);
        validateKey(key);
        SettableBlobMeta meta = getStoredBlobMeta(key);
        aclHandler.hasPermissions(meta.get_acl(), READ, who, key);
        try {
            return new BlobStoreFileInputStream(hbs.read(DATA_PREFIX + key));
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public Iterator<String> listKeys() {
        try {
            // Strip the data_ prefix so callers see logical blob keys.
            return new KeyTranslationIterator(hbs.listKeys(), DATA_PREFIX);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public void shutdown() {
        //Empty
    }

    /**
     * Returns the current HDFS replication of the blob's data file; requires any of
     * READ/WRITE/ADMIN permission.
     */
    @Override
    public int getBlobReplication(String key, Subject who) throws AuthorizationException, KeyNotFoundException {
        who = checkAndGetSubject(who);
        validateKey(key);
        SettableBlobMeta meta = getStoredBlobMeta(key);
        aclHandler.hasAnyPermissions(meta.get_acl(), READ | WRITE | ADMIN, who, key);
        try {
            return hbs.getBlobReplication(DATA_PREFIX + key);
        } catch (IOException exp) {
            throw new RuntimeException(exp);
        }
    }

    /**
     * Updates the replication factor in the metadata and on the HDFS data file;
     * requires WRITE or ADMIN permission.
     */
    @Override
    public int updateBlobReplication(String key, int replication, Subject who) throws AuthorizationException, KeyNotFoundException {
        who = checkAndGetSubject(who);
        validateKey(key);
        SettableBlobMeta meta = getStoredBlobMeta(key);
        meta.set_replication_factor(replication);
        aclHandler.hasAnyPermissions(meta.get_acl(), WRITE | ADMIN, who, key);
        try {
            writeMetadata(key, meta);
            return hbs.updateBlobReplication(DATA_PREFIX + key, replication);
        } catch (IOException exp) {
            throw new RuntimeException(exp);
        }
    }

    /**
     * Serializes {@code meta} and overwrites the blob's metadata file. Cancels the
     * half-written file if serialization or close fails.
     */
    public void writeMetadata(String key, SettableBlobMeta meta)
            throws AuthorizationException, KeyNotFoundException {
        BlobStoreFileOutputStream mOut = null;
        try {
            BlobStoreFile hdfsFile = hbs.write(META_PREFIX + key, false);
            hdfsFile.setMetadata(meta);
            mOut = new BlobStoreFileOutputStream(hdfsFile);
            mOut.write(Utils.thriftSerialize(meta));
            mOut.close();
            mOut = null;
        } catch (IOException exp) {
            throw new RuntimeException(exp);
        } finally {
            if (mOut != null) {
                try {
                    mOut.cancel();
                } catch (IOException e) {
                    //Ignored
                }
            }
        }
    }

    /** Deletes blob files older than {@code age} milliseconds (delegates to the impl). */
    public void fullCleanup(long age) throws IOException {
        hbs.fullCleanup(age);
    }
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.application.impl;
import com.google.common.annotations.VisibleForTesting;
import com.intellij.diagnostic.ThreadDumper;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.NonBlockingReadAction;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.application.constraints.ExpirableConstrainedExecution;
import com.intellij.openapi.application.constraints.Expiration;
import com.intellij.openapi.progress.EmptyProgressIndicator;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.util.ProgressIndicatorUtils;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.util.concurrency.Semaphore;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import kotlin.collections.ArraysKt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.CancellablePromise;
import org.jetbrains.concurrency.Promises;
import java.util.Collections;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
/**
 * Implementation of {@link NonBlockingReadAction}: runs {@code myComputation} inside a
 * background read action that yields to pending write actions
 * (via {@link ProgressIndicatorUtils#runInReadActionWithWriteActionPriority}), retrying
 * until it completes, and optionally delivers the result on the EDT.
 *
 * <p>Threading is delicate here — in particular {@code safeTransferToEdt} keeps the
 * background read lock held until the EDT has taken over the result; see the inline
 * comments there. Do not reorder statements casually.
 *
 * @author peter
 */
@VisibleForTesting
public class NonBlockingReadActionImpl<T>
    extends ExpirableConstrainedExecution<NonBlockingReadActionImpl<T>>
    implements NonBlockingReadAction<T> {

    // Modality + callback to invoke on the EDT after a successful computation;
    // null when the caller did not request finishOnUiThread().
    private final @Nullable Pair<ModalityState, Consumer<T>> myEdtFinish;

    // The user-supplied computation, executed under a read action on a BG thread.
    private final Callable<T> myComputation;

    // In unit-test mode every submitted promise is tracked here so tests can cancel
    // or await them (see cancelAllTasks / waitForAsyncTaskCompletion below).
    private static final Set<CancellablePromise<?>> ourTasks = ContainerUtil.newConcurrentSet();

    NonBlockingReadActionImpl(@NotNull Callable<T> computation) {
        this(computation, null, new ContextConstraint[0], new BooleanSupplier[0], Collections.emptySet());
    }

    private NonBlockingReadActionImpl(@NotNull Callable<T> computation,
                                      @Nullable Pair<ModalityState, Consumer<T>> edtFinish,
                                      @NotNull ContextConstraint[] constraints,
                                      @NotNull BooleanSupplier[] cancellationConditions,
                                      @NotNull Set<? extends Expiration> expirationSet) {
        super(constraints, cancellationConditions, expirationSet);
        myComputation = computation;
        myEdtFinish = edtFinish;
    }

    /**
     * Builder-style copy used by the base class: same computation and EDT finish,
     * new constraint/cancellation/expiration sets.
     */
    @NotNull
    @Override
    protected NonBlockingReadActionImpl<T> cloneWith(@NotNull ContextConstraint[] constraints,
                                                     @NotNull BooleanSupplier[] cancellationConditions,
                                                     @NotNull Set<? extends Expiration> expirationSet) {
        return new NonBlockingReadActionImpl<>(myComputation, myEdtFinish, constraints, cancellationConditions, expirationSet);
    }

    @Override
    public void dispatchLaterUnconstrained(@NotNull Runnable runnable) {
        // ModalityState.any(): dispatch regardless of modal dialogs currently shown.
        ApplicationManager.getApplication().invokeLater(runnable, ModalityState.any());
    }

    @Override
    public NonBlockingReadAction<T> inSmartMode(@NotNull Project project) {
        return withConstraint(new InSmartMode(project), project);
    }

    @Override
    public NonBlockingReadAction<T> withDocumentsCommitted(@NotNull Project project) {
        return withConstraint(new WithDocumentsCommitted(project, ModalityState.any()), project);
    }

    @Override
    public NonBlockingReadAction<T> expireWhen(@NotNull BooleanSupplier expireCondition) {
        return cancelIf(expireCondition);
    }

    @Override
    public NonBlockingReadAction<T> finishOnUiThread(@NotNull ModalityState modality, @NotNull Consumer<T> uiThreadAction) {
        return new NonBlockingReadActionImpl<>(myComputation, Pair.create(modality, uiThreadAction),
                                               getConstraints(), getCancellationConditions(), getExpirationSet());
    }

    /**
     * Schedules the computation on the given executor and returns a promise for its
     * result. In unit-test mode the promise is also registered in {@code ourTasks}
     * (and removed once processed) so test utilities can find it.
     */
    @Override
    public CancellablePromise<T> submit(@NotNull Executor backgroundThreadExecutor) {
        AsyncPromise<T> promise = new AsyncPromise<>();
        new Submission(promise, backgroundThreadExecutor).transferToBgThread();
        if (ApplicationManager.getApplication().isUnitTestMode()) {
            ourTasks.add(promise);
            promise.onProcessed(__ -> ourTasks.remove(promise));
        }
        return promise;
    }

    /**
     * One submitted run of the action: owns the promise, the current progress
     * indicator, and the retry loop between BG thread and EDT.
     */
    private class Submission {
        private final AsyncPromise<? super T> promise;
        @NotNull private final Executor backendExecutor;
        // Indicator of the read action currently in flight; cancelled when the
        // promise fails so the BG computation stops promptly.
        private volatile ProgressIndicator currentIndicator;
        // Captured at submission time so retries run under the same modality.
        private final ModalityState creationModality = ModalityState.defaultModalityState();
        // Combined user-supplied cancellation conditions; null when there are none.
        @Nullable private final BooleanSupplier myExpireCondition;

        Submission(AsyncPromise<? super T> promise, @NotNull Executor backgroundThreadExecutor) {
            this.promise = promise;
            backendExecutor = backgroundThreadExecutor;
            // Propagate promise failure/cancellation into the running read action.
            promise.onError(__ -> {
                ProgressIndicator indicator = currentIndicator;
                if (indicator != null) {
                    indicator.cancel();
                }
            });
            final Expiration expiration = composeExpiration();
            if (expiration != null) {
                // Cancel the promise when any expiration fires; unregister once done
                // so the handler does not outlive this submission.
                final Expiration.Handle expirationHandle = expiration.invokeOnExpiration(promise::cancel);
                promise.onProcessed(value -> expirationHandle.unregisterHandler());
            }
            myExpireCondition = composeCancellationCondition();
        }

        void transferToBgThread() {
            transferToBgThread(ReschedulingAttempt.NULL);
        }

        /**
         * Runs one attempt on the backend executor. If the promise is still pending
         * afterwards (the read action was interrupted by a write action, or the
         * constraints were not satisfied), reschedules another attempt through the
         * constraint machinery via the EDT.
         */
        void transferToBgThread(@NotNull ReschedulingAttempt previousAttempt) {
            backendExecutor.execute(() -> {
                final ProgressIndicator indicator = new EmptyProgressIndicator(creationModality);
                currentIndicator = indicator;
                try {
                    ProgressIndicatorUtils.runInReadActionWithWriteActionPriority(() -> insideReadAction(indicator), indicator);
                }
                finally {
                    currentIndicator = null;
                }
                if (Promises.isPending(promise)) {
                    doScheduleWithinConstraints(attempt -> dispatchLaterUnconstrained(() -> transferToBgThread(attempt)), previousAttempt);
                }
            });
        }

        /**
         * Body of one read-action attempt: bail out if obsolete or constraints are
         * unmet (leaving the promise pending triggers a retry), otherwise compute
         * and deliver the result — via the EDT when finishOnUiThread was requested.
         */
        void insideReadAction(ProgressIndicator indicator) {
            try {
                if (checkObsolete() || !constraintsAreSatisfied()) return;
                T result = myComputation.call();
                if (myEdtFinish != null) {
                    safeTransferToEdt(result, myEdtFinish, indicator);
                } else {
                    promise.setResult(result);
                }
            }
            catch (Throwable e) {
                // A cancelled indicator means we were pre-empted (e.g. by a write
                // action); the attempt will be retried, so don't fail the promise.
                if (!indicator.isCanceled()) {
                    promise.setError(e);
                }
            }
        }

        private boolean constraintsAreSatisfied() {
            return ArraysKt.all(getConstraints(), ContextConstraint::isCorrectContext);
        }

        /**
         * Returns true (and cancels the promise when an expire condition fired) if
         * this submission should no longer run.
         */
        private boolean checkObsolete() {
            if (promise.isCancelled()) return true;
            if (myExpireCondition != null && myExpireCondition.getAsBoolean()) {
                promise.cancel();
                return true;
            }
            return false;
        }

        /**
         * Hands the computed result to the EDT at the requested modality while the
         * calling BG thread keeps its read lock until the EDT has accepted the
         * result — this prevents a write action from invalidating the result in
         * the gap between computation and delivery.
         */
        void safeTransferToEdt(T result, Pair<? extends ModalityState, ? extends Consumer<T>> edtFinish, ProgressIndicator indicator) {
            if (Promises.isRejected(promise)) return;
            Semaphore semaphore = new Semaphore(1);
            ApplicationManager.getApplication().invokeLater(() -> {
                if (indicator.isCanceled()) {
                    // a write action has managed to sneak in before us, or the whole computation got canceled;
                    // anyway, nobody waits for us on bg thread, so we just exit
                    return;
                }
                if (checkObsolete()) {
                    semaphore.up();
                    return;
                }
                // complete the promise now to prevent write actions inside custom callback from cancelling it
                promise.setResult(result);
                // now background thread may release its read lock, and we continue on EDT, invoking custom callback
                semaphore.up();
                if (promise.isSucceeded()) { // in case another thread managed to cancel it just before `setResult`
                    edtFinish.second.accept(result);
                }
            }, edtFinish.first);
            // don't release read action until we're on EDT, to avoid result invalidation in between
            while (!semaphore.waitFor(10)) {
                if (indicator.isCanceled()) { // checkCanceled isn't enough, because some smart developers disable it
                    throw new ProcessCanceledException();
                }
            }
        }
    }

    /**
     * Test-only: cancels every tracked task, running empty write actions until the
     * background threads have drained.
     */
    @TestOnly
    public static void cancelAllTasks() {
        while (!ourTasks.isEmpty()) {
            for (CancellablePromise<?> task : ourTasks) {
                task.cancel();
            }
            WriteAction.run(() -> {}); // let background threads complete
        }
    }

    /**
     * Test-only: waits for every tracked task to finish. Must not be called while
     * holding the write lock — BG read actions could never complete.
     */
    @TestOnly
    public static void waitForAsyncTaskCompletion() {
        assert !ApplicationManager.getApplication().isWriteAccessAllowed();
        for (CancellablePromise<?> task : ourTasks) {
            waitForTask(task);
        }
    }

    /**
     * Test-only: pumps EDT events while polling the task for up to ~60s worth of
     * iterations; dumps all threads and fails if it never completes.
     */
    @TestOnly
    private static void waitForTask(@NotNull CancellablePromise<?> task) {
        int iteration = 0;
        while (!task.isDone() && iteration++ < 60_000) {
            UIUtil.dispatchAllInvocationEvents();
            try {
                task.blockingGet(1, TimeUnit.MILLISECONDS);
                return;
            }
            catch (TimeoutException ignore) {
            }
            catch (Exception e) {
                throw new RuntimeException(e);
            }
        }
        if (!task.isDone()) {
            //noinspection UseOfSystemOutOrSystemErr
            System.err.println(ThreadDumper.dumpThreadsToString());
            throw new AssertionError("Too long async task");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.skywalking.oap.server.storage.plugin.elasticsearch;
import java.io.ByteArrayInputStream;
import java.util.Properties;
import java.util.function.Function;
import lombok.extern.slf4j.Slf4j;
import org.apache.skywalking.oap.server.library.util.StringUtil;
import org.apache.skywalking.oap.server.core.CoreModule;
import org.apache.skywalking.oap.server.core.storage.IBatchDAO;
import org.apache.skywalking.oap.server.core.storage.IHistoryDeleteDAO;
import org.apache.skywalking.oap.server.core.storage.StorageBuilderFactory;
import org.apache.skywalking.oap.server.core.storage.StorageDAO;
import org.apache.skywalking.oap.server.core.storage.StorageModule;
import org.apache.skywalking.oap.server.core.storage.cache.INetworkAddressAliasDAO;
import org.apache.skywalking.oap.server.core.storage.management.UITemplateManagementDAO;
import org.apache.skywalking.oap.server.core.storage.model.ModelCreator;
import org.apache.skywalking.oap.server.core.storage.profile.IProfileTaskLogQueryDAO;
import org.apache.skywalking.oap.server.core.storage.profile.IProfileTaskQueryDAO;
import org.apache.skywalking.oap.server.core.storage.profile.IProfileThreadSnapshotQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IAggregationQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IAlarmQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IBrowserLogQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IEventQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.ILogQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IMetadataQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.IMetricsQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.ITopNRecordsQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.ITopologyQueryDAO;
import org.apache.skywalking.oap.server.core.storage.query.ITraceQueryDAO;
import org.apache.skywalking.oap.server.library.client.elasticsearch.ElasticSearchClient;
import org.apache.skywalking.oap.server.library.module.ModuleConfig;
import org.apache.skywalking.oap.server.library.module.ModuleDefine;
import org.apache.skywalking.oap.server.library.module.ModuleProvider;
import org.apache.skywalking.oap.server.library.module.ModuleStartException;
import org.apache.skywalking.oap.server.library.module.ServiceNotProvidedException;
import org.apache.skywalking.oap.server.library.util.MultipleFilesChangeMonitor;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.BatchProcessEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.HistoryDeleteEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.StorageEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.StorageEsInstaller;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.base.TimeSeriesUtils;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.cache.NetworkAddressAliasEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.AggregationQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.AlarmQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.BrowserLogQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.ESEventQueryDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.LogQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.MetadataQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.MetricsQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.ProfileTaskLogEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.ProfileTaskQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.ProfileThreadSnapshotQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.TopNRecordsQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.TopologyQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.TraceQueryEsDAO;
import org.apache.skywalking.oap.server.storage.plugin.elasticsearch.query.UITemplateManagementEsDAO;
import org.apache.skywalking.oap.server.telemetry.TelemetryModule;
import org.apache.skywalking.oap.server.telemetry.api.HealthCheckMetrics;
import org.apache.skywalking.oap.server.telemetry.api.MetricsCreator;
import org.apache.skywalking.oap.server.telemetry.api.MetricsTag;
/**
 * The storage provider for ElasticSearch 6: creates the {@link ElasticSearchClient},
 * optionally wires a secrets-file monitor for credential rotation, and registers all
 * ES-backed DAO implementations with the storage module.
 */
@Slf4j
public class StorageModuleElasticsearchProvider extends ModuleProvider {
    protected final StorageModuleElasticsearchConfig config;
    protected ElasticSearchClient elasticSearchClient;

    public StorageModuleElasticsearchProvider() {
        super();
        this.config = new StorageModuleElasticsearchConfig();
    }

    @Override
    public String name() {
        return "elasticsearch";
    }

    @Override
    public Class<? extends ModuleDefine> module() {
        return StorageModule.class;
    }

    @Override
    public ModuleConfig createConfigBeanIfAbsent() {
        return config;
    }

    /**
     * Normalizes configuration, starts the secrets monitor when configured, builds
     * the ES client, and registers every DAO service implementation.
     *
     * @throws ServiceNotProvidedException if a service registration fails
     */
    @Override
    public void prepare() throws ServiceNotProvidedException {
        this.registerServiceImplementation(StorageBuilderFactory.class, new StorageBuilderFactory.Default());
        // Index namespace defaults to "sw"; ES index names must be lowercase.
        if (StringUtil.isEmpty(config.getNamespace())) {
            config.setNamespace("sw");
        } else {
            config.setNamespace(config.getNamespace().toLowerCase());
        }
        if (config.getDayStep() > 1) {
            TimeSeriesUtils.setDAY_STEP(config.getDayStep());
            // Super-dataset step follows dayStep unless overridden below.
            TimeSeriesUtils.setSUPER_DATASET_DAY_STEP(config.getDayStep());
        }
        if (config.getSuperDatasetDayStep() > 0) {
            TimeSeriesUtils.setSUPER_DATASET_DAY_STEP(config.getSuperDatasetDayStep());
        }
        if (!StringUtil.isEmpty(config.getSecretsManagementFile())) {
            MultipleFilesChangeMonitor monitor = new MultipleFilesChangeMonitor(
                10, readableContents -> {
                final byte[] secretsFileContent = readableContents.get(0);
                if (secretsFileContent == null) {
                    return;
                }
                Properties secrets = new Properties();
                secrets.load(new ByteArrayInputStream(secretsFileContent));
                config.setUser(secrets.getProperty("user", null));
                config.setPassword(secrets.getProperty("password", null));
                config.setTrustStorePass(secrets.getProperty("trustStorePass", null));
                if (elasticSearchClient == null) {
                    // In the startup process, we just need to change the username/password
                } else {
                    // The client has connected, updates the config and connects again.
                    elasticSearchClient.setUser(config.getUser());
                    elasticSearchClient.setPassword(config.getPassword());
                    elasticSearchClient.setTrustStorePass(config.getTrustStorePass());
                    elasticSearchClient.connect();
                }
                // FIX: watch the trust store FILE (trustStorePath), not the trust store
                // password. getTrustStorePass() is a credential string, not a path, so
                // passing it as a monitored file could never detect real changes.
            }, config.getSecretsManagementFile(), config.getTrustStorePath());
            /*
             * By leveraging the sync update check feature when startup.
             */
            monitor.start();
        }
        elasticSearchClient = new ElasticSearchClient(
            config.getClusterNodes(), config.getProtocol(), config.getTrustStorePath(), config
            .getTrustStorePass(), config.getUser(), config.getPassword(),
            indexNameConverter(config.getNamespace()), config.getConnectTimeout(),
            config.getSocketTimeout(), config.getNumHttpClientThread()
        );
        // Register every DAO backed by the single shared ES client.
        this.registerServiceImplementation(
            IBatchDAO.class,
            new BatchProcessEsDAO(elasticSearchClient, config.getBulkActions(), config
                .getFlushInterval(), config.getConcurrentRequests())
        );
        this.registerServiceImplementation(StorageDAO.class, new StorageEsDAO(elasticSearchClient));
        this.registerServiceImplementation(
            IHistoryDeleteDAO.class, new HistoryDeleteEsDAO(elasticSearchClient));
        this.registerServiceImplementation(
            INetworkAddressAliasDAO.class, new NetworkAddressAliasEsDAO(elasticSearchClient, config
                .getResultWindowMaxSize()));
        this.registerServiceImplementation(ITopologyQueryDAO.class, new TopologyQueryEsDAO(elasticSearchClient));
        this.registerServiceImplementation(IMetricsQueryDAO.class, new MetricsQueryEsDAO(elasticSearchClient));
        this.registerServiceImplementation(
            ITraceQueryDAO.class, new TraceQueryEsDAO(elasticSearchClient, config.getSegmentQueryMaxSize()));
        this.registerServiceImplementation(IBrowserLogQueryDAO.class, new BrowserLogQueryEsDAO(elasticSearchClient));
        this.registerServiceImplementation(
            IMetadataQueryDAO.class, new MetadataQueryEsDAO(elasticSearchClient, config.getMetadataQueryMaxSize()));
        this.registerServiceImplementation(IAggregationQueryDAO.class, new AggregationQueryEsDAO(elasticSearchClient));
        this.registerServiceImplementation(IAlarmQueryDAO.class, new AlarmQueryEsDAO(elasticSearchClient));
        this.registerServiceImplementation(ITopNRecordsQueryDAO.class, new TopNRecordsQueryEsDAO(elasticSearchClient));
        this.registerServiceImplementation(ILogQueryDAO.class, new LogQueryEsDAO(elasticSearchClient));
        this.registerServiceImplementation(
            IProfileTaskQueryDAO.class, new ProfileTaskQueryEsDAO(elasticSearchClient, config
                .getProfileTaskQueryMaxSize()));
        this.registerServiceImplementation(
            IProfileTaskLogQueryDAO.class, new ProfileTaskLogEsDAO(elasticSearchClient, config
                .getProfileTaskQueryMaxSize()));
        this.registerServiceImplementation(
            IProfileThreadSnapshotQueryDAO.class, new ProfileThreadSnapshotQueryEsDAO(elasticSearchClient, config
                .getProfileTaskQueryMaxSize()));
        this.registerServiceImplementation(
            UITemplateManagementDAO.class, new UITemplateManagementEsDAO(elasticSearchClient));
        this.registerServiceImplementation(IEventQueryDAO.class, new ESEventQueryDAO(elasticSearchClient));
    }

    /**
     * Connects the client (with a telemetry health checker attached) and registers
     * the index installer as a model listener.
     *
     * @throws ModuleStartException wrapping any connection/installation failure
     */
    @Override
    public void start() throws ModuleStartException {
        MetricsCreator metricCreator = getManager().find(TelemetryModule.NAME)
                                                   .provider()
                                                   .getService(MetricsCreator.class);
        HealthCheckMetrics healthChecker = metricCreator.createHealthCheckerGauge(
            "storage_elasticsearch", MetricsTag.EMPTY_KEY, MetricsTag.EMPTY_VALUE);
        elasticSearchClient.registerChecker(healthChecker);
        try {
            elasticSearchClient.connect();
            StorageEsInstaller installer = new StorageEsInstaller(elasticSearchClient, getManager(), config);
            getManager().find(CoreModule.NAME).provider().getService(ModelCreator.class).addModelListener(installer);
        } catch (Exception e) {
            throw new ModuleStartException(e.getMessage(), e);
        }
    }

    @Override
    public void notifyAfterCompleted() {
    }

    @Override
    public String[] requiredModules() {
        return new String[] {CoreModule.NAME};
    }

    /**
     * Returns a converter that prefixes index names with the namespace (when set),
     * e.g. "sw_segment"; the name is returned unchanged for an empty namespace.
     */
    public static Function<String, String> indexNameConverter(String namespace) {
        return indexName -> {
            if (StringUtil.isNotEmpty(namespace)) {
                return namespace + "_" + indexName;
            }
            return indexName;
        };
    }
}
| |
package com.xwray.groupie.example.databinding;
import android.content.Intent;
import android.content.SharedPreferences;
import androidx.annotation.NonNull;
import androidx.databinding.DataBindingUtil;
import android.os.Bundle;
import android.os.Handler;
import androidx.core.content.ContextCompat;
import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.GridLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import androidx.recyclerview.widget.ItemTouchHelper;
import android.text.TextUtils;
import android.view.View;
import android.widget.Toast;
import com.xwray.groupie.ExpandableGroup;
import com.xwray.groupie.Group;
import com.xwray.groupie.GroupieAdapter;
import com.xwray.groupie.GroupieViewHolder;
import com.xwray.groupie.Item;
import com.xwray.groupie.OnItemClickListener;
import com.xwray.groupie.OnItemLongClickListener;
import com.xwray.groupie.Section;
import com.xwray.groupie.TouchCallback;
import com.xwray.groupie.example.core.InfiniteScrollListener;
import com.xwray.groupie.example.core.Prefs;
import com.xwray.groupie.example.core.SettingsActivity;
import com.xwray.groupie.example.core.decoration.CarouselItemDecoration;
import com.xwray.groupie.example.core.decoration.DebugItemDecoration;
import com.xwray.groupie.example.core.decoration.SwipeTouchCallback;
import com.xwray.groupie.example.databinding.databinding.ActivityMainBinding;
import com.xwray.groupie.example.databinding.item.CardItem;
import com.xwray.groupie.example.databinding.item.CarouselCardItem;
import com.xwray.groupie.example.databinding.item.ColumnItem;
import com.xwray.groupie.example.databinding.item.DraggableItem;
import com.xwray.groupie.example.databinding.item.FullBleedCardItem;
import com.xwray.groupie.example.databinding.item.HeaderItem;
import com.xwray.groupie.example.databinding.item.HeartCardItem;
import com.xwray.groupie.example.databinding.item.SmallCardItem;
import com.xwray.groupie.example.databinding.item.SwipeToDeleteItem;
import com.xwray.groupie.example.databinding.item.UpdatableItem;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Demo activity showing the main Groupie features with data binding: sections,
 * expandable groups, columns, carousels, swipe-to-delete, drag-to-reorder,
 * payload updates, and infinite loading.
 *
 * <p>Fix in this revision: pending {@code handler} callbacks (the simulated network
 * requests from {@link #onFavoriteListener}) are now cancelled in {@link #onDestroy()};
 * previously they could fire after the activity was destroyed and leak it.
 */
public class MainActivity extends AppCompatActivity {

    public static final String INSET_TYPE_KEY = "inset_type";
    public static final String FULL_BLEED = "full_bleed";
    public static final String INSET = "inset";

    private ActivityMainBinding binding;
    private GroupieAdapter groupAdapter;
    private GridLayoutManager layoutManager;
    private Prefs prefs;
    private int gray;
    private int betweenPadding;
    private int[] rainbow200;
    private int[] rainbow500;
    private Section infiniteLoadingSection;
    private Section swipeSection;
    private Section dragSection;

    // Normally there's no need to hold onto a reference to this list, but for demonstration
    // purposes, we'll shuffle this list and post an update periodically
    private ArrayList<UpdatableItem> updatableItems;

    // Hold a reference to the updating group, so we can, well, update it
    private Section updatingGroup;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        binding = DataBindingUtil.setContentView(this, R.layout.activity_main);
        prefs = Prefs.get(this);
        gray = ContextCompat.getColor(this, R.color.background);
        betweenPadding = getResources().getDimensionPixelSize(R.dimen.padding_small);
        rainbow200 = getResources().getIntArray(R.array.rainbow_200);
        rainbow500 = getResources().getIntArray(R.array.rainbow_500);
        groupAdapter = new GroupieAdapter();
        groupAdapter.setOnItemClickListener(onItemClickListener);
        groupAdapter.setOnItemLongClickListener(onItemLongClickListener);
        // 12 spans: divisible by 1, 2, 3, 4 and 6 so items can take many widths.
        groupAdapter.setSpanCount(12);
        populateAdapter();
        layoutManager = new GridLayoutManager(this, groupAdapter.getSpanCount());
        layoutManager.setSpanSizeLookup(groupAdapter.getSpanSizeLookup());
        final RecyclerView recyclerView = binding.recyclerView;
        recyclerView.setLayoutManager(layoutManager);
        recyclerView.addItemDecoration(new HeaderItemDecoration(gray, betweenPadding));
        recyclerView.addItemDecoration(new InsetItemDecoration(gray, betweenPadding));
        recyclerView.addItemDecoration(new DebugItemDecoration(this));
        recyclerView.setAdapter(groupAdapter);
        // Append five cards to the infinite section whenever the user nears the end.
        recyclerView.addOnScrollListener(new InfiniteScrollListener(layoutManager) {
            @Override public void onLoadMore(int currentPage) {
                for (int i = 0; i < 5; i++) {
                    infiniteLoadingSection.add(new CardItem());
                }
            }
        });
        ItemTouchHelper itemTouchHelper = new ItemTouchHelper(touchCallback);
        itemTouchHelper.attachToRecyclerView(recyclerView);
        binding.fab.setOnClickListener(new View.OnClickListener() {
            @Override public void onClick(View view) {
                startActivity(new Intent(MainActivity.this, SettingsActivity.class));
            }
        });
        prefs.registerListener(onSharedPrefChangeListener);
    }

    /** Builds every demo section and adds them to {@code groupAdapter} in display order. */
    private void populateAdapter() {
        // Full bleed item
        Section fullBleedItemSection = new Section(new HeaderItem(R.string.full_bleed_item));
        fullBleedItemSection.add(new FullBleedCardItem());
        groupAdapter.add(fullBleedItemSection);
        // Update in place group
        Section updatingSection = new Section();
        View.OnClickListener onShuffleClicked = new View.OnClickListener() {
            @Override public void onClick(View view) {
                List<UpdatableItem> shuffled = new ArrayList<>(updatableItems);
                Collections.shuffle(shuffled);
                updatingGroup.update(shuffled);
                // You can also do this by forcing a change with payload
                binding.recyclerView.post(new Runnable() {
                    @Override public void run() {
                        binding.recyclerView.invalidateItemDecorations();
                    }
                });
            }
        };
        HeaderItem updatingHeader = new HeaderItem(
            R.string.updating_group,
            R.string.updating_group_subtitle,
            R.drawable.shuffle,
            onShuffleClicked);
        updatingSection.setHeader(updatingHeader);
        updatingGroup = new Section();
        updatableItems = new ArrayList<>();
        for (int i = 1; i <= 12; i++) {
            updatableItems.add(new UpdatableItem(i));
        }
        updatingGroup.update(updatableItems);
        updatingSection.add(updatingGroup);
        groupAdapter.add(updatingSection);
        // Expandable group
        ExpandableHeaderItem expandableHeaderItem = new ExpandableHeaderItem(R.string.expanding_group, R.string.expanding_group_subtitle);
        ExpandableGroup expandableGroup = new ExpandableGroup(expandableHeaderItem);
        for (int i = 0; i < 2; i++) {
            expandableGroup.add(new CardItem());
        }
        groupAdapter.add(expandableGroup);
        // Columns
        Section columnSection = new Section(new HeaderItem(R.string.vertical_columns));
        ColumnGroup columnGroup = makeColumnGroup();
        columnSection.add(columnGroup);
        groupAdapter.add(columnSection);
        // Group showing even spacing with multiple columns
        Section multipleColumnsSection = new Section(new HeaderItem(R.string.multiple_columns));
        for (int i = 0; i < 12; i++) {
            multipleColumnsSection.add(new SmallCardItem());
        }
        groupAdapter.add(multipleColumnsSection);
        // Swipe to delete (with add button in header)
        swipeSection = new Section(new HeaderItem(R.string.swipe_to_delete));
        for (int i = 0; i < 3; i++) {
            swipeSection.add(new SwipeToDeleteItem(rainbow200[6]));
        }
        groupAdapter.add(swipeSection);
        // Drag to reorder (removed a redundant clear() on the freshly created section).
        dragSection = new Section(new HeaderItem(R.string.drag_to_reorder));
        for (int i = 0; i < 5; i++) {
            dragSection.add(new DraggableItem(rainbow500[i]));
        }
        groupAdapter.add(dragSection);
        // Horizontal carousel
        Section carouselSection = new Section(new HeaderItem(R.string.carousel, R.string.carousel_subtitle));
        carouselSection.setHideWhenEmpty(true);
        Group carousel = makeCarouselGroup();
        carouselSection.add(carousel);
        groupAdapter.add(carouselSection);
        // Update with payload
        Section updateWithPayloadSection = new Section(new HeaderItem(R.string.update_with_payload, R.string.update_with_payload_subtitle));
        for (int i = 0; i < rainbow500.length; i++) {
            updateWithPayloadSection.add(new HeartCardItem(i, onFavoriteListener));
        }
        groupAdapter.add(updateWithPayloadSection);
        // Infinite loading section
        infiniteLoadingSection = new Section(new HeaderItem(R.string.infinite_loading));
        groupAdapter.add(infiniteLoadingSection);
    }

    /** Ten column items; the layout displays them as two vertical columns of five. */
    private ColumnGroup makeColumnGroup() {
        List<ColumnItem> columnItems = new ArrayList<>();
        for (int i = 1; i <= 5; i++) {
            // First five items are red -- they'll end up in a vertical column
            columnItems.add(new ColumnItem(i));
        }
        for (int i = 6; i <= 10; i++) {
            // Next five items are pink
            columnItems.add(new ColumnItem(i));
        }
        return new ColumnGroup(columnItems);
    }

    /** A nested horizontal RecyclerView of ten rainbow cards wrapped as a single Group. */
    private Group makeCarouselGroup() {
        CarouselItemDecoration carouselDecoration = new CarouselItemDecoration(gray, betweenPadding);
        GroupieAdapter carouselAdapter = new GroupieAdapter();
        for (int i = 0; i < 10; i++) {
            carouselAdapter.add(new CarouselCardItem(rainbow200[i]));
        }
        return new CarouselGroup(carouselDecoration, carouselAdapter);
    }

    // Toast the card text when a (non-empty) card is tapped.
    private OnItemClickListener onItemClickListener = new OnItemClickListener() {
        @Override
        public void onItemClick(Item item, View view) {
            if (item instanceof CardItem) {
                CardItem cardItem = (CardItem) item;
                if (!TextUtils.isEmpty(cardItem.getText())) {
                    Toast.makeText(MainActivity.this, cardItem.getText(), Toast.LENGTH_SHORT).show();
                }
            }
        }
    };

    // Toast on long click; returning true consumes the event.
    private OnItemLongClickListener onItemLongClickListener = new OnItemLongClickListener() {
        @Override
        public boolean onItemLongClick(Item item, View view) {
            if (item instanceof CardItem) {
                CardItem cardItem = (CardItem) item;
                if (!TextUtils.isEmpty(cardItem.getText())) {
                    Toast.makeText(MainActivity.this, "Long clicked: " + cardItem.getText(), Toast.LENGTH_SHORT).show();
                    return true;
                }
            }
            return false;
        }
    };

    @Override protected void onDestroy() {
        prefs.unregisterListener(onSharedPrefChangeListener);
        // FIX: cancel the simulated-network callbacks posted by onFavoriteListener;
        // otherwise they fire up to 1s after destruction and leak this activity.
        handler.removeCallbacksAndMessages(null);
        super.onDestroy();
    }

    // Handles both drag-to-reorder (within dragSection) and swipe-to-delete
    // (within swipeSection).
    private TouchCallback touchCallback = new SwipeTouchCallback() {
        @Override public boolean onMove(@NonNull RecyclerView recyclerView, @NonNull RecyclerView.ViewHolder viewHolder, RecyclerView.ViewHolder target) {
            Item item = groupAdapter.getItem(viewHolder.getBindingAdapterPosition());
            Item targetItem = groupAdapter.getItem(target.getBindingAdapterPosition());
            List<Group> dragItems = dragSection.getGroups();
            // NOTE(review): targetIndex is computed before removing `item`, so when the
            // item is dragged downwards the insert index may be off by one — verify the
            // intended reorder behavior before "fixing".
            int targetIndex = dragItems.indexOf(targetItem);
            dragItems.remove(item);
            // if item gets moved out of the boundary
            if (targetIndex == -1) {
                if (target.getBindingAdapterPosition() < viewHolder.getBindingAdapterPosition()) {
                    targetIndex = 0;
                } else {
                    targetIndex = dragItems.size() - 1;
                }
            }
            dragItems.add(targetIndex, item);
            dragSection.update(dragItems);
            return true;
        }

        @Override public void onSwiped(RecyclerView.ViewHolder viewHolder, int direction) {
            Item item = groupAdapter.getItem(viewHolder.getBindingAdapterPosition());
            // Change notification to the adapter happens automatically when the section is
            // changed.
            swipeSection.remove(item);
        }
    };

    private SharedPreferences.OnSharedPreferenceChangeListener onSharedPrefChangeListener =
        new SharedPreferences.OnSharedPreferenceChangeListener() {
            @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String s) {
                // This is pretty evil, try not to do this
                groupAdapter.notifyDataSetChanged();
            }
        };

    private Handler handler = new Handler();

    // Simulates a 1s network round-trip, then flips the heart with a payload so only
    // the favorite state rebinds. Pending posts are cancelled in onDestroy().
    private HeartCardItem.OnFavoriteListener onFavoriteListener = new HeartCardItem.OnFavoriteListener() {
        @Override
        public void onFavorite(final HeartCardItem item, final boolean favorite) {
            // Pretend to make a network request
            handler.postDelayed(new Runnable() {
                @Override
                public void run() {
                    // Network request was successful!
                    item.setFavorite(favorite);
                    item.notifyChanged(HeartCardItem.FAVORITE);
                }
            }, 1000);
        }
    };
}
| |
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.web.servlet;
import java.net.URI;
import javax.servlet.MultipartConfigElement;
import org.junit.After;
import org.junit.Test;
import org.springframework.boot.autoconfigure.web.ServerProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.test.util.TestPropertyValues;
import org.springframework.boot.web.embedded.jetty.JettyServletWebServerFactory;
import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory;
import org.springframework.boot.web.embedded.undertow.UndertowServletWebServerFactory;
import org.springframework.boot.web.servlet.context.AnnotationConfigServletWebServerApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.client.ClientHttpRequest;
import org.springframework.http.client.ClientHttpResponse;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.stereotype.Controller;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.multipart.MultipartResolver;
import org.springframework.web.multipart.commons.CommonsMultipartResolver;
import org.springframework.web.multipart.support.StandardServletMultipartResolver;
import org.springframework.web.servlet.DispatcherServlet;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
/**
* Tests for {@link MultipartAutoConfiguration}. Tests an empty configuration, no
* multipart configuration, and a multipart configuration (with both Jetty and Tomcat).
*
* @author Greg Turnquist
* @author Dave Syer
* @author Josh Long
* @author Ivan Sopov
* @author Toshiaki Maki
*/
public class MultipartAutoConfigurationTests {

	// Embedded-server application context built fresh by each test; torn down in close().
	private AnnotationConfigServletWebServerApplicationContext context;

	@After
	public void close() {
		// Stop the embedded web server between tests so ports are released.
		if (this.context != null) {
			this.context.close();
		}
	}

	// No user multipart/server configuration at all: auto-configuration should still
	// register exactly one StandardServletMultipartResolver and wire it into the
	// DispatcherServlet.
	@Test
	public void webServerWithNothing() throws Exception {
		this.context = new AnnotationConfigServletWebServerApplicationContext(
				WebServerWithNothing.class, BaseConfiguration.class);
		DispatcherServlet servlet = this.context.getBean(DispatcherServlet.class);
		// No controller is registered, so the root URL must answer 404 (server is up).
		verify404();
		assertThat(servlet.getMultipartResolver()).isNotNull();
		assertThat(this.context.getBeansOfType(StandardServletMultipartResolver.class))
				.hasSize(1);
		assertThat(this.context.getBeansOfType(MultipartResolver.class)).hasSize(1);
	}

	// Jetty with no explicit MultipartConfigElement: the standard resolver is still
	// auto-configured and the servlet serves requests.
	@Test
	public void webServerWithNoMultipartJettyConfiguration() {
		this.context = new AnnotationConfigServletWebServerApplicationContext(
				WebServerWithNoMultipartJetty.class, BaseConfiguration.class);
		DispatcherServlet servlet = this.context.getBean(DispatcherServlet.class);
		assertThat(servlet.getMultipartResolver()).isNotNull();
		assertThat(this.context.getBeansOfType(StandardServletMultipartResolver.class))
				.hasSize(1);
		assertThat(this.context.getBeansOfType(MultipartResolver.class)).hasSize(1);
		verifyServletWorks();
	}

	// Same scenario as above but on Undertow.
	@Test
	public void webServerWithNoMultipartUndertowConfiguration() {
		this.context = new AnnotationConfigServletWebServerApplicationContext(
				WebServerWithNoMultipartUndertow.class, BaseConfiguration.class);
		DispatcherServlet servlet = this.context.getBean(DispatcherServlet.class);
		verifyServletWorks();
		assertThat(servlet.getMultipartResolver()).isNotNull();
		assertThat(this.context.getBeansOfType(StandardServletMultipartResolver.class))
				.hasSize(1);
		assertThat(this.context.getBeansOfType(MultipartResolver.class)).hasSize(1);
	}

	// Same scenario on Tomcat.
	// NOTE(review): this asserts the servlet's resolver is NULL, unlike the Jetty and
	// Undertow variants above which assert isNotNull() — confirm this asymmetry is
	// intentional (Tomcat-specific resolver wiring) and not a copy/paste slip.
	@Test
	public void webServerWithNoMultipartTomcatConfiguration() {
		this.context = new AnnotationConfigServletWebServerApplicationContext(
				WebServerWithNoMultipartTomcat.class, BaseConfiguration.class);
		DispatcherServlet servlet = this.context.getBean(DispatcherServlet.class);
		assertThat(servlet.getMultipartResolver()).isNull();
		assertThat(this.context.getBeansOfType(StandardServletMultipartResolver.class))
				.hasSize(1);
		assertThat(this.context.getBeansOfType(MultipartResolver.class)).hasSize(1);
		verifyServletWorks();
	}

	// A user-provided MultipartConfigElement on Jetty: the auto-configured resolver
	// must be the exact instance the DispatcherServlet uses.
	@Test
	public void webServerWithAutomatedMultipartJettyConfiguration() {
		this.context = new AnnotationConfigServletWebServerApplicationContext(
				WebServerWithEverythingJetty.class, BaseConfiguration.class);
		this.context.getBean(MultipartConfigElement.class);
		assertThat(this.context.getBean(StandardServletMultipartResolver.class)).isSameAs(
				this.context.getBean(DispatcherServlet.class).getMultipartResolver());
		verifyServletWorks();
	}

	// Same as above on Tomcat; the extra GET warms up the server before asserting.
	@Test
	public void webServerWithAutomatedMultipartTomcatConfiguration() {
		this.context = new AnnotationConfigServletWebServerApplicationContext(
				WebServerWithEverythingTomcat.class, BaseConfiguration.class);
		new RestTemplate().getForObject(
				"http://localhost:" + this.context.getWebServer().getPort() + "/",
				String.class);
		this.context.getBean(MultipartConfigElement.class);
		assertThat(this.context.getBean(StandardServletMultipartResolver.class)).isSameAs(
				this.context.getBean(DispatcherServlet.class).getMultipartResolver());
		verifyServletWorks();
	}

	// Same as above on Undertow.
	@Test
	public void webServerWithAutomatedMultipartUndertowConfiguration() {
		this.context = new AnnotationConfigServletWebServerApplicationContext(
				WebServerWithEverythingUndertow.class, BaseConfiguration.class);
		this.context.getBean(MultipartConfigElement.class);
		verifyServletWorks();
		assertThat(this.context.getBean(StandardServletMultipartResolver.class)).isSameAs(
				this.context.getBean(DispatcherServlet.class).getMultipartResolver());
	}

	// spring.servlet.multipart.enabled=false suppresses the MultipartConfigElement bean.
	@Test
	public void webServerWithMultipartConfigDisabled() {
		testWebServerWithCustomMultipartConfigEnabledSetting("false", 0);
	}

	// spring.servlet.multipart.enabled=true keeps exactly one MultipartConfigElement bean.
	@Test
	public void webServerWithMultipartConfigEnabled() {
		testWebServerWithCustomMultipartConfigEnabledSetting("true", 1);
	}

	// Shared driver for the enabled/disabled tests above: applies the property, boots
	// the context and counts MultipartConfigElement beans.
	private void testWebServerWithCustomMultipartConfigEnabledSetting(
			final String propertyValue, int expectedNumberOfMultipartConfigElementBeans) {
		this.context = new AnnotationConfigServletWebServerApplicationContext();
		TestPropertyValues.of("spring.servlet.multipart.enabled=" + propertyValue)
				.applyTo(this.context);
		this.context.register(WebServerWithNoMultipartTomcat.class,
				BaseConfiguration.class);
		this.context.refresh();
		this.context.getBean(MultipartProperties.class);
		assertThat(this.context.getBeansOfType(MultipartConfigElement.class))
				.hasSize(expectedNumberOfMultipartConfigElementBeans);
	}

	// A user-defined MultipartResolver (a mock) backs off the standard resolver but the
	// MultipartConfigElement is still auto-configured.
	@Test
	public void webServerWithCustomMultipartResolver() {
		this.context = new AnnotationConfigServletWebServerApplicationContext(
				WebServerWithCustomMultipartResolver.class, BaseConfiguration.class);
		MultipartResolver multipartResolver = this.context
				.getBean(MultipartResolver.class);
		assertThat(multipartResolver)
				.isNotInstanceOf(StandardServletMultipartResolver.class);
		assertThat(this.context.getBeansOfType(MultipartConfigElement.class)).hasSize(1);
	}

	// A Commons FileUpload resolver disables the servlet-container multipart support
	// entirely (no MultipartConfigElement bean at all).
	@Test
	public void containerWithCommonsMultipartResolver() throws Exception {
		this.context = new AnnotationConfigServletWebServerApplicationContext(
				ContainerWithCommonsMultipartResolver.class, BaseConfiguration.class);
		MultipartResolver multipartResolver = this.context
				.getBean(MultipartResolver.class);
		assertThat(multipartResolver).isInstanceOf(CommonsMultipartResolver.class);
		assertThat(this.context.getBeansOfType(MultipartConfigElement.class)).hasSize(0);
	}

	// spring.servlet.multipart.resolve-lazily must be propagated into the resolver's
	// private "resolveLazily" flag (checked via reflection; there is no getter).
	@Test
	public void configureResolveLazily() {
		this.context = new AnnotationConfigServletWebServerApplicationContext();
		TestPropertyValues.of("spring.servlet.multipart.resolve-lazily=true")
				.applyTo(this.context);
		this.context.register(WebServerWithNothing.class, BaseConfiguration.class);
		this.context.refresh();
		StandardServletMultipartResolver multipartResolver = this.context
				.getBean(StandardServletMultipartResolver.class);
		boolean resolveLazily = (Boolean) ReflectionTestUtils.getField(multipartResolver,
				"resolveLazily");
		assertThat(resolveLazily).isTrue();
	}

	// Issues a raw GET against the running server and expects 404.
	private void verify404() throws Exception {
		HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory();
		ClientHttpRequest request = requestFactory.createRequest(new URI(
				"http://localhost:" + this.context.getWebServer().getPort() + "/"),
				HttpMethod.GET);
		ClientHttpResponse response = request.execute();
		assertThat(response.getStatusCode()).isEqualTo(HttpStatus.NOT_FOUND);
	}

	// Confirms the WebController is mapped and serving: GET / returns "Hello".
	private void verifyServletWorks() {
		RestTemplate restTemplate = new RestTemplate();
		String url = "http://localhost:" + this.context.getWebServer().getPort() + "/";
		assertThat(restTemplate.getForObject(url, String.class)).isEqualTo("Hello");
	}

	// Empty user configuration: nothing beyond the auto-configured beans.
	@Configuration
	public static class WebServerWithNothing {

	}

	// Jetty server factory + a controller, but no multipart configuration.
	@Configuration
	public static class WebServerWithNoMultipartJetty {

		@Bean
		JettyServletWebServerFactory webServerFactory() {
			return new JettyServletWebServerFactory();
		}

		@Bean
		WebController controller() {
			return new WebController();
		}

	}

	// Undertow server factory + a controller, but no multipart configuration.
	@Configuration
	public static class WebServerWithNoMultipartUndertow {

		@Bean
		UndertowServletWebServerFactory webServerFactory() {
			return new UndertowServletWebServerFactory();
		}

		@Bean
		WebController controller() {
			return new WebController();
		}

	}

	// Common auto-configuration imports shared by every test, plus a ServerProperties
	// bean pinned to port 0 so each test binds an ephemeral port.
	@Configuration
	@Import({ ServletWebServerFactoryAutoConfiguration.class,
			DispatcherServletAutoConfiguration.class, MultipartAutoConfiguration.class })
	@EnableConfigurationProperties(MultipartProperties.class)
	protected static class BaseConfiguration {

		@Bean
		public ServerProperties serverProperties() {
			ServerProperties properties = new ServerProperties();
			properties.setPort(0);
			return properties;
		}

	}

	// Tomcat server factory + a controller, but no multipart configuration.
	@Configuration
	public static class WebServerWithNoMultipartTomcat {

		@Bean
		TomcatServletWebServerFactory webServerFactory() {
			return new TomcatServletWebServerFactory();
		}

		@Bean
		WebController controller() {
			return new WebController();
		}

	}

	// Jetty + user-supplied MultipartConfigElement + controller.
	@Configuration
	public static class WebServerWithEverythingJetty {

		@Bean
		MultipartConfigElement multipartConfigElement() {
			return new MultipartConfigElement("");
		}

		@Bean
		JettyServletWebServerFactory webServerFactory() {
			return new JettyServletWebServerFactory();
		}

		@Bean
		WebController webController() {
			return new WebController();
		}

	}

	// Tomcat + user-supplied MultipartConfigElement + controller (full MVC).
	@Configuration
	@EnableWebMvc
	public static class WebServerWithEverythingTomcat {

		@Bean
		MultipartConfigElement multipartConfigElement() {
			return new MultipartConfigElement("");
		}

		@Bean
		TomcatServletWebServerFactory webServerFactory() {
			return new TomcatServletWebServerFactory();
		}

		@Bean
		WebController webController() {
			return new WebController();
		}

	}

	// Undertow + user-supplied MultipartConfigElement + controller (full MVC).
	@Configuration
	@EnableWebMvc
	public static class WebServerWithEverythingUndertow {

		@Bean
		MultipartConfigElement multipartConfigElement() {
			return new MultipartConfigElement("");
		}

		@Bean
		UndertowServletWebServerFactory webServerFactory() {
			return new UndertowServletWebServerFactory();
		}

		@Bean
		WebController webController() {
			return new WebController();
		}

	}

	// User-defined (mock) MultipartResolver — makes the standard resolver back off.
	public static class WebServerWithCustomMultipartResolver {

		@Bean
		MultipartResolver multipartResolver() {
			return mock(MultipartResolver.class);
		}

	}

	// User-defined Commons FileUpload resolver — disables container multipart support.
	public static class ContainerWithCommonsMultipartResolver {

		@Bean
		CommonsMultipartResolver multipartResolver() {
			return mock(CommonsMultipartResolver.class);
		}

	}

	// Minimal controller used by verifyServletWorks().
	@Controller
	public static class WebController {

		@RequestMapping("/")
		@ResponseBody
		public String index() {
			return "Hello";
		}

	}

}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.core.persistence.config;
import java.util.Objects;

import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.core.journal.EncodingSupport;
/**
 * Journal-persisted security role settings for one address match.
 * <p>
 * Every role field except {@code addressMatch} is nullable: they are written with
 * {@code writeNullableSimpleString} and read back with {@code readNullableSimpleString},
 * so after {@link #decode(ActiveMQBuffer)} any of them may be {@code null}. The String
 * getters therefore return {@code null} instead of throwing when a role list is absent
 * (previously they dereferenced the field unconditionally and could NPE).
 */
public class PersistedRoles implements EncodingSupport
{
   // Constants -----------------------------------------------------

   // Attributes ----------------------------------------------------

   // Record id assigned by the journal when this object is stored.
   private long storeId;

   // Address (or wildcard match) these roles apply to; required, encoded non-null.
   private SimpleString addressMatch;

   // Role lists; each may be null (encoded/decoded as nullable strings).
   private SimpleString sendRoles;

   private SimpleString consumeRoles;

   private SimpleString createDurableQueueRoles;

   private SimpleString deleteDurableQueueRoles;

   private SimpleString createNonDurableQueueRoles;

   private SimpleString deleteNonDurableQueueRoles;

   private SimpleString manageRoles;

   // Static --------------------------------------------------------

   // Constructors --------------------------------------------------

   /**
    * Required by the journal: an empty instance is created first, then populated via
    * {@link #decode(ActiveMQBuffer)}.
    */
   public PersistedRoles()
   {
   }

   /**
    * @param addressMatch address (or wildcard) the role settings apply to
    * @param sendRoles roles allowed to send, may be {@code null}
    * @param consumeRoles roles allowed to consume, may be {@code null}
    * @param createDurableQueueRoles roles allowed to create durable queues, may be {@code null}
    * @param deleteDurableQueueRoles roles allowed to delete durable queues, may be {@code null}
    * @param createNonDurableQueueRoles roles allowed to create non-durable queues, may be {@code null}
    * @param deleteNonDurableQueueRoles roles allowed to delete non-durable queues, may be {@code null}
    * @param manageRoles roles allowed to manage, may be {@code null}
    */
   public PersistedRoles(final String addressMatch,
                         final String sendRoles,
                         final String consumeRoles,
                         final String createDurableQueueRoles,
                         final String deleteDurableQueueRoles,
                         final String createNonDurableQueueRoles,
                         final String deleteNonDurableQueueRoles,
                         final String manageRoles)
   {
      // SimpleString.toSimpleString is null-tolerant, so null role lists stay null.
      this.addressMatch = SimpleString.toSimpleString(addressMatch);
      this.sendRoles = SimpleString.toSimpleString(sendRoles);
      this.consumeRoles = SimpleString.toSimpleString(consumeRoles);
      this.createDurableQueueRoles = SimpleString.toSimpleString(createDurableQueueRoles);
      this.deleteDurableQueueRoles = SimpleString.toSimpleString(deleteDurableQueueRoles);
      this.createNonDurableQueueRoles = SimpleString.toSimpleString(createNonDurableQueueRoles);
      this.deleteNonDurableQueueRoles = SimpleString.toSimpleString(deleteNonDurableQueueRoles);
      this.manageRoles = SimpleString.toSimpleString(manageRoles);
   }

   // Public --------------------------------------------------------

   public long getStoreId()
   {
      return storeId;
   }

   public void setStoreId(final long id)
   {
      storeId = id;
   }

   /**
    * @return the addressMatch
    */
   public SimpleString getAddressMatch()
   {
      return addressMatch;
   }

   /**
    * @return the sendRoles, or {@code null} if none were persisted
    */
   public String getSendRoles()
   {
      return Objects.toString(sendRoles, null);
   }

   /**
    * @return the consumeRoles, or {@code null} if none were persisted
    */
   public String getConsumeRoles()
   {
      return Objects.toString(consumeRoles, null);
   }

   /**
    * @return the createDurableQueueRoles, or {@code null} if none were persisted
    */
   public String getCreateDurableQueueRoles()
   {
      return Objects.toString(createDurableQueueRoles, null);
   }

   /**
    * @return the deleteDurableQueueRoles, or {@code null} if none were persisted
    */
   public String getDeleteDurableQueueRoles()
   {
      return Objects.toString(deleteDurableQueueRoles, null);
   }

   /**
    * @return the createNonDurableQueueRoles, or {@code null} if none were persisted
    */
   public String getCreateNonDurableQueueRoles()
   {
      return Objects.toString(createNonDurableQueueRoles, null);
   }

   /**
    * @return the deleteNonDurableQueueRoles, or {@code null} if none were persisted
    */
   public String getDeleteNonDurableQueueRoles()
   {
      return Objects.toString(deleteNonDurableQueueRoles, null);
   }

   /**
    * @return the manageRoles, or {@code null} if none were persisted
    */
   public String getManageRoles()
   {
      return Objects.toString(manageRoles, null);
   }

   /**
    * Writes the record; field order here must match {@link #decode(ActiveMQBuffer)}
    * and {@link #getEncodeSize()} exactly.
    */
   @Override
   public void encode(final ActiveMQBuffer buffer)
   {
      buffer.writeSimpleString(addressMatch);
      buffer.writeNullableSimpleString(sendRoles);
      buffer.writeNullableSimpleString(consumeRoles);
      buffer.writeNullableSimpleString(createDurableQueueRoles);
      buffer.writeNullableSimpleString(deleteDurableQueueRoles);
      buffer.writeNullableSimpleString(createNonDurableQueueRoles);
      buffer.writeNullableSimpleString(deleteNonDurableQueueRoles);
      buffer.writeNullableSimpleString(manageRoles);
   }

   @Override
   public int getEncodeSize()
   {
      return addressMatch.sizeof() + SimpleString.sizeofNullableString(sendRoles) +
         SimpleString.sizeofNullableString(consumeRoles) +
         SimpleString.sizeofNullableString(createDurableQueueRoles) +
         SimpleString.sizeofNullableString(deleteDurableQueueRoles) +
         SimpleString.sizeofNullableString(createNonDurableQueueRoles) +
         SimpleString.sizeofNullableString(deleteNonDurableQueueRoles) +
         SimpleString.sizeofNullableString(manageRoles);
   }

   @Override
   public void decode(final ActiveMQBuffer buffer)
   {
      addressMatch = buffer.readSimpleString();
      sendRoles = buffer.readNullableSimpleString();
      consumeRoles = buffer.readNullableSimpleString();
      createDurableQueueRoles = buffer.readNullableSimpleString();
      deleteDurableQueueRoles = buffer.readNullableSimpleString();
      createNonDurableQueueRoles = buffer.readNullableSimpleString();
      deleteNonDurableQueueRoles = buffer.readNullableSimpleString();
      manageRoles = buffer.readNullableSimpleString();
   }

   @Override
   public int hashCode()
   {
      // Objects.hash uses the same 31-based fold as the previous hand-rolled version;
      // keeping the element order identical preserves the exact hash values
      // (Long.hashCode(storeId) == (int) (storeId ^ (storeId >>> 32))).
      return Objects.hash(addressMatch, consumeRoles, createDurableQueueRoles,
                          createNonDurableQueueRoles, deleteDurableQueueRoles,
                          deleteNonDurableQueueRoles, manageRoles, sendRoles, storeId);
   }

   @Override
   public boolean equals(Object obj)
   {
      if (this == obj)
      {
         return true;
      }
      if (obj == null || getClass() != obj.getClass())
      {
         return false;
      }
      PersistedRoles other = (PersistedRoles)obj;
      return storeId == other.storeId &&
         Objects.equals(addressMatch, other.addressMatch) &&
         Objects.equals(sendRoles, other.sendRoles) &&
         Objects.equals(consumeRoles, other.consumeRoles) &&
         Objects.equals(createDurableQueueRoles, other.createDurableQueueRoles) &&
         Objects.equals(deleteDurableQueueRoles, other.deleteDurableQueueRoles) &&
         Objects.equals(createNonDurableQueueRoles, other.createNonDurableQueueRoles) &&
         Objects.equals(deleteNonDurableQueueRoles, other.deleteNonDurableQueueRoles) &&
         Objects.equals(manageRoles, other.manageRoles);
   }

   @Override
   public String toString()
   {
      return "PersistedRoles [storeId=" + storeId +
         ", addressMatch=" +
         addressMatch +
         ", sendRoles=" +
         sendRoles +
         ", consumeRoles=" +
         consumeRoles +
         ", createDurableQueueRoles=" +
         createDurableQueueRoles +
         ", deleteDurableQueueRoles=" +
         deleteDurableQueueRoles +
         ", createNonDurableQueueRoles=" +
         createNonDurableQueueRoles +
         ", deleteNonDurableQueueRoles=" +
         deleteNonDurableQueueRoles +
         ", manageRoles=" +
         manageRoles +
         "]";
   }

   // Package protected ---------------------------------------------

   // Protected -----------------------------------------------------

   // Private -------------------------------------------------------

   // Inner classes -------------------------------------------------
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* User: anna
* Date: 11-Nov-2008
*/
package org.jetbrains.idea.eclipse.conversion;
import com.intellij.openapi.components.ExpandMacroToPathMap;
import com.intellij.openapi.components.PathMacroManager;
import com.intellij.openapi.components.impl.BasePathMacroManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.ex.JavaSdkUtil;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.roots.libraries.LibraryTablesRegistrar;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.util.ArrayUtil;
import gnu.trove.THashSet;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.eclipse.*;
import org.jetbrains.idea.eclipse.config.EclipseModuleManagerImpl;
import org.jetbrains.idea.eclipse.importWizard.EclipseNatureImporter;
import org.jetbrains.idea.eclipse.util.ErrorLog;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Set;
public class EclipseClasspathReader extends AbstractEclipseClasspathReader<ModifiableRootModel> {
  // Project the imported module belongs to; used to resolve named libraries.
  private final Project myProject;
  // Content root created by init(); source folders and eclipse-path expansion are
  // resolved relative to this entry.
  private ContentEntry myContentEntry;

  public EclipseClasspathReader(@NotNull String rootPath, @NotNull Project project, @Nullable List<String> currentRoots) {
    this(rootPath, project, currentRoots, null);
  }

  public EclipseClasspathReader(@NotNull String rootPath, @NotNull Project project, @Nullable List<String> currentRoots, @Nullable Set<String> moduleNames) {
    super(rootPath, currentRoots, moduleNames);
    myProject = project;
  }

  /** Registers the Eclipse project root as this module's content root. Call before readClasspath(). */
  public void init(@NotNull ModifiableRootModel model) {
    myContentEntry = model.addContentEntry(pathToUrl(myRootPath));
  }

  /**
   * Collects Eclipse path-variable names referenced by the given .classpath element
   * into {@code usedVariables}.
   */
  public static void collectVariables(Set<String> usedVariables, Element classpathElement, final String rootPath) {
    for (Element element : classpathElement.getChildren(EclipseXml.CLASSPATHENTRY_TAG)) {
      String path = element.getAttributeValue(EclipseXml.PATH_ATTR);
      if (path == null) {
        continue;
      }
      String kind = element.getAttributeValue(EclipseXml.KIND_ATTR);
      if (Comparing.strEqual(kind, EclipseXml.VAR_KIND)) {
        // "var" entries name a path variable directly, optionally a second one for sources.
        // NOTE(review): the return values of createEPathVariable are discarded here and
        // nothing is added to usedVariables on this branch — confirm that registration is
        // a side effect of createEPathVariable and that discarding the result is intended.
        createEPathVariable(path, 0);
        String srcPath = element.getAttributeValue(EclipseXml.SOURCEPATH_ATTR);
        if (srcPath != null) {
          createEPathVariable(srcPath, srcVarStart(srcPath));
        }
      }
      else if (Comparing.strEqual(kind, EclipseXml.SRC_KIND) || Comparing.strEqual(kind, EclipseXml.OUTPUT_KIND)) {
        // Source/output entries may point at linked resources that embed a path variable.
        EclipseProjectFinder.LinkedResource linkedResource = EclipseProjectFinder.findLinkedResource(rootPath, path);
        if (linkedResource != null && linkedResource.containsPathVariable()) {
          usedVariables.add(linkedResource.getVariableName());
        }
      }
    }
  }

  /** Convenience overload that discards unknown libraries/JDKs/module refs into one throwaway set. */
  public void readClasspath(@NotNull ModifiableRootModel model, @NotNull Element classpathElement) throws IOException, ConversionException {
    Set<String> sink = new THashSet<>();
    readClasspath(model, sink, sink, sink, null, classpathElement);
  }

  /**
   * Imports all classpath entries from the .classpath element into {@code model}.
   * Unresolvable library/JDK/module references are reported through the supplied
   * collections; per-entry conversion failures are logged as warnings and skipped.
   */
  public void readClasspath(@NotNull ModifiableRootModel model,
                            @NotNull Collection<String> unknownLibraries,
                            @NotNull Collection<String> unknownJdks,
                            Set<String> refsToModules,
                            final String testPattern,
                            Element classpathElement) throws IOException, ConversionException {
    // Start from a clean slate: drop everything except the module-source entry itself.
    for (OrderEntry orderEntry : model.getOrderEntries()) {
      if (!(orderEntry instanceof ModuleSourceOrderEntry)) {
        model.removeOrderEntry(orderEntry);
      }
    }
    int idx = 0;
    EclipseModuleManagerImpl eclipseModuleManager = EclipseModuleManagerImpl.getInstance(model.getModule());
    Set<String> libs = new THashSet<>();
    for (Element o : classpathElement.getChildren(EclipseXml.CLASSPATHENTRY_TAG)) {
      try {
        readClasspathEntry(model, unknownLibraries, unknownJdks, refsToModules, testPattern, o, idx++,
                           eclipseModuleManager,
                           ((BasePathMacroManager)PathMacroManager.getInstance(model.getModule())).getExpandMacroMap(), libs);
      }
      catch (ConversionException e) {
        // A single bad entry must not abort the whole import.
        ErrorLog.rethrow(ErrorLog.Level.Warning, null, EclipseXml.CLASSPATH_FILE, e);
      }
    }
    // No "con" JDK entry was present: fall back to the project SDK and remember that
    // the JDK still needs explicit configuration.
    if (!model.isSdkInherited() && model.getSdkName() == null) {
      eclipseModuleManager.setForceConfigureJDK();
      model.inheritSdk();
    }
  }

  @Override
  protected int rearrange(ModifiableRootModel rootModel) {
    return rearrangeOrderEntryOfType(rootModel, ModuleSourceOrderEntry.class);
  }

  /** Expands an Eclipse-style path to an IDEA URL, preferring resolution against the content root. */
  @Override
  protected String expandEclipsePath2Url(ModifiableRootModel rootModel, String path) {
    final VirtualFile contentRoot = myContentEntry.getFile();
    if (contentRoot != null) {
      return EPathUtil.expandEclipsePath2Url(path, rootModel, myCurrentRoots, contentRoot);
    }
    return EPathUtil.expandEclipsePath2Url(path, rootModel, myCurrentRoots);
  }

  @Override
  protected Set<String> getDefinedCons() {
    return EclipseNatureImporter.getAllDefinedCons();
  }

  /**
   * Creates a module-level library with classes, optional sources/native/javadoc roots,
   * then marks the resulting order entry exported if requested.
   */
  @Override
  protected void addModuleLibrary(ModifiableRootModel rootModel,
                                  Element element,
                                  boolean exported,
                                  String libName,
                                  String url,
                                  String srcUrl,
                                  String nativeRoot,
                                  ExpandMacroToPathMap macroMap) {
    // NOTE(review): the library-table modifiable model obtained here is never committed,
    // only the library's own modifiable model is — confirm the module library table
    // commits as part of rootModel.commit() in this code path.
    final Library library = rootModel.getModuleLibraryTable().getModifiableModel().createLibrary(libName);
    final Library.ModifiableModel modifiableModel = library.getModifiableModel();
    modifiableModel.addRoot(url, OrderRootType.CLASSES);
    if (srcUrl != null) {
      modifiableModel.addRoot(srcUrl, OrderRootType.SOURCES);
    }
    if (nativeRoot != null) {
      modifiableModel.addRoot(nativeRoot, NativeLibraryOrderRootType.getInstance());
    }
    EJavadocUtil.appendJavadocRoots(element, rootModel, myCurrentRoots, modifiableModel);
    modifiableModel.commit();
    setLibraryEntryExported(rootModel, exported, library);
  }

  /** Creates a module library for the bundled JUnit jar matching the Eclipse JUnit container. */
  @Override
  protected void addJUnitDefaultLib(ModifiableRootModel rootModel, String junitName, ExpandMacroToPathMap macroMap) {
    final Library library = rootModel.getModuleLibraryTable().getModifiableModel().createLibrary(junitName);
    final Library.ModifiableModel modifiableModel = library.getModifiableModel();
    // Container name containing "4" selects the JUnit 4 jar, otherwise JUnit 3.
    modifiableModel.addRoot(getJunitClsUrl(junitName.contains("4")), OrderRootType.CLASSES);
    modifiableModel.commit();
  }

  @Override
  protected void addSourceFolderToCurrentContentRoot(ModifiableRootModel rootModel,
                                                     String srcUrl,
                                                     boolean testFolder) {
    myContentEntry.addSourceFolder(srcUrl, testFolder);
  }

  /** Used for source folders outside the main content root: each gets its own content entry. */
  @Override
  protected void addSourceFolder(ModifiableRootModel rootModel, String srcUrl, boolean testFolder) {
    rootModel.addContentEntry(srcUrl).addSourceFolder(srcUrl, testFolder);
  }

  /**
   * Applies the JDK declared in the .classpath: inherit the project SDK when none is
   * named, otherwise resolve by name, recording unresolvable JDKs as invalid.
   */
  @Override
  protected void setUpModuleJdk(ModifiableRootModel rootModel,
                                Collection<String> unknownJdks,
                                EclipseModuleManager eclipseModuleManager,
                                String jdkName) {
    if (jdkName == null) {
      rootModel.inheritSdk();
    }
    else {
      final Sdk moduleJdk = ProjectJdkTable.getInstance().findJdk(jdkName);
      if (moduleJdk != null) {
        rootModel.setSdk(moduleJdk);
      }
      else {
        rootModel.setInvalidSdk(jdkName, IdeaXml.JAVA_SDK_TYPE);
        eclipseModuleManager.setInvalidJdk(jdkName);
        unknownJdks.add(jdkName);
      }
    }
    // Keep the JDK entry at the end of the order, matching Eclipse semantics.
    rearrangeOrderEntryOfType(rootModel, JdkOrderEntry.class);
  }

  @Override
  protected void addInvalidModuleEntry(ModifiableRootModel rootModel, boolean exported, String moduleName) {
    rootModel.addInvalidModuleEntry(moduleName).setExported(exported);
  }

  /**
   * Moves the first order entry of the given type to the end of the order list and
   * returns its new index (orderEntries.length - 1).
   */
  private static int rearrangeOrderEntryOfType(ModifiableRootModel rootModel, Class<? extends OrderEntry> orderEntryClass) {
    OrderEntry[] orderEntries = rootModel.getOrderEntries();
    int moduleSourcesIdx = 0;
    for (OrderEntry orderEntry : orderEntries) {
      if (orderEntryClass.isAssignableFrom(orderEntry.getClass())) {
        break;
      }
      moduleSourcesIdx++;
    }
    // Append a copy of the found entry, then remove the original occurrence.
    orderEntries = ArrayUtil.append(orderEntries, orderEntries[moduleSourcesIdx]);
    orderEntries = ArrayUtil.remove(orderEntries, moduleSourcesIdx);
    rootModel.rearrangeOrderEntries(orderEntries);
    return orderEntries.length - 1;
  }

  @Override
  public void setupOutput(ModifiableRootModel rootModel, final String path) {
    setOutputUrl(rootModel, path);
  }

  /** Sets an explicit (non-inherited) compiler output path on the module. */
  public static void setOutputUrl(@NotNull ModifiableRootModel rootModel, @NotNull String path) {
    CompilerModuleExtension compilerModuleExtension = rootModel.getModuleExtension(CompilerModuleExtension.class);
    compilerModuleExtension.setCompilerOutputPath(pathToUrl(path));
    compilerModuleExtension.inheritCompilerOutputPath(false);
  }

  /** Finds the module-level order entry backing {@code library} and sets its exported flag. */
  private static void setLibraryEntryExported(ModifiableRootModel rootModel, boolean exported, Library library) {
    for (OrderEntry orderEntry : rootModel.getOrderEntries()) {
      if (orderEntry instanceof LibraryOrderEntry &&
          ((LibraryOrderEntry)orderEntry).isModuleLevel() &&
          Comparing.equal(((LibraryOrderEntry)orderEntry).getLibrary(), library)) {
        ((LibraryOrderEntry)orderEntry).setExported(exported);
        break;
      }
    }
  }

  /**
   * Adds a reference to a named (application/project-level) library, falling back to an
   * invalid-library entry (and reporting the name) when it cannot be resolved.
   */
  @Override
  protected void addNamedLibrary(final ModifiableRootModel rootModel,
                                 final Collection<String> unknownLibraries,
                                 final boolean exported,
                                 final String name,
                                 final boolean applicationLevel) {
    Library lib = findLibraryByName(myProject, name);
    if (lib != null) {
      rootModel.addLibraryEntry(lib).setExported(exported);
    }
    else {
      unknownLibraries.add(name);
      rootModel.addInvalidLibrary(name, applicationLevel ? LibraryTablesRegistrar.APPLICATION_LEVEL : LibraryTablesRegistrar.PROJECT_LEVEL).setExported(exported);
    }
  }

  /**
   * Resolves a library by name, searching the application table first, then the
   * project table, then any custom library tables.
   */
  public static Library findLibraryByName(Project project, String name) {
    final LibraryTablesRegistrar tablesRegistrar = LibraryTablesRegistrar.getInstance();
    Library lib = tablesRegistrar.getLibraryTable().getLibraryByName(name);
    if (lib == null) {
      lib = tablesRegistrar.getLibraryTable(project).getLibraryByName(name);
    }
    if (lib == null) {
      for (LibraryTable table : tablesRegistrar.getCustomLibraryTables()) {
        lib = table.getLibraryByName(name);
        if (lib != null) {
          break;
        }
      }
    }
    return lib;
  }

  /** Returns the jar-root URL of the bundled JUnit 3 or 4 jar (file URL if it cannot be opened as a jar). */
  static String getJunitClsUrl(final boolean version4) {
    String url = version4 ? JavaSdkUtil.getJunit4JarPath() : JavaSdkUtil.getJunit3JarPath();
    final VirtualFile localFile = VirtualFileManager.getInstance().findFileByUrl(pathToUrl(url));
    if (localFile != null) {
      final VirtualFile jarFile = JarFileSystem.getInstance().getJarRootForLocalFile(localFile);
      url = jarFile != null ? jarFile.getUrl() : localFile.getUrl();
    }
    return url;
  }

  /** Converts a plain file URL pointing at a jar into the corresponding jar-root URL when possible. */
  @Override
  protected String prepareValidUrlInsideJar(String url) {
    final VirtualFile localFile = VirtualFileManager.getInstance().findFileByUrl(url);
    if (localFile != null) {
      final VirtualFile jarFile = JarFileSystem.getInstance().getJarRootForLocalFile(localFile);
      if (jarFile != null) {
        return jarFile.getUrl();
      }
    }
    return url;
  }
}
| |
/**
* JBoss, Home of Professional Open Source
* Copyright Red Hat, Inc., and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.aerogear.unifiedpush.jpa;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceException;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import net.jakubholy.dbunitexpress.EmbeddedDbTesterRule;
import org.jboss.aerogear.unifiedpush.api.AdmVariant;
import org.jboss.aerogear.unifiedpush.api.AndroidVariant;
import org.jboss.aerogear.unifiedpush.api.Category;
import org.jboss.aerogear.unifiedpush.api.Installation;
import org.jboss.aerogear.unifiedpush.api.SimplePushVariant;
import org.jboss.aerogear.unifiedpush.api.Variant;
import org.jboss.aerogear.unifiedpush.api.WindowsMPNSVariant;
import org.jboss.aerogear.unifiedpush.api.WindowsWNSVariant;
import org.jboss.aerogear.unifiedpush.api.iOSVariant;
import org.jboss.aerogear.unifiedpush.dao.PageResult;
import org.jboss.aerogear.unifiedpush.dao.ResultStreamException;
import org.jboss.aerogear.unifiedpush.dao.ResultsStream;
import org.jboss.aerogear.unifiedpush.jpa.dao.impl.JPAInstallationDao;
import org.jboss.aerogear.unifiedpush.utils.DaoDeployment;
import org.jboss.aerogear.unifiedpush.utils.TestUtils;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Integration tests for {@link JPAInstallationDao}, executed by Arquillian against
 * the DBUnit data set declared in {@code Installations.xml}.
 *
 * <p>Each test runs inside a transaction that is rolled back in {@link #tearDown()},
 * so the fixture rows are identical at the start of every test. The fixture seeds
 * an Android variant (id "1") and a SimplePush variant (id "2"); the exact row
 * contents (aliases, categories, device types) are defined in the XML data set.</p>
 */
@RunWith(Arquillian.class)
public class InstallationDaoTest {

    /** Device token of a seeded Android installation ("Android Phone" in the fixture). */
    public static final String DEVICE_TOKEN_1 = "1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890";
    /** Device token of a seeded Android installation ("Android Tablet" in the fixture). */
    public static final String DEVICE_TOKEN_2 = "67890167890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890";

    @Inject
    private EntityManager entityManager;
    @Inject
    private JPAInstallationDao installationDao;

    // Variant IDs as seeded by the Installations.xml fixture.
    private final String androidVariantID = "1";
    private final String simplePushVariantID = "2";

    @Deployment
    public static JavaArchive createDeployment() {
        return DaoDeployment.createDeployment();
    }

    /** (Re)loads the DBUnit fixture before each test. */
    @Rule
    public EmbeddedDbTesterRule testDb = new EmbeddedDbTesterRule("Installations.xml");

    @Before
    public void setUp() {
        entityManager.getTransaction().begin();
    }

    @After
    public void tearDown() {
        // Roll back so no test leaks state into the next one.
        entityManager.getTransaction().rollback();
    }

    @Test
    public void countDevicesForLoginName() {
        assertThat(installationDao.getNumberOfDevicesForLoginName("me")).isEqualTo(6);
    }

    @Test
    public void getNumberOfDevicesForVariantID() {
        assertThat(installationDao.getNumberOfDevicesForVariantID("1")).isEqualTo(3);
        assertThat(installationDao.getNumberOfDevicesForVariantID("2")).isEqualTo(3);
    }

    @Test
    public void findDeviceTokensForOneInstallationOfOneVariant() {
        String[] alias = { "foo@bar.org" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(androidVariantID, null, Arrays.asList(alias), null);
        assertThat(tokens).hasSize(2);
        Installation one = installationDao.findInstallationForVariantByDeviceToken(androidVariantID, DEVICE_TOKEN_1);
        assertThat(one.getDeviceToken()).isEqualTo(DEVICE_TOKEN_1);
        // Unknown tokens ("foobar223") must simply be ignored by the bulk lookup.
        final Set<String> tokenz = new HashSet<String>();
        tokenz.add(DEVICE_TOKEN_1);
        tokenz.add("foobar223");
        List<Installation> list = installationDao.findInstallationsForVariantByDeviceTokens(androidVariantID, tokenz);
        assertThat(list).hasSize(1);
        assertThat(list).extracting("deviceToken").containsOnly(DEVICE_TOKEN_1);
    }

    @Test
    public void findDeviceTokensForAliasOfVariant() {
        String[] alias = { "foo@bar.org" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(androidVariantID, null, Arrays.asList(alias), null);
        assertThat(tokens).hasSize(2);
    }

    @Test
    public void findNoDeviceTokensForAliasOfVariant() {
        // Alias not present in the fixture: no tokens may match.
        String[] alias = { "bar@foo.org" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(androidVariantID, null, Arrays.asList(alias), null);
        assertThat(tokens).hasSize(0);
    }

    @Test
    public void findDeviceTokensForAliasAndDeviceType() {
        String[] alias = { "foo@bar.org" };
        String[] types = { "Android Tablet" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(androidVariantID, null, Arrays.asList(alias), Arrays.asList(types));
        assertThat(tokens).hasSize(1);
        assertThat(tokens).containsOnly(DEVICE_TOKEN_2);
    }

    @Test
    public void findNoDeviceTokensForAliasAndUnusedDeviceType() {
        String[] alias = { "foo@bar.org" };
        String[] types = { "Android Clock" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(androidVariantID, null, Arrays.asList(alias), Arrays.asList(types));
        assertThat(tokens).isEmpty();
    }

    @Test
    public void findZeroDeviceTokensForAliasAndCategoriesAndDeviceType() {
        // Category and device type filters are ANDed: "soccer" does not match the tablet row.
        String[] alias = { "foo@bar.org" };
        String[] types = { "Android Tablet" };
        String[] categories = { "soccer" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(androidVariantID, Arrays.asList(categories), Arrays.asList(alias), Arrays
                .asList(types));
        assertThat(tokens).isEmpty();
    }

    @Test
    public void findOneDeviceTokensForAliasAndCategoriesAndDeviceType() {
        String[] alias = { "foo@bar.org" };
        String[] types = { "Android Phone" };
        String[] cats = { "soccer", "news", "weather" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(androidVariantID, Arrays.asList(cats), Arrays.asList(alias), Arrays.asList(types));
        assertThat(tokens).hasSize(1);
        assertThat(tokens).containsOnly(DEVICE_TOKEN_1);
    }

    @Test
    public void findTwoDeviceTokensForAliasAndCategories() {
        String[] alias = { "foo@bar.org" };
        String[] cats = { "soccer", "news", "weather" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(androidVariantID, Arrays.asList(cats), Arrays.asList(alias), null);
        assertThat(tokens).hasSize(2);
    }

    @Test
    public void findTwoDeviceTokensCategories() {
        String[] cats = { "soccer", "news", "weather" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(androidVariantID, Arrays.asList(cats), null, null);
        assertThat(tokens).hasSize(2);
    }

    @Test
    public void findAndDeleteOneInstallation() {
        final Set<String> tokenz = new HashSet<String>();
        tokenz.add(DEVICE_TOKEN_1);
        tokenz.add("foobar223");
        List<Installation> list = installationDao.findInstallationsForVariantByDeviceTokens(androidVariantID, tokenz);
        assertThat(list).hasSize(1);
        Installation installation = list.get(0);
        assertThat(installation.getDeviceToken()).isEqualTo(DEVICE_TOKEN_1);
        installationDao.delete(installation);
        // After deletion the same lookup must no longer find the installation.
        list = installationDao.findInstallationsForVariantByDeviceTokens(androidVariantID, tokenz);
        assertThat(list).isEmpty();
    }

    @Test
    public void findAndDeleteTwoInstallations() {
        final Set<String> tokenz = new HashSet<String>();
        tokenz.add(DEVICE_TOKEN_1);
        tokenz.add(DEVICE_TOKEN_2);
        List<Installation> list = installationDao.findInstallationsForVariantByDeviceTokens(androidVariantID, tokenz);
        assertThat(list).hasSize(2);
        for (Installation installation : list) {
            installationDao.delete(installation);
        }
        list = installationDao.findInstallationsForVariantByDeviceTokens(androidVariantID, tokenz);
        assertThat(list).hasSize(0);
    }

    @Test
    public void deleteNonExistingInstallation() {
        // Deleting an unknown id must be a silent no-op, not an exception.
        Installation installation = new Installation();
        installation.setId("2345");
        installationDao.delete(installation);
    }

    @Test
    public void mergeCategories() {
        // given: two installations of the same variant that share the category "one"
        final SimplePushVariant variant = new SimplePushVariant();
        entityManager.persist(variant);
        final Installation installation = new Installation();
        installation.setDeviceToken("http://test");
        installation.setCategories(new HashSet<Category>(Arrays.asList(new Category("one"), new Category("two"))));
        final Installation installation2 = new Installation();
        installation2.setDeviceToken("http://test2");
        installation2.setCategories(new HashSet<Category>(Arrays.asList(new Category("one"), new Category("three"))));
        installation.setVariant(variant);
        installation2.setVariant(variant);
        // when: both installations are stored. Previously installation2 was never
        // persisted, so the category-merge behaviour was not actually exercised.
        installationDao.create(installation);
        installationDao.create(installation2);
        // then: the shared category name must map to a single Category row
        final List<Category> list = entityManager
                .createQuery("select c from Category c where c.name = 'one'", Category.class).getResultList();
        assertThat(list).hasSize(1);
    }

    @Test
    public void findPushEndpointsForAlias() {
        String[] alias = { "foo@bar.org" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(simplePushVariantID, null, Arrays.asList(alias), null);
        assertThat(tokens).hasSize(3);
        // All SimplePush "tokens" are channel URLs on the fixture's update server.
        for (String token : tokens) {
            assertThat(token).startsWith("http://server:8080/update/");
        }
    }

    @Test
    public void findZeroPushEndpointsForAliasAndCategories() {
        String[] alias = { "foo@bar.org" };
        String[] categories = { "US Football" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(simplePushVariantID, Arrays.asList(categories), Arrays.asList(alias), null);
        assertThat(tokens).isEmpty();
    }

    @Test
    public void findOnePushEndpointForAliasAndCategories() {
        String[] alias = { "foo@bar.org" };
        String[] cats = { "soccer", "weather" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(simplePushVariantID, Arrays.asList(cats), Arrays.asList(alias), null);
        assertThat(tokens).hasSize(1);
        assertThat(tokens.get(0)).startsWith("http://server:8080/update/");
    }

    @Test
    public void findThreePushEndpointsForAliasAndCategories() {
        String[] alias = { "foo@bar.org" };
        String[] cats = { "soccer", "news", "weather" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(simplePushVariantID, Arrays.asList(cats), Arrays.asList(alias), null);
        assertThat(tokens).hasSize(3);
        for (String token : tokens) {
            assertThat(token).startsWith("http://server:8080/update/");
        }
    }

    @Test
    public void findThreePushEndpointsForCategories() {
        String[] cats = { "soccer", "news", "weather" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(simplePushVariantID, Arrays.asList(cats), null, null);
        assertThat(tokens).hasSize(3);
        for (String token : tokens) {
            assertThat(token).startsWith("http://server:8080/update/");
        }
    }

    @Test
    public void findPushEndpointsWithDeviceType() {
        String[] types = { "JavaFX Monitor" };
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(simplePushVariantID, null, null, Arrays.asList(types));
        assertThat(tokens).hasSize(1);
        assertThat(tokens.get(0)).startsWith("http://server:8080/update/");
    }

    @Test
    public void findPushEndpointsWithoutDeviceType() {
        // No filters at all: every SimplePush installation of the variant matches.
        List<String> tokens = findAllDeviceTokenForVariantIDByCriteria(simplePushVariantID, null, null, null);
        assertThat(tokens).hasSize(3);
        for (String token : tokens) {
            assertThat(token).startsWith("http://server:8080/update/");
        }
    }

    @Test
    public void shouldValidateDeviceId() {
        // given: a token that does not match the iOS token format
        final Installation installation = new Installation();
        installation.setDeviceToken("invalid");
        final iOSVariant variant = new iOSVariant();
        variant.setPassphrase("12");
        variant.setCertificate("12".getBytes());
        entityManager.persist(variant);
        installation.setVariant(variant);
        // when
        installationDao.create(installation);
        try {
            entityManager.flush();
            fail("ConstraintViolationException should have been thrown");
        } catch (ConstraintViolationException violationException) {
            // then: exactly one violation, with the device-token message
            final Set<ConstraintViolation<?>> constraintViolations = violationException.getConstraintViolations();
            assertThat(constraintViolations).isNotEmpty();
            assertThat(constraintViolations.size()).isEqualTo(1);
            assertThat(constraintViolations.iterator().next().getMessage()).isEqualTo(
                    "Device token is not valid for this device type");
        }
    }

    @Test
    public void shouldSaveWhenValidateDeviceIdIOS() {
        // given: a well-formed (64 hex chars) APNs token
        final Installation installation = new Installation();
        installation.setDeviceToken("1ce51dad49a77ca7b45924074bcc4f19aea20378f5feda202fbba3beed7073d7");
        final iOSVariant variant = new iOSVariant();
        variant.setPassphrase("12");
        variant.setCertificate("12".getBytes());
        // when
        deviceTokenTest(installation, variant);
    }

    @Test
    public void shouldSaveWhenValidateDeviceIdWindows() {
        // given: a well-formed WNS channel URI
        final Installation installation = new Installation();
        installation.setDeviceToken("https://db3.notify.windows.com/?token=AgYAAACH%2fZixlZK4v%2bkD3LFiz7zHOJm13"
                + "smBVRn8rH%2b32Xu6tv3fj%2fh8bb4VhNTS7NqS8TclpW044YxAbaN%2bB4NjpyVSZs3He7SwwjExbEsBFRLYc824%2f0"
                + "615fPox8bwoxrTU%3d");
        final WindowsWNSVariant variant = new WindowsWNSVariant();
        variant.setClientSecret("12");
        variant.setSid("12");
        // when
        deviceTokenTest(installation, variant);
    }

    @Test
    public void shouldSaveWhenValidateDeviceIdAdm() {
        // given: a well-formed Amazon Device Messaging registration id
        final Installation installation = new Installation();
        installation.setDeviceToken("amzn1.adm-registration.v3.Y29tLmFtYXpvbi5EZXZpY2VNZXNzYWdpbmcuUmVnaXN0cmF0a" +
                "W9uSWRFbmNyeXB0aW9uS2V5ITEhWTlLSFlBZDlOSU12cTUzdlpIQzZJd3VZVk9CZ0g1bUdWUkJrL0hOTkZ5UGFPN1FxY3pP" +
                "WXJVL0laWGdrczVKU1MwSG8rVDUva2hkS3h5WjE4YUZHM3NoTXpOMUxCa2tORDdsY2FxemVxcG5lWXR1eC9UeHZMTWVScUY" +
                "wT3JwUXFzZFFCMi9vaHhmQjk2dERwK29JNEtFTm1TRGhLMFhnd0FPT3FPWGRwMi9GQllNSmN5TVh4YlZ4VlNQdVcvbHEveU" +
                "JkZExoMTdrZnNaVWpOMGlVMTBDbndkNERSd3Z4VjlpVm9hUy9mTXhLdUsxSVV5cjY1cngrQWYwdjN4WGxvWWJGL3ZDNXF6T" +
                "2FPa0JTL3Z6bGtxUUFUN3h4bXg1YitBTHlpbGkxazdJbHBIVm1PUm0rUkgveDFOdzFDQUVhQ1BXcE1Ud3ZpY2ROcUxGWlRt" +
                "VFM2bml3PT0hQVcwQ2puM3g2THgvaWJ0cE9nMzBEUT09");
        final AdmVariant variant = new AdmVariant();
        variant.setClientSecret("12");
        variant.setClientId("12");
        // when
        deviceTokenTest(installation, variant);
    }

    @Test(expected = ConstraintViolationException.class)
    public void shouldNotSaveWhenSimplePushTokenInvalid() {
        // given: "htp://" is not a valid URL scheme for a SimplePush endpoint
        final Installation installation = new Installation();
        installation.setDeviceToken("htp://invalid");
        final SimplePushVariant variant = new SimplePushVariant();
        // when
        deviceTokenTest(installation, variant);
    }

    @Test
    public void shouldSaveWhenValidateDeviceIdMPNSWindows() {
        // given: a well-formed MPNS channel URI
        final Installation installation = new Installation();
        installation.setDeviceToken("https://s.notify.live.net/u/1/db3/HmQAAACsY7ZBMnNW6QnfPcHXC1gwvHFlPeujLy"
                + "aLyoJmTm79gofALwJGBefhxH_Rjpz4oAoK5O5zL2nQwaFZpLMpXUP/d2luZG93c3Bob25lZGVmYXVsdA/AGVGhYlaBG"
                + "GphX2C8gGmg/vedAL_DKqnF00b4O3NCIifacDEQ");
        WindowsMPNSVariant variant = new WindowsMPNSVariant();
        // when
        deviceTokenTest(installation, variant);
    }

    @Test
    public void shouldSaveWhenSimplePushTokenValid() {
        // given: plain http is accepted for SimplePush endpoints
        final Installation installation = new Installation();
        installation.setDeviceToken("http://valid/but/you/should/use/https");
        final SimplePushVariant variant = new SimplePushVariant();
        // when
        deviceTokenTest(installation, variant);
    }

    @Test
    public void shouldSaveWhenValidateDeviceIdAndroid() {
        // given: a well-formed GCM registration id
        final Installation installation = new Installation();
        installation.setDeviceToken("APA91bHpbMXepp4odlb20vYOv0gQyNIyFu2X3OXR3TjqR8qecgWivima_UiLPFgUBs_10Nys2TUwUy"
                + "WlixrIta35NXW-5Z85OdXcbb_3s3p0qaa_a7NpFlaX9GpidK_BdQNMsx2gX8BrE4Uw7s22nPCcEn1U1_mo-"
                + "T6hcF5unYt965PDwRTRss8");
        final AndroidVariant variant = new AndroidVariant();
        variant.setGoogleKey("12");
        variant.setProjectNumber("12");
        // when
        deviceTokenTest(installation, variant);
    }

    /**
     * Persists the variant, binds the installation to it and flushes, so any
     * device-token validation error surfaces as an exception in the caller.
     */
    private void deviceTokenTest(Installation installation, Variant variant) {
        entityManager.persist(variant);
        installation.setVariant(variant);
        // when
        installationDao.create(installation);
        entityManager.flush();
    }

    @Test
    public void primaryKeyUnmodifiedAfterUpdate() {
        Installation android1 = new Installation();
        android1.setAlias("foo@bar.org");
        android1.setDeviceToken(DEVICE_TOKEN_1);
        android1.setDeviceType("Android Phone");
        final Set<Category> categoriesOne = new HashSet<Category>();
        final Category category = entityManager.createQuery("from Category where name = :name", Category.class)
                .setParameter("name", "soccer").getSingleResult();
        categoriesOne.add(category);
        android1.setCategories(categoriesOne);
        // Capture the id before create/update to verify it never changes.
        final String id = android1.getId();
        final AndroidVariant variant = new AndroidVariant();
        variant.setGoogleKey("12");
        variant.setProjectNumber("12");
        entityManager.persist(variant);
        android1.setVariant(variant);
        installationDao.create(android1);
        // flush to be sure that it's in the database
        entityManager.flush();
        // clear the cache otherwise finding the entity will not perform a select but get the entity from cache
        entityManager.clear();
        Installation installation = installationDao.find(id);
        assertThat(installation.getId()).isEqualTo(id);
        assertThat(installation.getDeviceType()).isEqualTo("Android Phone");
        final String alias = "foobar@bar.org";
        android1.setAlias(alias);
        installationDao.update(android1);
        entityManager.flush();
        entityManager.clear();
        // The update must be visible under the same, unmodified primary key.
        installation = installationDao.find(id);
        assertThat(installation.getAlias()).isEqualTo(alias);
    }

    @Test
    public void shouldSelectInstallationsByVariantForDeveloper() {
        //given
        String developer = "me";
        //when: page size 1, so the result list is capped but the count is not
        final PageResult pageResult = installationDao.findInstallationsByVariantForDeveloper(androidVariantID, developer, 0, 1);
        //then
        assertThat(pageResult).isNotNull();
        assertThat(pageResult.getResultList()).isNotEmpty().hasSize(1);
        assertThat(pageResult.getCount()).isEqualTo(3);
    }

    @Test
    public void shouldSelectInstallationsByVariant() {
        //when: page size 1, so the result list is capped but the count is not
        final PageResult pageResult = installationDao.findInstallationsByVariant(androidVariantID, 0, 1);
        //then
        assertThat(pageResult).isNotNull();
        assertThat(pageResult.getResultList()).isNotEmpty().hasSize(1);
        assertThat(pageResult.getCount()).isEqualTo(3);
    }

    @Test(expected = PersistenceException.class)
    public void testTooLongDeviceToken() {
        // 4097 characters: one past the column limit, must be rejected by the database.
        AndroidVariant variant = new AndroidVariant();
        variant.setGoogleKey("123");
        variant.setProjectNumber("123");
        entityManager.persist(variant);
        Installation android1 = new Installation();
        android1.setAlias("foo@bar.org");
        android1.setDeviceToken(TestUtils.longString(4097));
        android1.setVariant(variant);
        installationDao.create(android1);
        entityManager.flush();
    }

    @Test
    public void testLongDeviceToken() {
        // 4096 characters: exactly at the column limit, must be accepted.
        AndroidVariant variant = new AndroidVariant();
        variant.setGoogleKey("123");
        variant.setProjectNumber("123");
        entityManager.persist(variant);
        Installation android1 = new Installation();
        android1.setAlias("foo@bar.org");
        android1.setDeviceToken(TestUtils.longString(4096));
        android1.setVariant(variant);
        installationDao.create(android1);
        entityManager.flush();
    }

    /**
     * Drains the DAO's token {@link ResultsStream} for the given filter criteria
     * into a plain list, rethrowing stream failures as unchecked exceptions.
     *
     * @param variantID   variant to query tokens for
     * @param categories  category-name filter, or null for "any"
     * @param aliases     alias filter, or null for "any"
     * @param deviceTypes device-type filter, or null for "any"
     * @return all matching device tokens
     */
    private List<String> findAllDeviceTokenForVariantIDByCriteria(String variantID, List<String> categories, List<String> aliases, List<String> deviceTypes) {
        try {
            ResultsStream<String> tokenStream = installationDao.findAllDeviceTokenForVariantIDByCriteria(variantID, categories, aliases, deviceTypes, Integer.MAX_VALUE, null).executeQuery();
            List<String> list = new ArrayList<String>();
            while (tokenStream.next()) {
                list.add(tokenStream.get());
            }
            return list;
        } catch (ResultStreamException e) {
            throw new IllegalStateException(e);
        }
    }
}
| |
/**
* $URL$
* $Id$
*
* Copyright (c) 2006-2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.sitestats.impl.report;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import org.sakaiproject.entity.api.ContentExistsAware;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.EntityProducer;
import org.sakaiproject.entity.api.EntityTransferrer;
import org.sakaiproject.entity.api.HttpAccess;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.entity.cover.EntityManager;
import org.sakaiproject.sitestats.api.StatsManager;
import org.sakaiproject.sitestats.api.report.ReportDef;
import org.sakaiproject.sitestats.api.report.ReportManager;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
 * {@link EntityProducer} / {@link EntityTransferrer} implementation that lets
 * SiteStats report definitions participate in Sakai's site duplication / import
 * machinery. Archive and merge are intentionally not supported
 * ({@link #willArchiveMerge()} returns false and the related callbacks are no-ops).
 */
public class ReportDefEntityProducer implements EntityProducer, EntityTransferrer, ContentExistsAware {
    private ReportManager M_rm;

    // --- Sakai services --------------------------------
    /** Registers this producer with the Sakai EntityManager; called by the container at startup. */
    public void init() {
        EntityManager.registerEntityProducer(this, ReportDefEntityProvider.REFERENCE_ROOT);
    }

    public void setReportManager(ReportManager reportManager) {
        this.M_rm = reportManager;
    }

    // --- EntityTransferrer -----------------------------
    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityTransferrer#myToolIds()
     */
    public String[] myToolIds() {
        return new String[]{StatsManager.SITESTATS_TOOLID};
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityTransferrer#transferCopyEntities(java.lang.String, java.lang.String, java.util.List)
     */
    public void transferCopyEntities(String fromContext, String toContext, List ids) {
        // Delegate with cleanup=false: existing reports on the target site are kept.
        transferCopyEntities(fromContext, toContext, ids, false);
    }

    /**
     * Copies report definitions from one site to another.
     *
     * @param fromContext source site id
     * @param toContext   target site id
     * @param ids         ids (as strings) of the definitions to copy; when null or
     *                    empty, all definitions the ReportManager returns for the
     *                    source site are copied
     * @param cleanup     when true, all of the target site's existing definitions
     *                    are removed before copying
     * @see org.sakaiproject.entity.api.EntityTransferrer#transferCopyEntities(java.lang.String, java.lang.String, java.util.List, boolean)
     */
    public void transferCopyEntities(String fromContext, String toContext, List ids, boolean cleanup) {
        // determine report definitions to copy
        List<ReportDef> list;
        if(ids != null && !ids.isEmpty()) {
            list = new ArrayList<ReportDef>();
            for(String id : (List<String>) ids) {
                ReportDef rd = M_rm.getReportDefinition(Long.valueOf(id));
                if(rd != null) {
                    list.add(rd);
                }
            }
        }else{
            list = M_rm.getReportDefinitions(fromContext, false, true);
        }
        // cleanup existing reports on destination site before copying
        if(cleanup) {
            for(ReportDef rd : M_rm.getReportDefinitions(toContext, false, true)) {
                M_rm.removeReportDefinition(rd);
            }
        }
        // copy to destination: reset the id so a new row is created, and rebind
        // both the definition and its parameters to the target site
        for(ReportDef rd : list) {
            rd.setId(0);
            rd.setSiteId(toContext);
            rd.getReportParams().setSiteId(toContext);
            M_rm.saveReportDefinition(rd);
        }
    }

    // --- EntityProducer --------------------------------
    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#getLabel()
     */
    public String getLabel() {
        return ReportDefEntityProvider.LABEL;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#willArchiveMerge()
     */
    public boolean willArchiveMerge() {
        // Report definitions do not take part in site archive/merge.
        return false;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#archive(java.lang.String, org.w3c.dom.Document, java.util.Stack, java.lang.String, java.util.List)
     */
    public String archive(String siteId, Document doc, Stack stack, String archivePath, List attachments) {
        // Archiving not supported; see willArchiveMerge().
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#merge(java.lang.String, org.w3c.dom.Element, java.lang.String, java.lang.String, java.util.Map, java.util.Map, java.util.Set)
     */
    public String merge(String siteId, Element root, String archivePath, String fromSiteId, Map attachmentNames, Map userIdTrans, Set userListAllowImport) {
        // Merging not supported; see willArchiveMerge().
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#parseEntityReference(java.lang.String, org.sakaiproject.entity.api.Reference)
     */
    public boolean parseEntityReference(String reference, Reference ref) {
        // This producer does not resolve entity references.
        return false;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#getEntityDescription(org.sakaiproject.entity.api.Reference)
     */
    public String getEntityDescription(Reference ref) {
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#getEntityResourceProperties(org.sakaiproject.entity.api.Reference)
     */
    public ResourceProperties getEntityResourceProperties(Reference ref) {
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#getEntity(org.sakaiproject.entity.api.Reference)
     */
    public Entity getEntity(Reference ref) {
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#getEntityUrl(org.sakaiproject.entity.api.Reference)
     */
    public String getEntityUrl(Reference ref) {
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#getEntityAuthzGroups(org.sakaiproject.entity.api.Reference, java.lang.String)
     */
    public Collection getEntityAuthzGroups(Reference ref, String userId) {
        return null;
    }

    /* (non-Javadoc)
     * @see org.sakaiproject.entity.api.EntityProducer#getHttpAccess()
     */
    public HttpAccess getHttpAccess() {
        // No direct HTTP access to report definitions.
        return null;
    }

    /**
     * This implementation simply checks if we have reports in the site. If so, consider it content.
     *
     * @see org.sakaiproject.entity.api.ContentExistsAware#hasContent()
     */
    @Override
    public boolean hasContent(String siteId) {
        return !M_rm.getReportDefinitions(siteId, false, true).isEmpty();
    }
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1p3beta1/image_annotator.proto
package com.google.cloud.vision.v1p3beta1;
/**
*
*
* <pre>
* Set of crop hints that are used to generate new crops when serving images.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1p3beta1.CropHintsAnnotation}
*/
public final class CropHintsAnnotation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1p3beta1.CropHintsAnnotation)
CropHintsAnnotationOrBuilder {
private static final long serialVersionUID = 0L;
// Use CropHintsAnnotation.newBuilder() to construct.
private CropHintsAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CropHintsAnnotation() {
cropHints_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CropHintsAnnotation();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private CropHintsAnnotation(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
cropHints_ = new java.util.ArrayList<com.google.cloud.vision.v1p3beta1.CropHint>();
mutable_bitField0_ |= 0x00000001;
}
cropHints_.add(
input.readMessage(
com.google.cloud.vision.v1p3beta1.CropHint.parser(), extensionRegistry));
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
cropHints_ = java.util.Collections.unmodifiableList(cropHints_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
.internal_static_google_cloud_vision_v1p3beta1_CropHintsAnnotation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
.internal_static_google_cloud_vision_v1p3beta1_CropHintsAnnotation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1p3beta1.CropHintsAnnotation.class,
com.google.cloud.vision.v1p3beta1.CropHintsAnnotation.Builder.class);
}
public static final int CROP_HINTS_FIELD_NUMBER = 1;
private java.util.List<com.google.cloud.vision.v1p3beta1.CropHint> cropHints_;
/**
*
*
* <pre>
* Crop hint results.
* </pre>
*
* <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.vision.v1p3beta1.CropHint> getCropHintsList() {
return cropHints_;
}
/**
*
*
* <pre>
* Crop hint results.
* </pre>
*
* <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.vision.v1p3beta1.CropHintOrBuilder>
getCropHintsOrBuilderList() {
return cropHints_;
}
/**
*
*
* <pre>
* Crop hint results.
* </pre>
*
* <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
*/
@java.lang.Override
public int getCropHintsCount() {
return cropHints_.size();
}
/**
*
*
* <pre>
* Crop hint results.
* </pre>
*
* <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
*/
@java.lang.Override
public com.google.cloud.vision.v1p3beta1.CropHint getCropHints(int index) {
return cropHints_.get(index);
}
/**
*
*
* <pre>
* Crop hint results.
* </pre>
*
* <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
*/
@java.lang.Override
public com.google.cloud.vision.v1p3beta1.CropHintOrBuilder getCropHintsOrBuilder(int index) {
return cropHints_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < cropHints_.size(); i++) {
output.writeMessage(1, cropHints_.get(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < cropHints_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, cropHints_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.vision.v1p3beta1.CropHintsAnnotation)) {
return super.equals(obj);
}
com.google.cloud.vision.v1p3beta1.CropHintsAnnotation other =
(com.google.cloud.vision.v1p3beta1.CropHintsAnnotation) obj;
if (!getCropHintsList().equals(other.getCropHintsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getCropHintsCount() > 0) {
hash = (37 * hash) + CROP_HINTS_FIELD_NUMBER;
hash = (53 * hash) + getCropHintsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protoc-generated parse entry points. Each overload delegates to
// PARSER (for in-memory data) or to the GeneratedMessageV3 stream helpers
// (which translate parse failures into IOException for stream inputs).
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// The "delimited" variants read a varint length prefix before the message,
// allowing several messages to be written back-to-back on one stream.
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
// Creates an empty builder by cloning from the shared default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the contents of {@code prototype}.
public static Builder newBuilder(
    com.google.cloud.vision.v1p3beta1.CropHintsAnnotation prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Skip the merge when called on the default instance — there is nothing to copy.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Set of crop hints that are used to generate new crops when serving images.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1p3beta1.CropHintsAnnotation}
*/
// NOTE: protoc-generated builder for CropHintsAnnotation. Do not hand-edit;
// change the .proto and regenerate. The single repeated field (crop_hints)
// is managed either as a plain java.util.List (cropHints_) or, once any
// sub-builder access happens, through a RepeatedFieldBuilderV3
// (cropHintsBuilder_); exactly one of the two is active at a time.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p3beta1.CropHintsAnnotation)
    com.google.cloud.vision.v1p3beta1.CropHintsAnnotationOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p3beta1_CropHintsAnnotation_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p3beta1_CropHintsAnnotation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.vision.v1p3beta1.CropHintsAnnotation.class,
            com.google.cloud.vision.v1p3beta1.CropHintsAnnotation.Builder.class);
  }
  // Construct using com.google.cloud.vision.v1p3beta1.CropHintsAnnotation.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // Eagerly creates the repeated-field builder when the runtime is configured
  // to always use field builders (descriptor-based reflection path).
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getCropHintsFieldBuilder();
    }
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    if (cropHintsBuilder_ == null) {
      cropHints_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
    } else {
      cropHintsBuilder_.clear();
    }
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p3beta1_CropHintsAnnotation_descriptor;
  }
  @java.lang.Override
  public com.google.cloud.vision.v1p3beta1.CropHintsAnnotation getDefaultInstanceForType() {
    return com.google.cloud.vision.v1p3beta1.CropHintsAnnotation.getDefaultInstance();
  }
  @java.lang.Override
  public com.google.cloud.vision.v1p3beta1.CropHintsAnnotation build() {
    com.google.cloud.vision.v1p3beta1.CropHintsAnnotation result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.cloud.vision.v1p3beta1.CropHintsAnnotation buildPartial() {
    com.google.cloud.vision.v1p3beta1.CropHintsAnnotation result =
        new com.google.cloud.vision.v1p3beta1.CropHintsAnnotation(this);
    int from_bitField0_ = bitField0_;
    if (cropHintsBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        // Freeze our mutable copy and hand it to the message; further builder
        // mutations will re-copy via ensureCropHintsIsMutable().
        cropHints_ = java.util.Collections.unmodifiableList(cropHints_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.cropHints_ = cropHints_;
    } else {
      result.cropHints_ = cropHintsBuilder_.build();
    }
    onBuilt();
    return result;
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.vision.v1p3beta1.CropHintsAnnotation) {
      return mergeFrom((com.google.cloud.vision.v1p3beta1.CropHintsAnnotation) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  public Builder mergeFrom(com.google.cloud.vision.v1p3beta1.CropHintsAnnotation other) {
    if (other == com.google.cloud.vision.v1p3beta1.CropHintsAnnotation.getDefaultInstance())
      return this;
    if (cropHintsBuilder_ == null) {
      if (!other.cropHints_.isEmpty()) {
        if (cropHints_.isEmpty()) {
          // We hold no elements yet: alias the other message's (immutable)
          // list instead of copying; bit 0 cleared marks it as not-owned.
          cropHints_ = other.cropHints_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureCropHintsIsMutable();
          cropHints_.addAll(other.cropHints_);
        }
        onChanged();
      }
    } else {
      if (!other.cropHints_.isEmpty()) {
        if (cropHintsBuilder_.isEmpty()) {
          // Builder path with no elements: drop the empty builder, alias the
          // incoming list, then re-create the builder if the runtime requires
          // field builders to always be present.
          cropHintsBuilder_.dispose();
          cropHintsBuilder_ = null;
          cropHints_ = other.cropHints_;
          bitField0_ = (bitField0_ & ~0x00000001);
          cropHintsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                  ? getCropHintsFieldBuilder()
                  : null;
        } else {
          cropHintsBuilder_.addAllMessages(other.cropHints_);
        }
      }
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.cloud.vision.v1p3beta1.CropHintsAnnotation parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was successfully parsed so the finally block can still
      // merge the partial message before rethrowing.
      parsedMessage =
          (com.google.cloud.vision.v1p3beta1.CropHintsAnnotation) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  // Bit 0 of bitField0_ tracks whether cropHints_ is a private mutable copy
  // owned by this builder (set) or a shared/immutable list (clear).
  private int bitField0_;
  private java.util.List<com.google.cloud.vision.v1p3beta1.CropHint> cropHints_ =
      java.util.Collections.emptyList();
  private void ensureCropHintsIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      cropHints_ =
          new java.util.ArrayList<com.google.cloud.vision.v1p3beta1.CropHint>(cropHints_);
      bitField0_ |= 0x00000001;
    }
  }
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.vision.v1p3beta1.CropHint,
          com.google.cloud.vision.v1p3beta1.CropHint.Builder,
          com.google.cloud.vision.v1p3beta1.CropHintOrBuilder>
      cropHintsBuilder_;
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public java.util.List<com.google.cloud.vision.v1p3beta1.CropHint> getCropHintsList() {
    if (cropHintsBuilder_ == null) {
      return java.util.Collections.unmodifiableList(cropHints_);
    } else {
      return cropHintsBuilder_.getMessageList();
    }
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public int getCropHintsCount() {
    if (cropHintsBuilder_ == null) {
      return cropHints_.size();
    } else {
      return cropHintsBuilder_.getCount();
    }
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public com.google.cloud.vision.v1p3beta1.CropHint getCropHints(int index) {
    if (cropHintsBuilder_ == null) {
      return cropHints_.get(index);
    } else {
      return cropHintsBuilder_.getMessage(index);
    }
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public Builder setCropHints(int index, com.google.cloud.vision.v1p3beta1.CropHint value) {
    if (cropHintsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureCropHintsIsMutable();
      cropHints_.set(index, value);
      onChanged();
    } else {
      cropHintsBuilder_.setMessage(index, value);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public Builder setCropHints(
      int index, com.google.cloud.vision.v1p3beta1.CropHint.Builder builderForValue) {
    if (cropHintsBuilder_ == null) {
      ensureCropHintsIsMutable();
      cropHints_.set(index, builderForValue.build());
      onChanged();
    } else {
      cropHintsBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public Builder addCropHints(com.google.cloud.vision.v1p3beta1.CropHint value) {
    if (cropHintsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureCropHintsIsMutable();
      cropHints_.add(value);
      onChanged();
    } else {
      cropHintsBuilder_.addMessage(value);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public Builder addCropHints(int index, com.google.cloud.vision.v1p3beta1.CropHint value) {
    if (cropHintsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureCropHintsIsMutable();
      cropHints_.add(index, value);
      onChanged();
    } else {
      cropHintsBuilder_.addMessage(index, value);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public Builder addCropHints(
      com.google.cloud.vision.v1p3beta1.CropHint.Builder builderForValue) {
    if (cropHintsBuilder_ == null) {
      ensureCropHintsIsMutable();
      cropHints_.add(builderForValue.build());
      onChanged();
    } else {
      cropHintsBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public Builder addCropHints(
      int index, com.google.cloud.vision.v1p3beta1.CropHint.Builder builderForValue) {
    if (cropHintsBuilder_ == null) {
      ensureCropHintsIsMutable();
      cropHints_.add(index, builderForValue.build());
      onChanged();
    } else {
      cropHintsBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public Builder addAllCropHints(
      java.lang.Iterable<? extends com.google.cloud.vision.v1p3beta1.CropHint> values) {
    if (cropHintsBuilder_ == null) {
      ensureCropHintsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, cropHints_);
      onChanged();
    } else {
      cropHintsBuilder_.addAllMessages(values);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public Builder clearCropHints() {
    if (cropHintsBuilder_ == null) {
      cropHints_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      cropHintsBuilder_.clear();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public Builder removeCropHints(int index) {
    if (cropHintsBuilder_ == null) {
      ensureCropHintsIsMutable();
      cropHints_.remove(index);
      onChanged();
    } else {
      cropHintsBuilder_.remove(index);
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public com.google.cloud.vision.v1p3beta1.CropHint.Builder getCropHintsBuilder(int index) {
    return getCropHintsFieldBuilder().getBuilder(index);
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public com.google.cloud.vision.v1p3beta1.CropHintOrBuilder getCropHintsOrBuilder(int index) {
    if (cropHintsBuilder_ == null) {
      return cropHints_.get(index);
    } else {
      return cropHintsBuilder_.getMessageOrBuilder(index);
    }
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public java.util.List<? extends com.google.cloud.vision.v1p3beta1.CropHintOrBuilder>
      getCropHintsOrBuilderList() {
    if (cropHintsBuilder_ != null) {
      return cropHintsBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(cropHints_);
    }
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public com.google.cloud.vision.v1p3beta1.CropHint.Builder addCropHintsBuilder() {
    return getCropHintsFieldBuilder()
        .addBuilder(com.google.cloud.vision.v1p3beta1.CropHint.getDefaultInstance());
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public com.google.cloud.vision.v1p3beta1.CropHint.Builder addCropHintsBuilder(int index) {
    return getCropHintsFieldBuilder()
        .addBuilder(index, com.google.cloud.vision.v1p3beta1.CropHint.getDefaultInstance());
  }
  /**
   *
   *
   * <pre>
   * Crop hint results.
   * </pre>
   *
   * <code>repeated .google.cloud.vision.v1p3beta1.CropHint crop_hints = 1;</code>
   */
  public java.util.List<com.google.cloud.vision.v1p3beta1.CropHint.Builder>
      getCropHintsBuilderList() {
    return getCropHintsFieldBuilder().getBuilderList();
  }
  // Lazily switches the field into builder mode: from here on cropHints_ is
  // owned by the RepeatedFieldBuilderV3 and the plain-list reference is nulled.
  private com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.cloud.vision.v1p3beta1.CropHint,
          com.google.cloud.vision.v1p3beta1.CropHint.Builder,
          com.google.cloud.vision.v1p3beta1.CropHintOrBuilder>
      getCropHintsFieldBuilder() {
    if (cropHintsBuilder_ == null) {
      cropHintsBuilder_ =
          new com.google.protobuf.RepeatedFieldBuilderV3<
              com.google.cloud.vision.v1p3beta1.CropHint,
              com.google.cloud.vision.v1p3beta1.CropHint.Builder,
              com.google.cloud.vision.v1p3beta1.CropHintOrBuilder>(
              cropHints_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
      cropHints_ = null;
    }
    return cropHintsBuilder_;
  }
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p3beta1.CropHintsAnnotation)
}
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1p3beta1.CropHintsAnnotation)
// Singleton empty instance shared by getDefaultInstance() and newBuilder().
private static final com.google.cloud.vision.v1p3beta1.CropHintsAnnotation DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.vision.v1p3beta1.CropHintsAnnotation();
}
public static com.google.cloud.vision.v1p3beta1.CropHintsAnnotation getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser backing all of the static parseFrom(...) overloads above.
private static final com.google.protobuf.Parser<CropHintsAnnotation> PARSER =
    new com.google.protobuf.AbstractParser<CropHintsAnnotation>() {
      @java.lang.Override
      public CropHintsAnnotation parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new CropHintsAnnotation(input, extensionRegistry);
      }
    };
public static com.google.protobuf.Parser<CropHintsAnnotation> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CropHintsAnnotation> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.vision.v1p3beta1.CropHintsAnnotation getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright 2000-2003 by Model N, Inc. All Rights Reserved.
*
* This software is the confidential and proprietary information
* of Model N, Inc ("Confidential Information"). You shall not
* disclose such Confidential Information and shall use it only
* in accordance with the terms of the license agreement you
* entered into with Model N, Inc.
*/
package com.modeln.build.ant;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.IOException;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.BuildException;
/**
* Obtain the value of a Java system property and store the value
* as an Ant property. This task allows you to fork off a separate
* JVM so that it can query the system properties under that JVM.
*
* @author Shawn Stafford
*/
public final class SystemPropertyTask extends Task {
/** Name of the system property being returned. */
private String propname = null;
/** Default value of the system property returned by the Ant task. */
private String defaultvalue = "";
/** Name of the Ant property used to return the value to Ant. */
private String returnprop = "system.property.value";
/** Path to the Java executable. */
private String executable = null;
/** Boolean flag to indicate that the task should be executed as a forked process. */
private boolean fork = false;
/**
* Set the name of the system property being returned.
*
* @param name Name of the system property to query
*/
public void setName(String name) {
propname = name;
}
/**
* Set the name of the Ant property where the value should be saved.
*
* @param name Name of the ant property to return
*/
public void setProperty(String name) {
returnprop = name;
}
/**
* Set the default value of the system property if none is found.
*
* @param value Default property value
*/
public void setDefault(String value) {
defaultvalue = value;
}
/**
* Set the Java executable that should be used to execute the task
* as a forked process. This property is only used if the fork
* attribute is true.
*
* @param path Path to the java executable
*/
public void setExecutable(String path) {
executable = path;
}
/**
* Execute the task as a forked process when true, otherwise execute
* the task within the Ant JVM. If fork is true, the executable
* attribute must also be set.
*
* @param enabled TRUE if the task should execute in a forked process
*/
public void setFork(boolean enabled) {
fork = enabled;
}
/**
* Obtain the system property value and return as an Ant property.
*/
public void execute() throws BuildException {
String value = null;
if (fork) {
value = executeForked();
} else {
value = executeLocal();
}
// Use the default value if none can be obtained
if (value == null) {
value = defaultvalue;
}
// Return the property value
getProject().setProperty(returnprop, value);
}
/**
* Execute the task in the current JVM.
*/
private String executeLocal() throws BuildException {
String value = null;
try {
value = System.getProperty(propname);
} catch (SecurityException sex) {
throw new BuildException("Unable to access the system property: " + propname, sex, getLocation());
} catch (NullPointerException npex) {
throw new BuildException("A system property name must be specified.", npex, getLocation());
} catch (IllegalArgumentException iaex) {
throw new BuildException("The system property name must not be empty.", iaex, getLocation());
}
return value;
}
/**
* Execute the task in a forked JVM instance.
*/
private String executeForked() throws BuildException {
// Don't even bother doing anything if the executable isn't specified
if ((executable == null) || (executable.length() == 0)) {
throw new BuildException("The executable attribute must be set if fork is enabled.", getLocation());
}
String value = null;
try {
// Figure out what the current classpath is so we can find the class to execute
String classpath = System.getProperty("java.class.path");
String[] cmd = { executable, "-classpath", classpath, getClass().getName(), propname };
Process proc = Runtime.getRuntime().exec(cmd);
int exitValue = proc.waitFor();
if (exitValue == 0) {
BufferedReader stdout = new BufferedReader(new InputStreamReader(proc.getInputStream()));
value = stdout.readLine();
} else {
BufferedReader stdout = new BufferedReader(new InputStreamReader(proc.getErrorStream()));
throw new BuildException(stdout.readLine(), getLocation());
}
} catch (IOException ioex) {
throw new BuildException("Unable to access forked process.", ioex, getLocation());
} catch (SecurityException sex) {
throw new BuildException("Unable to access the system property: " + propname, sex, getLocation());
} catch (NullPointerException npex) {
throw new BuildException("A system property name must be specified.", npex, getLocation());
} catch (InterruptedException iex) {
throw new BuildException("The forked process has been interrupted.", iex, getLocation());
} catch (IllegalArgumentException iaex) {
throw new BuildException("The system property name must not be empty.", iaex, getLocation());
}
return value;
}
/**
* Process the command line arguments and execute the program
* accordingly.
*/
public static void main(String[] args) {
String propname = args[0];
// Obtain the property value from the environment
String value = null;
try {
value = System.getProperty(propname);
} catch (SecurityException sex) {
sex.printStackTrace(System.out);
System.err.println("Unable to access the forked system property: " + propname);
System.exit(1);
} catch (NullPointerException npex) {
npex.printStackTrace(System.out);
System.err.println("A forked system property name must be specified.");
System.exit(1);
} catch (IllegalArgumentException iaex) {
iaex.printStackTrace(System.out);
System.err.println("The forked system property name must not be empty.");
System.exit(1);
}
// Return the property value
if (value != null) {
System.out.println(value);
} else {
System.err.println("The forked property value was null: " + propname);
System.exit(2);
}
}
}
| |
package org.strangeforest.tcb.stats.model.core;
import java.time.*;
import org.strangeforest.tcb.util.*;
/**
 * Mutable data holder describing a tennis player: identity, biographical
 * data, playing style, social links, title counts and ranking figures.
 * The id is immutable; every other attribute is populated via setters.
 */
public class Player {

	// General
	private final int id;
	private String name;
	private LocalDate dob;
	private LocalDate dod;
	private int age;
	private Country country;
	private String birthplace;
	private String residence;
	private int height;
	private int weight;
	// Tennis
	private String hand;      // "L" or "R"; may be null when unknown
	private String backhand;  // "1" or "2"; may be null when unknown
	private boolean active;
	private int turnedPro;
	private String coach;
	private String prizeMoney;
	// Social
	private String wikipedia;
	private String webSite;
	private String facebook;
	private String twitter;
	private String nicknames;
	// Titles
	private int titles;
	private int grandSlams;
	private int tourFinals;
	private int altFinals;
	private int masters;
	private int olympics;
	// Ranking
	private int currentRank;
	private int currentRankPoints;
	private int bestRank;
	private LocalDate bestRankDate;
	private int currentEloRank;
	private int currentEloRating;
	private int bestEloRank;
	private LocalDate bestEloRankDate;
	private int bestEloRating;
	private LocalDate bestEloRatingDate;
	private int goatRank;
	private int goatPoints;
	private int weeksAtNo1;

	public Player(int id) {
		this.id = id;
	}

	public int getId() {
		return id;
	}


	// General

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public LocalDate getDob() {
		return dob;
	}

	public void setDob(LocalDate dob) {
		this.dob = dob;
	}

	public LocalDate getDod() {
		return dod;
	}

	public void setDod(LocalDate dod) {
		this.dod = dod;
	}

	public int getAge() {
		return age;
	}

	public void setAge(int age) {
		this.age = age;
	}

	public Country getCountry() {
		return country;
	}

	public void setCountryId(String countryId) {
		country = new Country(countryId);
	}

	public String getBirthplace() {
		return birthplace;
	}

	public void setBirthplace(String birthplace) {
		this.birthplace = birthplace;
	}

	public String getResidence() {
		return residence;
	}

	public void setResidence(String residence) {
		this.residence = residence;
	}

	public int getHeight() {
		return height;
	}

	public void setHeight(int height) {
		this.height = height;
	}

	public int getWeight() {
		return weight;
	}

	public void setWeight(int weight) {
		this.weight = weight;
	}


	// Tennis

	public String getHand() {
		return hand;
	}

	/**
	 * Human-readable name of the playing hand ("L"/"R" code),
	 * or null when the hand is unknown or not set.
	 */
	public String getHandName() {
		// FIX: switch on a null String throws NPE; an unset hand should
		// behave like an unrecognized code and yield null.
		if (hand == null)
			return null;
		switch (hand) {
			case "L": return "Left-handed";
			case "R": return "Right-handed";
			default: return null;
		}
	}

	public void setHand(String hand) {
		this.hand = hand;
	}

	public String getBackhand() {
		return backhand;
	}

	/**
	 * Human-readable name of the backhand style ("1"/"2" code),
	 * or null when the backhand is unknown or not set.
	 */
	public String getBackhandName() {
		// FIX: switch on a null String throws NPE; an unset backhand should
		// behave like an unrecognized code and yield null.
		if (backhand == null)
			return null;
		switch (backhand) {
			case "1": return "One-handed";
			case "2": return "Two-handed";
			default: return null;
		}
	}

	public void setBackhand(String backhand) {
		this.backhand = backhand;
	}

	public boolean isActive() {
		return active;
	}

	public void setActive(boolean active) {
		this.active = active;
	}

	public int getTurnedPro() {
		return turnedPro;
	}

	public void setTurnedPro(int turnedPro) {
		this.turnedPro = turnedPro;
	}

	public String getCoach() {
		return coach;
	}

	public void setCoach(String coach) {
		this.coach = coach;
	}

	public String getPrizeMoney() {
		return prizeMoney;
	}

	public void setPrizeMoney(String prizeMoney) {
		this.prizeMoney = prizeMoney;
	}


	// Social

	public String getWikipedia() {
		return wikipedia;
	}

	public void setWikipedia(String wikipedia) {
		this.wikipedia = wikipedia;
	}

	public String getWebSite() {
		return webSite;
	}

	public void setWebSite(String webSite) {
		this.webSite = webSite;
	}

	public String getFacebook() {
		return facebook;
	}

	public void setFacebook(String facebook) {
		this.facebook = facebook;
	}

	public String getTwitter() {
		return twitter;
	}

	public void setTwitter(String twitter) {
		this.twitter = twitter;
	}

	public String getNicknames() {
		return nicknames;
	}

	public void setNicknames(String nicknames) {
		this.nicknames = nicknames;
	}


	// Titles

	public int getTitles() {
		return titles;
	}

	public void setTitles(int titles) {
		this.titles = titles;
	}

	public int getGrandSlams() {
		return grandSlams;
	}

	public void setGrandSlams(int grandSlams) {
		this.grandSlams = grandSlams;
	}

	public int getTourFinals() {
		return tourFinals;
	}

	public void setTourFinals(int tourFinals) {
		this.tourFinals = tourFinals;
	}

	public int getAltFinals() {
		return altFinals;
	}

	public void setAltFinals(int altFinals) {
		this.altFinals = altFinals;
	}

	public int getMasters() {
		return masters;
	}

	public void setMasters(int masters) {
		this.masters = masters;
	}

	public int getOlympics() {
		return olympics;
	}

	public void setOlympics(int olympics) {
		this.olympics = olympics;
	}


	// Ranking

	public int getCurrentRank() {
		return currentRank;
	}

	public void setCurrentRank(int currentRank) {
		this.currentRank = currentRank;
	}

	public int getCurrentRankPoints() {
		return currentRankPoints;
	}

	public void setCurrentRankPoints(int currentRankPoints) {
		this.currentRankPoints = currentRankPoints;
	}

	public int getBestRank() {
		return bestRank;
	}

	public void setBestRank(int bestRank) {
		this.bestRank = bestRank;
	}

	public LocalDate getBestRankDate() {
		return bestRankDate;
	}

	public void setBestRankDate(LocalDate bestRankDate) {
		this.bestRankDate = bestRankDate;
	}

	public int getCurrentEloRank() {
		return currentEloRank;
	}

	public void setCurrentEloRank(int currentEloRank) {
		this.currentEloRank = currentEloRank;
	}

	public int getCurrentEloRating() {
		return currentEloRating;
	}

	public void setCurrentEloRating(int currentEloRating) {
		this.currentEloRating = currentEloRating;
	}

	public int getBestEloRank() {
		return bestEloRank;
	}

	public void setBestEloRank(int bestEloRank) {
		this.bestEloRank = bestEloRank;
	}

	public LocalDate getBestEloRankDate() {
		return bestEloRankDate;
	}

	public void setBestEloRankDate(LocalDate bestEloRankDate) {
		this.bestEloRankDate = bestEloRankDate;
	}

	public int getBestEloRating() {
		return bestEloRating;
	}

	public void setBestEloRating(int bestEloRating) {
		this.bestEloRating = bestEloRating;
	}

	public LocalDate getBestEloRatingDate() {
		return bestEloRatingDate;
	}

	public void setBestEloRatingDate(LocalDate bestEloRatingDate) {
		this.bestEloRatingDate = bestEloRatingDate;
	}

	public int getGoatRank() {
		return goatRank;
	}

	public void setGoatRank(int goatRank) {
		this.goatRank = goatRank;
	}

	public int getGoatPoints() {
		return goatPoints;
	}

	public void setGoatPoints(int goatPoints) {
		this.goatPoints = goatPoints;
	}

	public int getWeeksAtNo1() {
		return weeksAtNo1;
	}

	public void setWeeksAtNo1(int weeksAtNo1) {
		this.weeksAtNo1 = weeksAtNo1;
	}
}
| |
/**
* Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.interestrate.swaption.provider;
import static org.testng.AssertJUnit.assertEquals;
import java.util.Arrays;
import org.testng.annotations.Test;
import org.threeten.bp.Period;
import org.threeten.bp.ZonedDateTime;
import com.mcleodmoores.date.WorkingDayCalendar;
import com.opengamma.analytics.financial.instrument.index.GeneratorSwapFixedIbor;
import com.opengamma.analytics.financial.instrument.index.GeneratorSwapFixedIborMaster;
import com.opengamma.analytics.financial.instrument.index.IborIndex;
import com.opengamma.analytics.financial.instrument.swap.SwapFixedIborDefinition;
import com.opengamma.analytics.financial.instrument.swaption.SwaptionPhysicalFixedIborDefinition;
import com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityPaymentFixed;
import com.opengamma.analytics.financial.interestrate.payments.derivative.Coupon;
import com.opengamma.analytics.financial.interestrate.swap.derivative.SwapFixedCoupon;
import com.opengamma.analytics.financial.interestrate.swap.provider.SwapFixedCouponDiscountingMethod;
import com.opengamma.analytics.financial.interestrate.swaption.derivative.SwaptionPhysicalFixedIbor;
import com.opengamma.analytics.financial.model.interestrate.HullWhiteOneFactorPiecewiseConstantInterestRateModel;
import com.opengamma.analytics.financial.model.interestrate.definition.HullWhiteOneFactorPiecewiseConstantParameters;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.BlackFunctionData;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.NormalFunctionData;
import com.opengamma.analytics.financial.model.volatility.BlackImpliedVolatilityFormula;
import com.opengamma.analytics.financial.model.volatility.NormalImpliedVolatilityFormula;
import com.opengamma.analytics.financial.montecarlo.provider.HullWhiteMonteCarloMethod;
import com.opengamma.analytics.financial.provider.calculator.discounting.CashFlowEquivalentCalculator;
import com.opengamma.analytics.financial.provider.calculator.discounting.ParRateDiscountingCalculator;
import com.opengamma.analytics.financial.provider.calculator.discounting.PresentValueCurveSensitivityDiscountingCalculator;
import com.opengamma.analytics.financial.provider.calculator.discounting.PresentValueDiscountingCalculator;
import com.opengamma.analytics.financial.provider.calculator.hullwhite.PresentValueCurveSensitivityHullWhiteCalculator;
import com.opengamma.analytics.financial.provider.calculator.hullwhite.PresentValueHullWhiteCalculator;
import com.opengamma.analytics.financial.provider.description.HullWhiteDataSets;
import com.opengamma.analytics.financial.provider.description.MulticurveProviderDiscountDataSets;
import com.opengamma.analytics.financial.provider.description.interestrate.HullWhiteOneFactorProviderDiscount;
import com.opengamma.analytics.financial.provider.description.interestrate.HullWhiteOneFactorProviderInterface;
import com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderDiscount;
import com.opengamma.analytics.financial.provider.sensitivity.hullwhite.ParameterSensitivityHullWhiteDiscountInterpolatedFDCalculator;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyMulticurveSensitivity;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyParameterSensitivity;
import com.opengamma.analytics.financial.provider.sensitivity.parameter.ParameterSensitivityParameterCalculator;
import com.opengamma.analytics.financial.schedule.ScheduleCalculator;
import com.opengamma.analytics.financial.util.AssertSensitivityObjects;
import com.opengamma.analytics.math.random.NormalRandomNumberGenerator;
import com.opengamma.analytics.math.statistics.distribution.NormalDistribution;
import com.opengamma.analytics.math.statistics.distribution.ProbabilityDistribution;
import com.opengamma.util.money.Currency;
import com.opengamma.util.money.MultipleCurrencyAmount;
import com.opengamma.util.test.TestGroup;
import com.opengamma.util.time.DateUtils;
import cern.jet.random.engine.MersenneTwister;
/**
* Tests related to the pricing of physical delivery swaption in Hull-White one factor model.
*/
@Test(groups = TestGroup.UNIT)
public class SwaptionPhysicalFixedIborHullWhiteMethodTest {
// Shared market data: EUR/USD multi-curve set and the EURIBOR 6M index used by the swaptions.
private static final MulticurveProviderDiscount MULTICURVES = MulticurveProviderDiscountDataSets.createMulticurveEurUsd();
private static final IborIndex EURIBOR6M = MulticurveProviderDiscountDataSets.getIndexesIborMulticurveEurUsd()[1];
private static final Currency EUR = EURIBOR6M.getCurrency();
private static final WorkingDayCalendar CALENDAR = MulticurveProviderDiscountDataSets.getEURCalendar();
// Hull-White one-factor model parameters and the combined curves+model provider.
private static final HullWhiteOneFactorPiecewiseConstantParameters HW_PARAMETERS = HullWhiteDataSets.createHullWhiteParameters();
private static final HullWhiteOneFactorProviderDiscount HW_MULTICURVES = new HullWhiteOneFactorProviderDiscount(MULTICURVES,
HW_PARAMETERS, EUR);
private static final ZonedDateTime REFERENCE_DATE = DateUtils.getUTCDate(2011, 7, 7);
// Swaption 5Yx5Y
private static final int SPOT_LAG = EURIBOR6M.getSpotLag();
private static final int SWAP_TENOR_YEAR = 5;
private static final Period SWAP_TENOR = Period.ofYears(SWAP_TENOR_YEAR);
private static final GeneratorSwapFixedIbor EUR1YEURIBOR6M = GeneratorSwapFixedIborMaster.getInstance().getGenerator("EUR1YEURIBOR6M",
CALENDAR);
private static final ZonedDateTime EXPIRY_DATE = DateUtils.getUTCDate(2016, 7, 7);
private static final boolean IS_LONG = true;
private static final ZonedDateTime SETTLEMENT_DATE = ScheduleCalculator.getAdjustedDate(EXPIRY_DATE, SPOT_LAG, CALENDAR);
private static final double NOTIONAL = 100000000; // 100m
private static final double RATE = 0.0175;
private static final boolean FIXED_IS_PAYER = true;
// Underlying swap definitions (payer and receiver) and the four long/short payer/receiver swaptions built from them.
private static final SwapFixedIborDefinition SWAP_PAYER_DEFINITION = SwapFixedIborDefinition.from(SETTLEMENT_DATE, SWAP_TENOR,
EUR1YEURIBOR6M, NOTIONAL, RATE, FIXED_IS_PAYER);
private static final SwapFixedIborDefinition SWAP_RECEIVER_DEFINITION = SwapFixedIborDefinition.from(SETTLEMENT_DATE, SWAP_TENOR,
EUR1YEURIBOR6M, NOTIONAL, RATE, !FIXED_IS_PAYER);
private static final SwaptionPhysicalFixedIborDefinition SWAPTION_LONG_PAYER_DEFINITION = SwaptionPhysicalFixedIborDefinition
.from(EXPIRY_DATE, SWAP_PAYER_DEFINITION, true, IS_LONG);
private static final SwaptionPhysicalFixedIborDefinition SWAPTION_LONG_RECEIVER_DEFINITION = SwaptionPhysicalFixedIborDefinition
.from(EXPIRY_DATE, SWAP_RECEIVER_DEFINITION, false, IS_LONG);
private static final SwaptionPhysicalFixedIborDefinition SWAPTION_SHORT_PAYER_DEFINITION = SwaptionPhysicalFixedIborDefinition
.from(EXPIRY_DATE, SWAP_PAYER_DEFINITION, true, !IS_LONG);
private static final SwaptionPhysicalFixedIborDefinition SWAPTION_SHORT_RECEIVER_DEFINITION = SwaptionPhysicalFixedIborDefinition
.from(EXPIRY_DATE, SWAP_RECEIVER_DEFINITION, false, !IS_LONG);
// Derivative (time-indexed) versions as seen from the reference date.
private static final SwapFixedCoupon<Coupon> SWAP_RECEIVER = SWAP_RECEIVER_DEFINITION.toDerivative(REFERENCE_DATE);
private static final SwaptionPhysicalFixedIbor SWAPTION_LONG_PAYER = SWAPTION_LONG_PAYER_DEFINITION.toDerivative(REFERENCE_DATE);
private static final SwaptionPhysicalFixedIbor SWAPTION_LONG_RECEIVER = SWAPTION_LONG_RECEIVER_DEFINITION.toDerivative(REFERENCE_DATE);
private static final SwaptionPhysicalFixedIbor SWAPTION_SHORT_PAYER = SWAPTION_SHORT_PAYER_DEFINITION.toDerivative(REFERENCE_DATE);
private static final SwaptionPhysicalFixedIbor SWAPTION_SHORT_RECEIVER = SWAPTION_SHORT_RECEIVER_DEFINITION.toDerivative(REFERENCE_DATE);
// Calculator
private static final SwaptionPhysicalFixedIborHullWhiteMethod METHOD_HW = SwaptionPhysicalFixedIborHullWhiteMethod.getInstance();
private static final SwapFixedCouponDiscountingMethod METHOD_SWAP = SwapFixedCouponDiscountingMethod.getInstance();
private static final CashFlowEquivalentCalculator CFEC = CashFlowEquivalentCalculator.getInstance();
private static final ParRateDiscountingCalculator PRDC = ParRateDiscountingCalculator.getInstance();
private static final PresentValueDiscountingCalculator PVDC = PresentValueDiscountingCalculator.getInstance();
private static final PresentValueCurveSensitivityDiscountingCalculator PVCSDC = PresentValueCurveSensitivityDiscountingCalculator
.getInstance();
private static final PresentValueHullWhiteCalculator PVHWC = PresentValueHullWhiteCalculator.getInstance();
private static final PresentValueCurveSensitivityHullWhiteCalculator PVCSHWC = PresentValueCurveSensitivityHullWhiteCalculator
.getInstance();
// Finite-difference shift used by the parameter-sensitivity FD calculator below.
private static final double SHIFT = 1.0E-6;
private static final ParameterSensitivityParameterCalculator<HullWhiteOneFactorProviderInterface> PS_HW_C = new ParameterSensitivityParameterCalculator<>(
PVCSHWC);
private static final ParameterSensitivityHullWhiteDiscountInterpolatedFDCalculator PS_HW_FDC = new ParameterSensitivityHullWhiteDiscountInterpolatedFDCalculator(
PVHWC, SHIFT);
// Alternative pricing methods compared against the explicit formula: numerical integration, approximation and Monte Carlo.
private static final SwaptionPhysicalFixedIborHullWhiteNumericalIntegrationMethod METHOD_HW_INTEGRATION = SwaptionPhysicalFixedIborHullWhiteNumericalIntegrationMethod
.getInstance();
private static final SwaptionPhysicalFixedIborHullWhiteApproximationMethod METHOD_HW_APPROXIMATION = SwaptionPhysicalFixedIborHullWhiteApproximationMethod
.getInstance();
private static final int NB_PATH = 12500;
private static final HullWhiteMonteCarloMethod METHOD_HW_MONTECARLO = new HullWhiteMonteCarloMethod(
new NormalRandomNumberGenerator(0.0, 1.0), NB_PATH);
private static final HullWhiteOneFactorPiecewiseConstantInterestRateModel MODEL = new HullWhiteOneFactorPiecewiseConstantInterestRateModel();
private static final ProbabilityDistribution<Double> NORMAL = new NormalDistribution(0, 1);
private static final double TOLERANCE_PV = 1.0E-2;
private static final double TOLERANCE_PV_DELTA = 1.0E+0; // Testing note: Sensitivity is for a movement of 1. 1E+2 = 1 cent for a 1 bp
// move.
/**
 * Tests the present value from the explicit formula against a locally re-derived Hull-White price
 * built from the cash-flow equivalent annuity of the underlying swap.
 */
@Test
public void presentValueExplicit() {
final MultipleCurrencyAmount pv = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
final double timeToExpiry = SWAPTION_LONG_PAYER.getTimeToExpiry();
// Cash-flow equivalent: the swap rewritten as a set of fixed payments.
final AnnuityPaymentFixed cfe = CFEC.visitSwap(SWAPTION_LONG_PAYER.getUnderlyingSwap(), MULTICURVES);
final int numberOfPayments = cfe.getNumberOfPayments();
final double[] alpha = new double[numberOfPayments];
final double[] disccf = new double[numberOfPayments];
for (int loopcf = 0; loopcf < numberOfPayments; loopcf++) {
// alpha: model volatility term for each payment date; disccf: discounted cash-flow amount.
alpha[loopcf] = MODEL.alpha(HW_PARAMETERS, 0.0, timeToExpiry, timeToExpiry, cfe.getNthPayment(loopcf).getPaymentTime());
disccf[loopcf] = MULTICURVES.getDiscountFactor(EUR, cfe.getNthPayment(loopcf).getPaymentTime())
* cfe.getNthPayment(loopcf).getAmount();
}
// kappa: the exercise boundary in the Hull-White one-factor model.
final double kappa = MODEL.kappa(disccf, alpha);
double pvExpected = 0.0;
for (int loopcf = 0; loopcf < numberOfPayments; loopcf++) {
pvExpected += disccf[loopcf] * NORMAL.getCDF(-kappa - alpha[loopcf]);
}
assertEquals("Swaption physical - Hull-White - present value", pvExpected, pv.getAmount(EUR), 1E-2);
// The overload taking a pre-computed cash-flow equivalent must agree exactly with the plain overload.
final MultipleCurrencyAmount pv2 = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, cfe, HW_MULTICURVES);
assertEquals("Swaption physical - Hull-White - present value", pv, pv2);
}
/**
 * Checks long/short parity: a long payer swaption and the same swaption held short
 * must have present values of equal magnitude and opposite sign.
 */
@Test
public void longShortParityExplicit() {
  final MultipleCurrencyAmount longPv = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
  final MultipleCurrencyAmount shortPv = METHOD_HW.presentValue(SWAPTION_SHORT_PAYER, HW_MULTICURVES);
  final double expected = -shortPv.getAmount(EUR);
  assertEquals("Swaption physical - Hull-White - present value - long/short parity", longPv.getAmount(EUR), expected,
      TOLERANCE_PV);
}
/**
 * Checks payer/receiver/swap parity: long receiver + short payer replicates receiving the
 * underlying receiver swap.
 */
@Test
public void payerReceiverParityExplicit() {
  final MultipleCurrencyAmount longReceiverPv = METHOD_HW.presentValue(SWAPTION_LONG_RECEIVER, HW_MULTICURVES);
  final MultipleCurrencyAmount shortPayerPv = METHOD_HW.presentValue(SWAPTION_SHORT_PAYER, HW_MULTICURVES);
  final MultipleCurrencyAmount swapPv = SWAP_RECEIVER.accept(PVDC, MULTICURVES);
  final double portfolioPv = longReceiverPv.getAmount(EUR) + shortPayerPv.getAmount(EUR);
  assertEquals("Swaption physical - Hull-White - present value - payer/receiver/swap parity",
      portfolioPv, swapPv.getAmount(EUR), TOLERANCE_PV);
}
/**
 * Tests that the method-based present value and the calculator-based present value agree exactly.
 */
@Test
public void presentValueMethodVsCalculator() {
  final MultipleCurrencyAmount pvMethod = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
  final MultipleCurrencyAmount pvCalculator = SWAPTION_LONG_PAYER.accept(PVHWC, HW_MULTICURVES);
  // Fixed: the failure message previously said "SABRMethod", copy-pasted from the SABR test class;
  // this class tests the Hull-White method.
  assertEquals("SwaptionPhysicalFixedIborHullWhiteMethod: present value : method and calculator", pvMethod, pvCalculator);
}
/**
 * Compares the explicit formula with the numerical-integration method for all four instruments
 * (long/short payer and long/short receiver).
 *
 * Fixed copy-paste bug: the original computed every pair from {@code SWAPTION_LONG_PAYER}, so the
 * short-payer, long-receiver and short-receiver cases were never actually tested.
 */
@Test
public void presentValueNumericalIntegration() {
  final MultipleCurrencyAmount pvPayerLongExplicit = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
  final MultipleCurrencyAmount pvPayerLongIntegration = METHOD_HW_INTEGRATION.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
  assertEquals("Swaption physical - Hull-White - present value - explicit/numerical integration", pvPayerLongExplicit.getAmount(EUR),
      pvPayerLongIntegration.getAmount(EUR), TOLERANCE_PV);
  final MultipleCurrencyAmount pvPayerShortExplicit = METHOD_HW.presentValue(SWAPTION_SHORT_PAYER, HW_MULTICURVES);
  final MultipleCurrencyAmount pvPayerShortIntegration = METHOD_HW_INTEGRATION.presentValue(SWAPTION_SHORT_PAYER, HW_MULTICURVES);
  assertEquals("Swaption physical - Hull-White - present value - explicit/numerical integration", pvPayerShortExplicit.getAmount(EUR),
      pvPayerShortIntegration.getAmount(EUR), TOLERANCE_PV);
  final MultipleCurrencyAmount pvReceiverLongExplicit = METHOD_HW.presentValue(SWAPTION_LONG_RECEIVER, HW_MULTICURVES);
  final MultipleCurrencyAmount pvReceiverLongIntegration = METHOD_HW_INTEGRATION.presentValue(SWAPTION_LONG_RECEIVER, HW_MULTICURVES);
  assertEquals("Swaption physical - Hull-White - present value - explicit/numerical integration", pvReceiverLongExplicit.getAmount(EUR),
      pvReceiverLongIntegration.getAmount(EUR), TOLERANCE_PV);
  final MultipleCurrencyAmount pvReceiverShortExplicit = METHOD_HW.presentValue(SWAPTION_SHORT_RECEIVER, HW_MULTICURVES);
  final MultipleCurrencyAmount pvReceiverShortIntegration = METHOD_HW_INTEGRATION.presentValue(SWAPTION_SHORT_RECEIVER, HW_MULTICURVES);
  assertEquals("Swaption physical - Hull-White - present value - explicit/numerical integration", pvReceiverShortExplicit.getAmount(EUR),
      pvReceiverShortIntegration.getAmount(EUR), TOLERANCE_PV);
}
/**
 * Compares the explicit formula with the approximated formula, both on price and on Black implied
 * volatility for the payer, and on price for the receiver.
 *
 * Fixed copy-paste bug: the "receiver long" values were previously computed from
 * {@code SWAPTION_LONG_PAYER}, so the receiver approximation was never tested.
 */
@Test
public void presentValueApproximation() {
  final BlackImpliedVolatilityFormula implied = new BlackImpliedVolatilityFormula();
  final double forward = SWAPTION_LONG_PAYER.getUnderlyingSwap().accept(ParRateDiscountingCalculator.getInstance(), MULTICURVES);
  final double pvbp = METHOD_SWAP.presentValueBasisPoint(SWAPTION_LONG_PAYER.getUnderlyingSwap(), MULTICURVES);
  final MultipleCurrencyAmount pvPayerLongExplicit = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
  final MultipleCurrencyAmount pvPayerLongApproximation = METHOD_HW_APPROXIMATION.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
  final BlackFunctionData data = new BlackFunctionData(forward, pvbp, 0.20);
  // Back out Black implied volatilities from both prices to compare on a vol scale as well.
  final double volExplicit = implied.getImpliedVolatility(data, SWAPTION_LONG_PAYER, pvPayerLongExplicit.getAmount(EUR));
  final double volApprox = implied.getImpliedVolatility(data, SWAPTION_LONG_PAYER, pvPayerLongApproximation.getAmount(EUR));
  assertEquals("Swaption physical - Hull-White - present value - explicit/approximation", pvPayerLongExplicit.getAmount(EUR),
      pvPayerLongApproximation.getAmount(EUR), 5.0E+2);
  assertEquals("Swaption physical - Hull-White - present value - explicit/approximation", volExplicit, volApprox, 2.5E-4); // 0.025%
  final MultipleCurrencyAmount pvReceiverLongExplicit = METHOD_HW.presentValue(SWAPTION_LONG_RECEIVER, HW_MULTICURVES);
  final MultipleCurrencyAmount pvReceiverLongApproximation = METHOD_HW_APPROXIMATION.presentValue(SWAPTION_LONG_RECEIVER, HW_MULTICURVES);
  assertEquals("Swaption physical - Hull-White - present value - explicit/numerical integration", pvReceiverLongExplicit.getAmount(EUR),
      pvReceiverLongApproximation.getAmount(EUR), 5.0E+2);
}
/**
 * Analyses the approximation error across a strike range: for each strike the explicit and
 * approximated prices are converted to normal implied volatilities and compared.
 */
@Test
public void presentValueApproximationAnalysis() {
final NormalImpliedVolatilityFormula implied = new NormalImpliedVolatilityFormula();
final int nbStrike = 20;
final double[] pvExplicit = new double[nbStrike + 1];
final double[] pvApproximation = new double[nbStrike + 1];
final double[] strike = new double[nbStrike + 1];
final double[] volExplicit = new double[nbStrike + 1];
final double[] volApprox = new double[nbStrike + 1];
final double strikeRange = 0.010;
final SwapFixedCoupon<Coupon> swap = SWAP_PAYER_DEFINITION.toDerivative(REFERENCE_DATE);
final double forward = swap.accept(PRDC, MULTICURVES);
final double pvbp = METHOD_SWAP.presentValueBasisPoint(swap, MULTICURVES);
for (int loopstrike = 0; loopstrike <= nbStrike; loopstrike++) {
strike[loopstrike] = forward - strikeRange + 3 * strikeRange * loopstrike / nbStrike; // From forward-strikeRange to
// forward+2*strikeRange
// Build a fresh swaption at this strike, price it both ways, and compare implied vols.
final SwapFixedIborDefinition swapDefinition = SwapFixedIborDefinition.from(SETTLEMENT_DATE, SWAP_TENOR, EUR1YEURIBOR6M, NOTIONAL,
strike[loopstrike], FIXED_IS_PAYER);
final SwaptionPhysicalFixedIborDefinition swaptionDefinition = SwaptionPhysicalFixedIborDefinition.from(EXPIRY_DATE, swapDefinition,
FIXED_IS_PAYER, IS_LONG);
final SwaptionPhysicalFixedIbor swaption = swaptionDefinition.toDerivative(REFERENCE_DATE);
pvExplicit[loopstrike] = METHOD_HW.presentValue(swaption, HW_MULTICURVES).getAmount(EUR);
pvApproximation[loopstrike] = METHOD_HW_APPROXIMATION.presentValue(swaption, HW_MULTICURVES).getAmount(EUR);
final NormalFunctionData data = new NormalFunctionData(forward, pvbp, 0.01);
volExplicit[loopstrike] = implied.getImpliedVolatility(data, swaption, pvExplicit[loopstrike]);
volApprox[loopstrike] = implied.getImpliedVolatility(data, swaption, pvApproximation[loopstrike]);
assertEquals("Swaption physical - Hull-White - implied volatility - explicit/approximation", volExplicit[loopstrike],
volApprox[loopstrike], 1.0E-3); // 0.10%
}
}
/**
 * Compares the explicit formula with Monte-Carlo pricing and checks long/short and
 * payer/receiver/swap parities on the Monte-Carlo prices.
 *
 * NOTE(review): the hard-coded regression value below is tied to the Mersenne-Twister default
 * seed; changing the generator or path count will break it.
 */
@Test(enabled = true)
public void presentValueMonteCarlo() {
HullWhiteMonteCarloMethod methodMC;
methodMC = new HullWhiteMonteCarloMethod(new NormalRandomNumberGenerator(0.0, 1.0, new MersenneTwister()), NB_PATH);
// Seed fixed to the DEFAULT_SEED for testing purposes.
final MultipleCurrencyAmount pvPayerLongExplicit = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
final MultipleCurrencyAmount pvPayerLongMC = methodMC.presentValue(SWAPTION_LONG_PAYER, EUR, HW_MULTICURVES);
assertEquals("Swaption physical - Hull-White - Monte Carlo", pvPayerLongExplicit.getAmount(EUR), pvPayerLongMC.getAmount(EUR), 1.0E+4);
// Regression value from a previous run with the same fixed seed and NB_PATH.
final double pvMCPreviousRun = 4221400.891;
assertEquals("Swaption physical - Hull-White - Monte Carlo", pvMCPreviousRun, pvPayerLongMC.getAmount(EUR), TOLERANCE_PV);
// Re-seed so the short-payer run uses the same random draws as the long-payer run.
methodMC = new HullWhiteMonteCarloMethod(new NormalRandomNumberGenerator(0.0, 1.0, new MersenneTwister()), NB_PATH);
final MultipleCurrencyAmount pvPayerShortMC = methodMC.presentValue(SWAPTION_SHORT_PAYER, EUR, HW_MULTICURVES);
assertEquals("Swaption physical - Hull-White - Monte Carlo", -pvPayerLongMC.getAmount(EUR), pvPayerShortMC.getAmount(EUR),
TOLERANCE_PV);
final MultipleCurrencyAmount pvReceiverLongMC = methodMC.presentValue(SWAPTION_LONG_RECEIVER, EUR, HW_MULTICURVES);
final MultipleCurrencyAmount pvSwap = SWAP_RECEIVER.accept(PVDC, MULTICURVES);
assertEquals("Swaption physical - Hull-White - Monte Carlo - payer/receiver/swap parity",
pvReceiverLongMC.getAmount(EUR) + pvPayerShortMC.getAmount(EUR), pvSwap.getAmount(EUR), 1.0E+5);
}
/**
 * Tests the Hull-White volatility-parameter sensitivity of the explicit formula against a
 * central finite difference: each volatility pillar is bumped up and down by {@code shiftVol}
 * and the symmetric difference quotient is compared to the adjoint sensitivity.
 */
@Test
public void presentValueHullWhiteSensitivityExplicit() {
final double[] hwSensitivity = METHOD_HW.presentValueHullWhiteSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
final int nbVolatility = HW_PARAMETERS.getVolatility().length;
final double shiftVol = 1.0E-6;
final double[] volatilityBumped = new double[nbVolatility];
System.arraycopy(HW_PARAMETERS.getVolatility(), 0, volatilityBumped, 0, nbVolatility);
// Volatility times: skip the first entry, as required by the parameters constructor.
final double[] volatilityTime = new double[nbVolatility - 1];
System.arraycopy(HW_PARAMETERS.getVolatilityTime(), 1, volatilityTime, 0, nbVolatility - 1);
final double[] pvBumpedPlus = new double[nbVolatility];
final double[] pvBumpedMinus = new double[nbVolatility];
final HullWhiteOneFactorPiecewiseConstantParameters parametersBumped = new HullWhiteOneFactorPiecewiseConstantParameters(
HW_PARAMETERS.getMeanReversion(), volatilityBumped, volatilityTime);
final HullWhiteOneFactorProviderDiscount bundleBumped = new HullWhiteOneFactorProviderDiscount(MULTICURVES, parametersBumped, EUR);
for (int loopvol = 0; loopvol < nbVolatility; loopvol++) {
// Bump up, price; bump down (minus twice the shift), price; then restore before the next pillar.
volatilityBumped[loopvol] += shiftVol;
parametersBumped.setVolatility(volatilityBumped);
pvBumpedPlus[loopvol] = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, bundleBumped).getAmount(EUR);
volatilityBumped[loopvol] -= 2 * shiftVol;
parametersBumped.setVolatility(volatilityBumped);
pvBumpedMinus[loopvol] = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, bundleBumped).getAmount(EUR);
assertEquals(
"Swaption - Hull-White sensitivity adjoint: derivative " + loopvol + " - difference:"
+ ((pvBumpedPlus[loopvol] - pvBumpedMinus[loopvol]) / (2 * shiftVol) - hwSensitivity[loopvol]),
(pvBumpedPlus[loopvol] - pvBumpedMinus[loopvol]) / (2 * shiftVol), hwSensitivity[loopvol], TOLERANCE_PV_DELTA);
volatilityBumped[loopvol] = HW_PARAMETERS.getVolatility()[loopvol];
}
}
/**
 * Checks long/short parity of the Hull-White parameter sensitivities: the long and short payer
 * sensitivities must be of equal magnitude and opposite sign, pillar by pillar.
 */
@Test
public void presentValueHullWhiteSensitivityLongShortParityExplicit() {
  final double[] longSensitivities = METHOD_HW.presentValueHullWhiteSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
  final double[] shortSensitivities = METHOD_HW.presentValueHullWhiteSensitivity(SWAPTION_SHORT_PAYER, HW_MULTICURVES);
  for (int i = 0; i < longSensitivities.length; i++) {
    assertEquals("Swaption physical - Hull-White - presentValueHullWhiteSensitivity - long/short parity", longSensitivities[i],
        -shortSensitivities[i], TOLERANCE_PV_DELTA);
  }
}
/**
 * Checks payer/receiver parity of the Hull-White parameter sensitivities: the sensitivities of a
 * long receiver and a short payer cancel (the residual swap has no model-parameter sensitivity).
 */
@Test
public void presentValueHullWhiteSensitivityPayerReceiverParityExplicit() {
  final double[] receiverLongSensitivities = METHOD_HW.presentValueHullWhiteSensitivity(SWAPTION_LONG_RECEIVER, HW_MULTICURVES);
  final double[] payerShortSensitivities = METHOD_HW.presentValueHullWhiteSensitivity(SWAPTION_SHORT_PAYER, HW_MULTICURVES);
  for (int i = 0; i < receiverLongSensitivities.length; i++) {
    final double sum = receiverLongSensitivities[i] + payerShortSensitivities[i];
    assertEquals("Swaption physical - Hull-White - present value - payer/receiver/swap parity", 0,
        sum, TOLERANCE_PV_DELTA);
  }
}
/**
 * Tests the present value curve sensitivity against a finite-difference computation when the
 * valuation date is on the trade date.
 */
@Test
public void presentValueCurveSensitivity() {
  final MultipleCurrencyParameterSensitivity pvpsExact = PS_HW_C.calculateSensitivity(SWAPTION_SHORT_RECEIVER, HW_MULTICURVES,
      HW_MULTICURVES.getMulticurveProvider().getAllNames());
  final MultipleCurrencyParameterSensitivity pvpsFD = PS_HW_FDC.calculateSensitivity(SWAPTION_SHORT_RECEIVER, HW_MULTICURVES);
  // Fixed: the failure message previously said "SABRMethod", copy-pasted from the SABR test class;
  // this class tests the Hull-White method.
  AssertSensitivityObjects.assertEquals("SwaptionPhysicalFixedIborHullWhiteMethod: presentValueCurveSensitivity ", pvpsExact, pvpsFD,
      TOLERANCE_PV_DELTA);
}
/**
 * Analyses the stability of the finite-difference curve sensitivity as the shift size decreases,
 * for a 5Yx5Y and a short-expiry swaption. Disabled: this is an exploratory analysis (the
 * computed diffs are not asserted), kept for manual investigation.
 */
@Test(enabled = false)
public void presentValueCurveSensitivityStability() {
// 5Yx5Y
final MultipleCurrencyParameterSensitivity pvpsExact = PS_HW_C.calculateSensitivity(SWAPTION_SHORT_RECEIVER, HW_MULTICURVES,
HW_MULTICURVES.getMulticurveProvider().getAllNames());
final double derivativeExact = pvpsExact.totalSensitivity(MULTICURVES.getFxRates(), EUR);
final double startingShift = 1.0E-4;
final double ratio = Math.sqrt(2.0);
final int nbShift = 55;
final double[] eps = new double[nbShift + 1];
final double[] derivative_FD = new double[nbShift];
final double[] diff = new double[nbShift];
eps[0] = startingShift;
// Shrink the FD shift geometrically and record the deviation from the exact sensitivity.
for (int loopshift = 0; loopshift < nbShift; loopshift++) {
final ParameterSensitivityHullWhiteDiscountInterpolatedFDCalculator fdShift = new ParameterSensitivityHullWhiteDiscountInterpolatedFDCalculator(
PVHWC, eps[loopshift]);
final MultipleCurrencyParameterSensitivity pvpsFD = fdShift.calculateSensitivity(SWAPTION_SHORT_RECEIVER, HW_MULTICURVES);
derivative_FD[loopshift] = pvpsFD.totalSensitivity(MULTICURVES.getFxRates(), EUR);
diff[loopshift] = derivative_FD[loopshift] - derivativeExact;
eps[loopshift + 1] = eps[loopshift] / ratio;
}
// 1Mx5Y
final Period expirationPeriod = Period.ofDays(1); // Period.ofDays(1); Period.ofDays(7); Period.ofMonths(1); Period.ofYears(1);
// Period.ofYears(10);
final ZonedDateTime expiryDateExp = ScheduleCalculator.getAdjustedDate(REFERENCE_DATE, expirationPeriod, EURIBOR6M, CALENDAR);
final ZonedDateTime settlementDateExp = ScheduleCalculator.getAdjustedDate(expiryDateExp, SPOT_LAG, CALENDAR);
final double atm = 0.0151; // 1W: 1.52% - 1M: 1.52% - 1Y: 1.51% - 10Y: 1.51%
final SwapFixedIborDefinition swapExpx5YDefinition = SwapFixedIborDefinition.from(settlementDateExp, SWAP_TENOR, EUR1YEURIBOR6M,
NOTIONAL, atm, !FIXED_IS_PAYER);
// NOTE(review): the swaption is built with EXPIRY_DATE, not expiryDateExp — confirm this is intended.
final SwaptionPhysicalFixedIborDefinition swaptionExpx5YDefinition = SwaptionPhysicalFixedIborDefinition.from(EXPIRY_DATE,
swapExpx5YDefinition, !FIXED_IS_PAYER, !IS_LONG);
final SwaptionPhysicalFixedIbor swaptionExpx5Y = swaptionExpx5YDefinition.toDerivative(REFERENCE_DATE);
// final double forward = swaptionExpx5Y.getUnderlyingSwap().accept(PRDC, MULTICURVES);
final MultipleCurrencyParameterSensitivity pvpsExactExp = PS_HW_C.calculateSensitivity(swaptionExpx5Y, HW_MULTICURVES,
HW_MULTICURVES.getMulticurveProvider().getAllNames());
final double derivativeExactExp = pvpsExactExp.totalSensitivity(MULTICURVES.getFxRates(), EUR);
final double[] derivative_FDExp = new double[nbShift];
final double[] diffExp = new double[nbShift];
for (int loopshift = 0; loopshift < nbShift; loopshift++) {
final ParameterSensitivityHullWhiteDiscountInterpolatedFDCalculator fdShift = new ParameterSensitivityHullWhiteDiscountInterpolatedFDCalculator(
PVHWC, eps[loopshift]);
final MultipleCurrencyParameterSensitivity pvpsFD = fdShift.calculateSensitivity(swaptionExpx5Y, HW_MULTICURVES);
derivative_FDExp[loopshift] = pvpsFD.totalSensitivity(MULTICURVES.getFxRates(), EUR);
diffExp[loopshift] = derivative_FDExp[loopshift] - derivativeExactExp;
}
// int t = 0;
// t++;
}
/**
 * Checks long/short parity of the curve sensitivities: the long payer sensitivity equals the
 * short payer sensitivity scaled by -1.
 */
@Test
public void presentValueCurveSensitivityLongShortParityExplicit() {
  final MultipleCurrencyMulticurveSensitivity longSensitivity = METHOD_HW.presentValueCurveSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
  final MultipleCurrencyMulticurveSensitivity shortSensitivity = METHOD_HW.presentValueCurveSensitivity(SWAPTION_SHORT_PAYER, HW_MULTICURVES);
  final MultipleCurrencyMulticurveSensitivity negatedShort = shortSensitivity.multipliedBy(-1.0);
  AssertSensitivityObjects.assertEquals("Swaption physical - Hull-White - presentValueCurveSensitivity - long/short parity", longSensitivity,
      negatedShort, TOLERANCE_PV_DELTA);
}
/**
 * Checks payer/receiver/swap parity of the curve sensitivities: long receiver plus short payer
 * must match the receiver swap's curve sensitivity.
 */
@Test
public void presentValueCurveSensitivityPayerReceiverParityExplicit() {
  final MultipleCurrencyMulticurveSensitivity receiverLongSensitivity = METHOD_HW.presentValueCurveSensitivity(SWAPTION_LONG_RECEIVER,
      HW_MULTICURVES);
  final MultipleCurrencyMulticurveSensitivity payerShortSensitivity = METHOD_HW.presentValueCurveSensitivity(SWAPTION_SHORT_PAYER,
      HW_MULTICURVES);
  final MultipleCurrencyMulticurveSensitivity swapSensitivity = SWAP_RECEIVER.accept(PVCSDC, MULTICURVES);
  // Clean both sides before comparing to drop negligible entries below the tolerance.
  final MultipleCurrencyMulticurveSensitivity combined = receiverLongSensitivity.plus(payerShortSensitivity).cleaned(TOLERANCE_PV_DELTA);
  AssertSensitivityObjects.assertEquals("Swaption physical - Hull-White - presentValueCurveSensitivity - payer/receiver/swap parity",
      swapSensitivity.cleaned(TOLERANCE_PV_DELTA),
      combined, TOLERANCE_PV_DELTA);
}
/**
 * Tests the curve sensitivity computed by the Monte-Carlo approach against the explicit formula.
 */
@Test
public void presentValueCurveSensitivityMonteCarlo() {
  final double toleranceDelta = 1.0E+6; // 100 USD by bp
  final MultipleCurrencyMulticurveSensitivity pvcsExplicit = METHOD_HW.presentValueCurveSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES)
      .cleaned(TOLERANCE_PV_DELTA);
  final HullWhiteMonteCarloMethod methodMC = new HullWhiteMonteCarloMethod(
      new NormalRandomNumberGenerator(0.0, 1.0, new MersenneTwister()), NB_PATH);
  final MultipleCurrencyMulticurveSensitivity pvcsMC = methodMC.presentValueCurveSensitivity(SWAPTION_LONG_PAYER, EUR, HW_MULTICURVES)
      .cleaned(TOLERANCE_PV_DELTA);
  // Fixed: the failure message previously said "payer/receiver/swap parity", copy-pasted from the
  // parity test; this test compares the explicit and Monte-Carlo curve sensitivities.
  AssertSensitivityObjects.assertEquals("Swaption physical - Hull-White - presentValueCurveSensitivity - explicit/Monte Carlo",
      pvcsExplicit, pvcsMC, toleranceDelta);
}
/**
 * Benchmarks the explicit, numerical-integration, approximation and Monte-Carlo pricing methods
 * plus the sensitivity computations. "enabled = false" for the standard testing: this prints
 * timings and makes no assertions.
 */
@Test(enabled = false)
public void performance() {
long startTime, endTime;
final int nbTest = 1000;
// Accumulators assigned inside the loops so the JIT cannot eliminate the priced calls.
MultipleCurrencyAmount pvPayerLongExplicit = MultipleCurrencyAmount.of(EUR, 0.0);
MultipleCurrencyAmount pvPayerLongIntegration = MultipleCurrencyAmount.of(EUR, 0.0);
MultipleCurrencyAmount pvPayerLongApproximation = MultipleCurrencyAmount.of(EUR, 0.0);
@SuppressWarnings("unused")
MultipleCurrencyAmount pvPayerLongMC = MultipleCurrencyAmount.of(EUR, 0.0);
double[] pvhws = METHOD_HW.presentValueHullWhiteSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
MultipleCurrencyMulticurveSensitivity pvcs = METHOD_HW.presentValueCurveSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
startTime = System.currentTimeMillis();
for (int looptest = 0; looptest < nbTest; looptest++) {
pvPayerLongExplicit = METHOD_HW.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
}
endTime = System.currentTimeMillis();
System.out.println(nbTest + " pv swaption Hull-White explicit method: " + (endTime - startTime) + " ms");
// Performance note: HW price: 19-Nov-2012: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 380 ms for 10000 swaptions.
startTime = System.currentTimeMillis();
for (int looptest = 0; looptest < nbTest; looptest++) {
pvhws = METHOD_HW.presentValueHullWhiteSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
}
endTime = System.currentTimeMillis();
System.out.println(nbTest + " HW sensitivity swaption Hull-White explicit method: " + (endTime - startTime) + " ms");
// Performance note: HW sensitivity (3): 19-Nov-2012: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 430 ms for 10000 swaptions.
startTime = System.currentTimeMillis();
for (int looptest = 0; looptest < nbTest; looptest++) {
pvcs = METHOD_HW.presentValueCurveSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
}
endTime = System.currentTimeMillis();
System.out.println(nbTest + " curve sensitivity swaption Hull-White explicit method: " + (endTime - startTime) + " ms");
// Performance note: curve sensitivity (40): 19-Nov-2012: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 855 ms for 10000 swaptions.
startTime = System.currentTimeMillis();
// Combined price/delta/vega timing (HW sensitivity is computed twice per iteration in the original).
for (int looptest = 0; looptest < nbTest; looptest++) {
pvhws = METHOD_HW.presentValueHullWhiteSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
pvcs = METHOD_HW.presentValueCurveSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
pvhws = METHOD_HW.presentValueHullWhiteSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
}
endTime = System.currentTimeMillis();
System.out.println(nbTest + " price/delta/vega swaption Hull-White explicit method: " + (endTime - startTime) + " ms");
// Performance note: present value/delta/vega: 19-Nov-2012: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 1730 ms for 10000 swaptions.
startTime = System.currentTimeMillis();
for (int looptest = 0; looptest < nbTest; looptest++) {
pvPayerLongIntegration = METHOD_HW_INTEGRATION.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
}
endTime = System.currentTimeMillis();
System.out.println(nbTest + " swaption Hull-White numerical integration method: " + (endTime - startTime) + " ms");
// Performance note: HW numerical integration: 19-Nov-2012: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 1700 ms for 10000 swaptions.
startTime = System.currentTimeMillis();
for (int looptest = 0; looptest < nbTest; looptest++) {
pvPayerLongApproximation = METHOD_HW_APPROXIMATION.presentValue(SWAPTION_LONG_PAYER, HW_MULTICURVES);
}
endTime = System.currentTimeMillis();
System.out.println(nbTest + " swaption Hull-White approximation method: " + (endTime - startTime) + " ms");
// Performance note: HW approximation: 19-Nov-2012: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 250 ms for 10000 swaptions.
startTime = System.currentTimeMillis();
for (int looptest = 0; looptest < nbTest; looptest++) {
pvPayerLongMC = METHOD_HW_MONTECARLO.presentValue(SWAPTION_LONG_PAYER, EUR, HW_MULTICURVES);
}
endTime = System.currentTimeMillis();
System.out.println(nbTest + " swaption Hull-White Monte Carlo method (" + NB_PATH + " paths): " + (endTime - startTime) + " ms");
// Performance note: HW approximation: 18-Aug-11: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 9200 ms for 1000 swaptions (12500 paths).
final double difference = pvPayerLongExplicit.getAmount(EUR) - pvPayerLongIntegration.getAmount(EUR);
final double difference2 = pvPayerLongExplicit.getAmount(EUR) - pvPayerLongApproximation.getAmount(EUR);
// double difference3 = pvPayerLongExplicit.getAmount(CUR) - pvPayerLongMC.getAmount(CUR);
System.out.println("Difference explicit-integration: " + difference);
System.out.println("Difference explicit-approximation: " + difference2);
// System.out.println("Difference explicit-Monte Carlo: " + difference3);
System.out.println("Curve sensitivity: " + pvcs.toString());
System.out.println("HW sensitivity: " + Arrays.toString(pvhws));
}
/**
 * Benchmarks the Monte-Carlo present value and curve sensitivity computations.
 * "enabled = false" for the standard testing: this prints timings and makes no assertions.
 */
@Test(enabled = false)
public void performanceCurveSensitivity() {
long startTime, endTime;
final int nbTest = 25;
// Accumulators assigned inside the loops so the JIT cannot eliminate the priced calls.
MultipleCurrencyAmount pvMC = MultipleCurrencyAmount.of(EUR, 0.0);
final MultipleCurrencyMulticurveSensitivity pvcsExplicit = METHOD_HW.presentValueCurveSensitivity(SWAPTION_LONG_PAYER, HW_MULTICURVES);
MultipleCurrencyMulticurveSensitivity pvcsMC = pvcsExplicit;
final HullWhiteMonteCarloMethod methodMC = new HullWhiteMonteCarloMethod(
new NormalRandomNumberGenerator(0.0, 1.0, new MersenneTwister()), NB_PATH);
startTime = System.currentTimeMillis();
for (int looptest = 0; looptest < nbTest; looptest++) {
pvMC = METHOD_HW_MONTECARLO.presentValue(SWAPTION_LONG_PAYER, EUR, HW_MULTICURVES);
}
endTime = System.currentTimeMillis();
System.out.println(nbTest + " swaption Hull-White Monte Carlo method (" + NB_PATH + " paths): " + (endTime - startTime) + " ms / price:"
+ pvMC.toString());
// Performance note: HW approximation: 03-Dec-2012: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 250 ms for 25 swaptions (12500 paths).
startTime = System.currentTimeMillis();
for (int looptest = 0; looptest < nbTest; looptest++) {
pvcsMC = methodMC.presentValueCurveSensitivity(SWAPTION_LONG_PAYER, EUR, HW_MULTICURVES);
}
endTime = System.currentTimeMillis();
System.out.println(nbTest + " curve sensitivity swaption Hull-White MC method: (" + NB_PATH + " paths) " + (endTime - startTime)
+ " ms / risk:" + pvcsMC.toString());
// Performance note: curve sensitivity (40): 03-Dec-2012: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 600 ms for 25 swaptions (12500
// paths).
}
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.android;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.jvm.java.testutil.AbiCompilationModeTest;
import com.facebook.buck.testutil.ProcessResult;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.testutil.integration.ZipInspector;
import java.io.IOException;
import java.nio.file.Path;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
/**
 * Integration tests verifying which native (C++) libraries end up packaged inside
 * an Android APK built by Buck: as regular {@code lib/<abi>/} entries, as assets,
 * or not at all, across compiler/runtime/filter configurations.
 */
public class AndroidApkCxxIntegrationTest extends AbiCompilationModeTest {

  @Rule public TemporaryPaths tmpFolder = new TemporaryPaths();

  private ProjectWorkspace workspace;
  private ProjectFilesystem filesystem;

  @Before
  public void setUp() throws IOException {
    // Use "this" instead of allocating a throwaway second test instance; the object is
    // only used to locate the scenario's testdata directory, which is the same class.
    workspace =
        TestDataHelper.createProjectWorkspaceForScenario(this, "android_project", tmpFolder);
    workspace.setUp();
    AssumeAndroidPlatform.get(workspace).assumeSdkIsAvailable();
    AssumeAndroidPlatform.get(workspace).assumeNdkIsAvailable();
    setWorkspaceCompilationMode(workspace);
    filesystem = workspace.getProjectFileSystem();
  }

  /**
   * Returns a {@link ZipInspector} over the APK generated for {@code target}.
   * Extracted because every test repeated the same gen-path lookup.
   */
  private ZipInspector apkInspector(String target) throws IOException {
    return new ZipInspector(
        workspace.getPath(
            BuildTargetPaths.getGenPath(
                filesystem.getBuckPaths(), BuildTargetFactory.newInstance(target), "%s.apk")));
  }

  /**
   * Asserts that the NDK C++ shared runtime (gnustl on older NDKs, libc++ otherwise) is
   * present ({@code expected == true}) or absent for both armeabi-v7a and x86.
   */
  private void assertCxxRuntime(ZipInspector zipInspector, boolean expected) throws IOException {
    String runtime =
        AssumeAndroidPlatform.get(workspace).isGnuStlAvailable()
            ? "libgnustl_shared.so"
            : "libc++_shared.so";
    for (String abi : new String[] {"armeabi-v7a", "x86"}) {
      if (expected) {
        zipInspector.assertFileExists("lib/" + abi + "/" + runtime);
      } else {
        zipInspector.assertFileDoesNotExist("lib/" + abi + "/" + runtime);
      }
    }
  }

  @Test
  public void testCxxLibraryAsAsset() throws IOException {
    String target = "//apps/sample:app_cxx_lib_asset";
    workspace.runBuckCommand("build", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    // Asset-packaged library lives under assets/, not lib/.
    zipInspector.assertFileExists("assets/lib/x86/libnative_cxx_libasset.so");
    zipInspector.assertFileDoesNotExist("lib/x86/libnative_cxx_libasset.so");
    // Ordinary deps stay under lib/ and must not leak into assets/.
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo1.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo2.so");
    zipInspector.assertFileDoesNotExist("assets/lib/x86/libnative_cxx_foo1.so");
    zipInspector.assertFileDoesNotExist("assets/lib/x86/libnative_cxx_foo2.so");
  }

  @Test
  public void testCxxLibraryAsAssetWithoutPackaging() throws IOException {
    String target = "//apps/sample:app_cxx_lib_asset_no_package";
    workspace.runBuckCommand("build", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    // Without asset packaging the library goes to the normal lib/ location.
    zipInspector.assertFileDoesNotExist("assets/lib/x86/libnative_cxx_libasset.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_libasset.so");
  }

  @Test
  public void testCxxLibraryDep() throws IOException {
    String target = "//apps/sample:app_cxx_lib_dep";
    workspace.runBuckCommand("build", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    zipInspector.assertFileExists("lib/armeabi-v7a/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_lib.so");
    // A shared-runtime build must bundle the NDK C++ runtime alongside the library.
    assertCxxRuntime(zipInspector, true);
  }

  @Test
  public void testCxxLibraryDepStaticRuntime() throws IOException {
    String target = "//apps/sample:app_cxx_lib_dep";
    workspace.runBuckCommand("build", "-c", "ndk.cxx_runtime_type=static", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    zipInspector.assertFileExists("lib/armeabi-v7a/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_lib.so");
    // With a statically linked runtime no shared runtime .so may be packaged.
    assertCxxRuntime(zipInspector, false);
  }

  @Test
  public void testCxxLibraryDepModular() throws IOException {
    String target = "//apps/sample:app_cxx_lib_dep_modular";
    workspace.runBuckCommand("build", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    // Modular packaging moves the library into the module's compressed asset bundle.
    zipInspector.assertFileDoesNotExist("lib/armeabi-v7a/libnative_cxx_lib.so");
    zipInspector.assertFileDoesNotExist("lib/x86/libnative_cxx_lib.so");
    assertCxxRuntime(zipInspector, true);
    zipInspector.assertFileExists("assets/native.cxx.lib/libs.txt");
    zipInspector.assertFileExists("assets/native.cxx.lib/libs.xzs");
  }

  @Test
  public void testCxxLibraryDepClang() throws IOException {
    String target = "//apps/sample:app_cxx_lib_dep";
    ProcessResult result =
        workspace.runBuckCommand(
            "build", "-c", "ndk.compiler=clang", "-c", "ndk.cxx_runtime=libcxx", target);
    result.assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    // clang + libcxx always produces libc++_shared, regardless of gnustl availability.
    zipInspector.assertFileExists("lib/armeabi-v7a/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/armeabi-v7a/libc++_shared.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/x86/libc++_shared.so");
  }

  @Test
  public void testCxxLibraryDepWithNoFilters() throws IOException {
    String target = "//apps/sample:app_cxx_lib_dep_no_filters";
    workspace.runBuckCommand("build", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    // No CPU filters: every ABI the platform supports should be present.
    if (AssumeAndroidPlatform.get(workspace).isArmAvailable()) {
      zipInspector.assertFileExists("lib/armeabi/libnative_cxx_lib.so");
    }
    zipInspector.assertFileExists("lib/armeabi-v7a/libnative_cxx_lib.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_lib.so");
  }

  @Test
  public void testNoCxxDepsDoesNotIncludeNdkRuntime() throws IOException {
    String target = "//apps/sample:app_no_cxx_deps";
    workspace.runBuckCommand("build", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    // An app with no C++ deps must not drag in the NDK runtime.
    assertCxxRuntime(zipInspector, false);
  }

  @Test
  public void testStaticCxxLibraryDep() throws IOException {
    String target = "//apps/sample:app_static_cxx_lib_dep";
    workspace.runBuckCommand("build", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    // Statically linked dep (bar) disappears into its consumers (foo1/foo2).
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo1.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_foo2.so");
    zipInspector.assertFileDoesNotExist("lib/x86/libnative_cxx_bar.so");
  }

  @Test
  public void testHeaderOnlyCxxLibrary() throws IOException {
    String target = "//apps/sample:app_header_only_cxx_lib_dep";
    workspace.runBuckCommand("build", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    // A header-only library produces no shared object at all.
    zipInspector.assertFileDoesNotExist("lib/x86/libnative_cxx_headeronly.so");
  }

  @Test
  public void testX86OnlyCxxLibrary() throws IOException {
    String target = "//apps/sample:app_with_x86_lib";
    workspace.runBuckCommand("build", target).assertSuccess();
    ZipInspector zipInspector = apkInspector(target);
    // Library restricted to x86 must not appear for ARM ABIs; the runtime follows suit.
    zipInspector.assertFileDoesNotExist("lib/armeabi-v7a/libnative_cxx_x86-only.so");
    zipInspector.assertFileDoesNotExist("lib/armeabi/libnative_cxx_x86-only.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_x86-only.so");
    if (AssumeAndroidPlatform.get(workspace).isGnuStlAvailable()) {
      zipInspector.assertFileDoesNotExist("lib/armeabi-v7a/libgnustl_shared.so");
      zipInspector.assertFileDoesNotExist("lib/armeabi/libgnustl_shared.so");
      zipInspector.assertFileExists("lib/x86/libgnustl_shared.so");
    } else {
      zipInspector.assertFileDoesNotExist("lib/armeabi-v7a/libc++_shared.so");
      zipInspector.assertFileDoesNotExist("lib/armeabi/libc++_shared.so");
      zipInspector.assertFileExists("lib/x86/libc++_shared.so");
    }
  }

  @Test
  public void testCxxLibraryDepWithUnifiedHeaders() throws IOException {
    AssumeAndroidPlatform.get(workspace).assumeUnifiedHeadersAvailable();
    String target = "//apps/sample:app_cxx_lib_dep";
    workspace.addBuckConfigLocalOption("ndk", "use_unified_headers", "true");
    // Only checks that the build succeeds with unified headers enabled.
    workspace.runBuckCommand("build", target).assertSuccess();
  }

  @Test
  public void testCxxLibraryDepWithConstraints() throws IOException {
    String target = "//apps/sample:app_cxx_lib_dep_with_constraints_without_cpu_map";
    Path path =
        workspace.buildAndReturnOutput(target, "--target-platforms", "//:android-x86_32-armv7");
    ZipInspector zipInspector = new ZipInspector(workspace.getPath(path));
    // Platform-constrained deps: the x86-only and error libs must respect their constraints.
    zipInspector.assertFileExists("lib/armeabi-v7a/libnative_cxx_lib-with-platform-deps.so");
    zipInspector.assertFileDoesNotExist("lib/armeabi-v7a/libnative_cxx_x86-only-2.so");
    zipInspector.assertFileDoesNotExist("lib/armeabi-v7a/libnative_cxx_error.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_lib-with-platform-deps.so");
    zipInspector.assertFileExists("lib/x86/libnative_cxx_x86-only-2.so");
    zipInspector.assertFileDoesNotExist("lib/x86/libnative_cxx_error.so");
    assertCxxRuntime(zipInspector, true);
  }

  @Ignore
  @Test
  public void cannotBuildBinaryWithAndroidPlatformAndWithoutCpuMap() {
    String target = "//apps/sample:app_cxx_lib_dep_with_constraints_without_cpu_map";
    ProcessResult result =
        workspace.runBuckCommand("build", target, "--target-platforms", "//:android-x86_64-arm");
    result.assertFailure();
    assertThat(
        result.getStderr(),
        containsString(
            "//apps/sample:app_cxx_lib_dep_with_constraints_without_cpu_map: "
                + "nested platform is not found for ARMV7 CPU type "
                + "in platform //:android-x86_64-arm"));
  }
}
| |
import java.util.*;
import java.awt.*;
import java.io.*;
import java.awt.event.*;
import javax.swing.*;
import javax.swing.event.*;
/**
 * Swing viewer for Ramachandran (phi/psi backbone dihedral) data written by NWChem.
 * On construction it prompts the user for a ".ram" file, populates a list with the
 * residues found in the file's first frame, and plots the phi/psi trajectory of any
 * residue the user clicks in the list.
 *
 * NOTE(review): fields {@code br}, {@code card} and {@code systemLabel} are shadowed
 * by local variables below and are effectively unused as fields — candidates for
 * removal in a later cleanup.
 */
class nwchem_Rama extends JFrame implements ActionListener, ChangeListener, WindowListener, MouseListener {

  Font defaultFont;
  int setnumber=0;
  JFileChooser chooser;          // picker for the .ram input file
  ExtensionFilter ramFilter;     // restricts the chooser to ".ram" files
  JFrame dialogFrame;            // throwaway parent for the file-chooser dialog
  BufferedReader br;             // NOTE(review): shadowed by locals; unused as a field
  String card;                   // NOTE(review): shadowed by locals; unused as a field
  Graph ramPlot = new Graph();   // the phi/psi plot component
  JLabel systemLabel = new JLabel();  // NOTE(review): shadowed by a constructor local
  JButton doneButton = new JButton("done");
  JButton clearButton = new JButton("clear");
  double time,rms1,rms2;         // time is set while parsing; rms1/rms2 appear unused here
  int numset = 0;                // number of data sets currently added to ramPlot
  int numres = 0;
  int currentSelection;          // 1-based residue index last clicked in the list
  int frame=0;                   // scratch: residue index while parsing
  DefaultListModel resList = new DefaultListModel();
  JList rList = new JList(resList);
  JScrollPane resPane = new JScrollPane(rList);

  /**
   * Builds the window, asks the user to pick a .ram file, scans the file's first
   * frame to enumerate residues, and shows the (initially empty) plot.
   *
   * NOTE(review): if the user cancels the file chooser, getSelectedFile() returns
   * null and the calls below throw NullPointerException — confirm intended.
   */
  public nwchem_Rama(){
    super("Ramachandran Viewer");
    defaultFont = new Font("Dialog", Font.BOLD,12);
    super.getContentPane().setLayout(new GridBagLayout());
    super.getContentPane().setForeground(Color.black);
    super.getContentPane().setBackground(Color.lightGray);
    super.getContentPane().setFont(defaultFont);
    super.addWindowListener(this);
    // Prompt for the input file before building the rest of the UI.
    chooser = new JFileChooser("./");
    ramFilter = new ExtensionFilter(".ram");
    chooser.setFileFilter(ramFilter);
    dialogFrame = new JFrame();
    dialogFrame.setSize(300,400);
    chooser.showOpenDialog(dialogFrame);
    JPanel header = new JPanel();
    header.setLayout(new GridBagLayout());
    header.setForeground(Color.black);
    header.setBackground(Color.lightGray);
    addComponent(super.getContentPane(),header,0,0,2,1,1,1,
                 GridBagConstraints.NONE,GridBagConstraints.WEST);
    // Local label shadows the systemLabel field; shows the chosen file path.
    JLabel systemLabel = new JLabel(chooser.getSelectedFile().toString());
    addComponent(header,systemLabel,2,0,10,1,1,1,
                 GridBagConstraints.NONE,GridBagConstraints.NORTHWEST);
    systemLabel.setForeground(Color.black);
    addComponent(header,resPane,21,3,3,1,1,10,
                 GridBagConstraints.NONE,GridBagConstraints.WEST);
    rList.addMouseListener(this);
    addComponent(header,clearButton,21,4,1,1,1,1,
                 GridBagConstraints.NONE,GridBagConstraints.NORTHWEST);
    // "clear": remove all plotted data sets and redraw.
    clearButton.addActionListener(new ActionListener(){
      public void actionPerformed(ActionEvent e){
        for(int i=0; i<numset; i++){ramPlot.removeSet(i);}; numset=0; ramPlot.repaint(); }});
    addComponent(header,doneButton,21,5,1,1,1,1,
                 GridBagConstraints.NONE,GridBagConstraints.NORTHWEST);
    // "done": hide the window (it is not disposed).
    doneButton.addActionListener(new ActionListener(){
      public void actionPerformed(ActionEvent e){
        setVisible(false); }});
    // Scan the first frame of the .ram file: skip two header lines, then one line
    // per residue until a terminator line starting with " 0".
    // NOTE(review): fixed-column parsing — assumes the residue index is in columns
    // 1-7 of each line; TODO confirm against the NWChem .ram format.
    try{
      BufferedReader br = new BufferedReader(new FileReader(chooser.getSelectedFile().toString()));
      String card;
      card=br.readLine();
      card=br.readLine();
      while(!card.startsWith(" 0")){
        frame=Integer.parseInt(card.substring(1,7).trim());
        resList.addElement("Residue "+frame);
        card=br.readLine();
      };
      br.close();
    } catch(Exception ee) {ee.printStackTrace();};
    rList.setVisibleRowCount(15);
    ramPlot.setTitle("Ramachandran");
    ramPlot.setSize(600,600);
    ramPlot.init();
    // Fix both axes to roughly [-pi, pi], the natural range of dihedral angles.
    ramPlot.fixRange(-3.1415,3.1415,-3.1415,3.1415);
    addComponent(header,ramPlot,0,1,20,10,10,10,
                 GridBagConstraints.NONE,GridBagConstraints.NORTHWEST);
    setLocation(25,225);
    setSize(900,700);
    setVisible(true);
  }

  /**
   * Re-reads the whole .ram file and adds the phi/psi trajectory of residue
   * {@code ndx} (1-based) to the plot as a new data set.
   *
   * Lines within a frame are parsed by fixed columns (phi in 11-22, psi in 23-34);
   * a jump larger than ~pi between consecutive points restarts the poly-line so
   * angle wrap-around does not draw a spurious segment across the plot.
   *
   * NOTE(review): the inner loop assumes every frame is terminated by a " 0" line;
   * a truncated file would raise NullPointerException (caught and printed below).
   */
  void plot_single(int ndx){
    try{
      BufferedReader br = new BufferedReader(new FileReader(chooser.getSelectedFile().toString()));
      String card;
      boolean first=true;
      double phi, psi, phi0, psi0;
      phi0=0.0;
      psi0=0.0;
      while((card=br.readLine()) != null){
        // First line of each frame carries the time stamp in columns 1-12.
        time=Double.valueOf(card.substring(1,12)).doubleValue();
        card=br.readLine();
        frame=1;
        while(!card.startsWith(" 0")){
          if(frame==ndx){
            phi=Double.valueOf(card.substring(11,22)).doubleValue();
            psi=Double.valueOf(card.substring(23,34)).doubleValue();
            if(!first){
              // Break the connecting line on angle wrap-around (> ~pi jump).
              if(Math.abs(phi0-phi)>3.14){first=true;};
              if(Math.abs(psi0-psi)>3.14){first=true;};
            };
            ramPlot.addData(numset,phi,psi,!first,false); first=false;
            phi0=phi; psi0=psi;
          };
          card=br.readLine(); frame++;
        };
      };
      // ramPlot.fillPlot();
      numset++;
      br.close();
    } catch(Exception ee) {ee.printStackTrace();};
  };

  // Fills the positional fields of a GridBagConstraints (fill/anchor left untouched).
  void buildConstraints(GridBagConstraints gbc, int gx, int gy, int gw, int gh,
                        int wx, int wy){
    gbc.gridx = gx;
    gbc.gridy = gy;
    gbc.gridwidth = gw;
    gbc.gridheight = gh;
    gbc.weightx = wx;
    gbc.weighty = wy;
  }

  /**
   * Adds {@code component} to {@code container} with the given GridBag constraints.
   * Exits the whole JVM if the container does not use a GridBagLayout.
   */
  static void addComponent(Container container, Component component,
                           int gridx, int gridy, int gridwidth,
                           int gridheight, double weightx,
                           double weighty, int fill, int anchor) {
    LayoutManager lm = container.getLayout();
    if(!(lm instanceof GridBagLayout)){
      System.out.println("Illegal layout"); System.exit(1);
    } else {
      GridBagConstraints gbc = new GridBagConstraints();
      gbc.gridx=gridx;
      gbc.gridy=gridy;
      gbc.gridwidth=gridwidth;
      gbc.gridheight=gridheight;
      gbc.weightx=weightx;
      gbc.weighty=weighty;
      gbc.fill=fill;
      gbc.anchor=anchor;
      container.add(component,gbc);
    }
  }

  // Listener interface methods; only mouseReleased carries behavior.
  public void actionPerformed(ActionEvent e) {
  }
  public void stateChanged(ChangeEvent e) {}
  public void windowClosing(WindowEvent event) {}
  public void windowClosed(WindowEvent event) { }
  public void windowDeiconified(WindowEvent event) {}
  public void windowIconified(WindowEvent event) {}
  public void windowActivated(WindowEvent event) {}
  public void windowDeactivated(WindowEvent e) {}
  public void windowOpened(WindowEvent event) {}
  public void mouseClicked(MouseEvent mouse) {}
  public void mousePressed(MouseEvent mouse){}

  /**
   * Left-button release on the residue list plots the selected residue.
   * NOTE(review): comparing getModifiers() with == is fragile (fails when other
   * modifier bits are set) and the method is deprecated — confirm before changing.
   */
  public void mouseReleased(MouseEvent mouse){
    if(mouse.getModifiers()==MouseEvent.BUTTON1_MASK){
      if(mouse.getSource()==rList){
        currentSelection=rList.getSelectedIndex()+1;  // list is 0-based, residues 1-based
        plot_single(currentSelection);
      };
    };
  }
  public void mouseEntered(MouseEvent mouse){
  }
  public void mouseExited(MouseEvent mouse){
  }
}
| |
package info.faceland.loot.utils;
import static info.faceland.loot.utils.InventoryUtil.broadcast;
import static info.faceland.loot.utils.InventoryUtil.getFirstColor;
import com.tealcube.minecraft.bukkit.shade.apache.commons.lang3.StringUtils;
import com.tealcube.minecraft.bukkit.shade.apache.commons.lang3.math.NumberUtils;
import com.tealcube.minecraft.bukkit.shade.google.common.base.CharMatcher;
import info.faceland.loot.LootPlugin;
import info.faceland.loot.api.items.CustomItem;
import info.faceland.loot.api.items.ItemGenerationReason;
import info.faceland.loot.api.sockets.SocketGem;
import info.faceland.loot.data.BuiltItem;
import info.faceland.loot.data.ItemRarity;
import info.faceland.loot.data.UniqueLoot;
import info.faceland.loot.data.UpgradeScroll;
import info.faceland.loot.enchantments.EnchantmentTome;
import info.faceland.loot.events.LootDropEvent;
import info.faceland.loot.items.prefabs.ArcaneEnhancer;
import info.faceland.loot.items.prefabs.IdentityTome;
import info.faceland.loot.items.prefabs.PurifyingScroll;
import info.faceland.loot.items.prefabs.SocketExtender;
import info.faceland.loot.items.prefabs.UnidentifiedItem;
import info.faceland.loot.math.LootRandom;
import info.faceland.loot.tier.Tier;
import io.pixeloutlaw.minecraft.spigot.hilt.ItemStackExtensionsKt;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Item;
import org.bukkit.entity.Player;
import org.bukkit.event.Listener;
import org.bukkit.inventory.ItemFlag;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.metadata.FixedMetadataValue;
public class DropUtil implements Listener {
private static LootPlugin plugin;
private static String itemFoundFormat;
private static Map<EntityType, Double> specialStatEntities;
private static Map<String, Double> specialStatWorlds;
private static double customizedTierChance;
private static double normalDropChance;
private static double scrollDropChance;
private static double socketDropChance;
private static double tomeDropChance;
private static double enhancerDropChance;
private static double purityDropChance;
private static LootRandom random;
/**
 * Reloads every cached drop setting from the plugin configuration. Must be invoked
 * on startup (and after any config reload) before {@link #dropLoot} is used.
 */
public static void refresh() {
  plugin = LootPlugin.getInstance();
  itemFoundFormat = plugin.getSettings().getString("language.broadcast.found-item", "");
  specialStatEntities = plugin.fetchSpecialStatEntities();
  specialStatWorlds = plugin.fetchSpecialStatWorlds();
  customizedTierChance = configChance("config.drops.customized-tier-chance");
  normalDropChance = configChance("config.drops.normal-drop");
  scrollDropChance = configChance("config.drops.upgrade-scroll");
  socketDropChance = configChance("config.drops.socket-gem");
  tomeDropChance = configChance("config.drops.enchant-gem");
  enhancerDropChance = configChance("config.drops.arcane-enhancer");
  purityDropChance = configChance("config.drops.purity-scroll");
  random = new LootRandom();
}

/** Reads a drop-chance double from settings, defaulting to zero when absent. */
private static double configChance(String path) {
  return plugin.getSettings().getDouble(path, 0D);
}
/**
 * Main loot entry point: rolls and drops every loot category for a single kill —
 * tiered equipment, crafting materials, socket gems, enchantment tomes, enhancers,
 * purifying/upgrade scrolls, identity tomes, custom items, socket extenders and
 * unidentified items. Quantity/quality multipliers from the event (and, for unique
 * entities, from the unique's loot table) scale the individual roll chances.
 *
 * NOTE(review): the order of random.nextDouble() calls is part of observable
 * behavior for seeded randoms — do not reorder the roll blocks below.
 */
public static void dropLoot(LootDropEvent event) {
  Player killer = Bukkit.getPlayer(event.getLooterUUID());
  if (killer == null) {
    // Looter is offline or invalid: nothing to drop.
    return;
  }
  double dropMultiplier = event.getQuantityMultiplier();
  double rarityMultiplier = event.getQualityMultiplier();
  int mobLevel = event.getMonsterLevel();
  List<ItemRarity> bonusDrops = new ArrayList<>(event.getBonusTierItems());
  // Unique entities can boost multipliers, add bonus equipment and roll their own table.
  if (StringUtils.isNotBlank(event.getUniqueEntity())) {
    if (plugin.getUniqueDropsManager().getData(event.getUniqueEntity()) != null) {
      UniqueLoot loot = plugin.getUniqueDropsManager().getData(event.getUniqueEntity());
      dropMultiplier *= loot.getQuantityMultiplier();
      rarityMultiplier *= loot.getQualityMultiplier();
      doUniqueDrops(loot, event.getLocation(), killer);
      bonusDrops.addAll(loot.getBonusEquipment());
    }
  }
  EntityType entityType = event.getEntity().getType();
  String worldName = event.getLocation().getWorld().getName();
  boolean specialStat = addSpecialStat(entityType, worldName);
  boolean normalDrop = dropMultiplier * normalDropChance > random.nextDouble();
  // Tiered equipment: one item per bonus-rarity entry, plus at most one "normal" drop.
  while (bonusDrops.size() > 0 || normalDrop) {
    Tier tier = getTier(killer);
    ItemRarity rarity;
    if (rarityMultiplier == 1D) {
      rarity = plugin.getRarityManager().getRandomRarity();
    } else {
      rarity = plugin.getRarityManager().getRandomRarityWithBonus(rarityMultiplier);
    }
    if (bonusDrops.size() > 0) {
      // Consume one bonus entry; it only upgrades the rolled rarity, never downgrades.
      ItemRarity dropRarity = bonusDrops.get(random.nextIntRange(0, bonusDrops.size()));
      if (dropRarity.getPower() > rarity.getPower()) {
        rarity = dropRarity;
      }
      bonusDrops.remove(dropRarity);
    } else {
      // This iteration was the single normal drop; exit the loop afterwards.
      normalDrop = false;
    }
    BuiltItem builtItem = plugin.getNewItemBuilder()
        .withTier(tier)
        .withRarity(rarity)
        // Item level: mob level +1 minus 0..3, clamped to [1, 100].
        .withLevel(Math.max(1, Math.min(mobLevel + 1 - random.nextIntRange(0, 4), 100)))
        .withItemGenerationReason(ItemGenerationReason.MONSTER)
        .withSpecialStat(specialStat)
        .build();
    ItemStack tierItem = builtItem.getStack();
    // Optional quality bonus: on success, keep rolling up to +5 total.
    int qualityBonus = 1;
    double qualityChance = plugin.getSettings().getDouble("config.random-quality-chance", 0.1);
    double multiQualityChance = plugin.getSettings()
        .getDouble("config.multi-quality-chance", 0.1);
    if (random.nextDouble() <= qualityChance) {
      while (random.nextDouble() <= multiQualityChance && qualityBonus < 5) {
        qualityBonus++;
      }
      upgradeItemQuality(tierItem, qualityBonus);
    }
    // Optional upgrade ("+N") bonus: on success, keep rolling up to +9 total.
    int upgradeBonus = 1;
    double upgradeChance = plugin.getSettings().getDouble("config.random-upgrade-chance", 0.1);
    double multiUpgradeChance = plugin.getSettings()
        .getDouble("config.multi-upgrade-chance", 0.1);
    if (random.nextDouble() <= upgradeChance) {
      while (random.nextDouble() <= multiUpgradeChance && upgradeBonus < 9) {
        upgradeBonus++;
      }
      upgradeItem(tierItem, upgradeBonus);
    }
    // Broadcast exceptional drops even if the rarity itself is not broadcast-worthy.
    boolean broadcast = rarity.isBroadcast() || upgradeBonus > 4 || qualityBonus > 2;
    dropItem(event.getLocation(), tierItem, killer, builtItem.getTicksLived(), broadcast);
  }
  // Crafting material drop: random material, quality 2-3, level near the mob's.
  if (random.nextDouble() < dropMultiplier * plugin.getSettings()
      .getDouble("config.drops.craft-mat", 0D)) {
    Object[] matArr = plugin.getCraftMatManager().getCraftMaterials().keySet().toArray();
    Material m = (Material) matArr[random.nextInt(matArr.length)];
    int quality = 2;
    while (random.nextDouble() <= plugin.getSettings()
        .getDouble("config.drops.material-quality-up", 0.1D) &&
        quality < 3) {
      quality++;
    }
    // Material level is reduced by up to 30% of the mob level.
    double materialLevel = mobLevel - (mobLevel * 0.3 * random.nextDouble());
    ItemStack his = MaterialUtil.buildMaterial(
        m, plugin.getCraftMatManager().getCraftMaterials().get(m), (int) materialLevel, quality);
    his.setAmount(1 + random.nextInt(2));
    dropItem(event.getLocation(), his, killer, false);
  }
  // Socket gem drop; "beast mode" selects by mob level instead of distance.
  if (random.nextDouble() < dropMultiplier * socketDropChance) {
    SocketGem sg;
    if (plugin.getSettings().getBoolean("config.beast.beast-mode-activate", false)) {
      sg = plugin.getSocketGemManager().getRandomSocketGemByLevel(mobLevel);
    } else {
      sg = plugin.getSocketGemManager().getRandomSocketGem(true, event.getDistance());
    }
    ItemStack his = sg.toItemStack(1);
    dropItem(event.getLocation(), his, killer, sg.isBroadcast());
  }
  // Custom-enchanting drops (tomes, enhancers, purifying scrolls) only when enabled.
  if (plugin.getSettings().getBoolean("config.custom-enchanting", true)) {
    if (random.nextDouble() < dropMultiplier * tomeDropChance) {
      EnchantmentTome es = plugin.getEnchantTomeManager().getRandomEnchantTome(rarityMultiplier);
      ItemStack his = es.toItemStack(1);
      dropItem(event.getLocation(), his, killer, es.isBroadcast());
    }
    if (random.nextDouble() < dropMultiplier * enhancerDropChance) {
      dropItem(event.getLocation(), ArcaneEnhancer.get(), killer, true);
    }
    if (random.nextDouble() < dropMultiplier * purityDropChance) {
      dropItem(event.getLocation(), PurifyingScroll.get(), killer, false);
    }
  }
  if (random.nextDouble() < dropMultiplier * scrollDropChance) {
    UpgradeScroll us = plugin.getScrollManager().getRandomScroll();
    ItemStack stack = plugin.getScrollManager().buildItemStack(us);
    dropItem(event.getLocation(), stack, killer, us.isBroadcast());
  }
  if (random.nextDouble() < dropMultiplier * plugin.getSettings()
      .getDouble("config.drops.identity-tome", 0D)) {
    ItemStack his = new IdentityTome();
    dropItem(event.getLocation(), his, killer, false);
  }
  // Custom item drop; may also roll a quality bonus when the item allows it.
  if (random.nextDouble() < dropMultiplier * plugin.getSettings()
      .getDouble("config.drops.custom-item", 0D)) {
    CustomItem ci;
    if (plugin.getSettings().getBoolean("config.beast.beast-mode-activate", false)) {
      ci = plugin.getCustomItemManager().getRandomCustomItemByLevel(mobLevel);
    } else {
      ci = plugin.getCustomItemManager()
          .getRandomCustomItem(true, event.getDistance());
    }
    ItemStack stack = ci.toItemStack(1);
    int qualityBonus = 1;
    if (ci.canBeQuality()) {
      double qualityChance = plugin.getSettings().getDouble("config.random-quality-chance", 0.1);
      double multiQualityChance = plugin.getSettings()
          .getDouble("config.multi-quality-chance", 0.1);
      if (random.nextDouble() <= qualityChance) {
        while (random.nextDouble() <= multiQualityChance && qualityBonus < 5) {
          qualityBonus++;
        }
        stack = upgradeItemQuality(stack, qualityBonus);
      }
    }
    boolean broadcast = ci.isBroadcast() || qualityBonus > 2;
    dropItem(event.getLocation(), stack, killer, broadcast);
  }
  // Socket extender: note this roll intentionally ignores dropMultiplier.
  if (random.nextDouble() < plugin.getSettings().getDouble("config.drops.socket-extender", 0D)) {
    ItemStack his = new SocketExtender();
    dropItem(event.getLocation(), his, killer, true);
  }
  // NOTE: Drop bonus should not be applied to Unidentified Items!
  if (random.nextDouble() < dropMultiplier * plugin.getSettings()
      .getDouble("config.drops.unidentified-item", 0D)) {
    Material m = Material.WOODEN_SWORD;
    ItemStack his;
    if (plugin.getSettings().getBoolean("config.beast.beast-mode-activate", false)) {
      his = new UnidentifiedItem(m, Math.min(mobLevel, 100));
    } else {
      his = new UnidentifiedItem(m, -1);
    }
    ItemMeta itemMeta = his.getItemMeta();
    itemMeta.addItemFlags(ItemFlag.HIDE_ATTRIBUTES);
    his.setItemMeta(itemMeta);
    // Unidentified items are ownerless: dropped with a null looter, never broadcast.
    dropItem(event.getLocation(), his, null, false);
  }
}
/**
 * Rolls a unique entity's personal loot table: independent chance rolls for each
 * configured socket gem and enchantment tome, plus one weighted pick per custom-item
 * sub-table (the reserved name "NO_DROP_WEIGHT" acts as a no-drop slot).
 *
 * @param uniqueLoot the unique entity's loot configuration
 * @param location   where dropped items are spawned
 * @param killer     player credited as looter (receives drop protection/broadcasts)
 */
private static void doUniqueDrops(UniqueLoot uniqueLoot, Location location, Player killer) {
  // Each gem entry is an independent probability roll.
  for (String gemString : uniqueLoot.getGemMap().keySet()) {
    if (uniqueLoot.getGemMap().get(gemString) > random.nextDouble()) {
      SocketGem gem = plugin.getSocketGemManager().getSocketGem(gemString);
      if (gem == null) {
        // Configured gem name no longer exists; skip silently.
        continue;
      }
      ItemStack his = gem.toItemStack(1);
      dropItem(location, his, killer, gem.isBroadcast());
    }
  }
  // Each tome entry is likewise an independent probability roll.
  for (String tomeString : uniqueLoot.getTomeMap().keySet()) {
    if (uniqueLoot.getTomeMap().get(tomeString) > random.nextDouble()) {
      EnchantmentTome tome = plugin.getEnchantTomeManager().getEnchantTome(tomeString);
      if (tome == null) {
        continue;
      }
      ItemStack his = tome.toItemStack(1);
      dropItem(location, his, killer, tome.isBroadcast());
    }
  }
  // Each custom-item table performs one weighted roulette-wheel selection.
  for (String tableName : uniqueLoot.getCustomItemMap().keySet()) {
    double totalWeight = 0;
    for (double weight : uniqueLoot.getCustomItemMap().get(tableName).values()) {
      totalWeight += weight;
    }
    // Scale total weight by a random fraction to pick the selection threshold.
    totalWeight *= random.nextDouble();
    double currentWeight = 0;
    for (String customName : uniqueLoot.getCustomItemMap().get(tableName).keySet()) {
      currentWeight += uniqueLoot.getCustomItemMap().get(tableName).get(customName);
      if (currentWeight >= totalWeight) {
        if ("NO_DROP_WEIGHT".equalsIgnoreCase(customName)) {
          // Reserved sentinel: this slot means "drop nothing" from the table.
          break;
        }
        CustomItem ci = plugin.getCustomItemManager().getCustomItem(customName);
        if (ci == null) {
          break;
        }
        ItemStack his = ci.toItemStack(1);
        dropItem(location, his, killer, ci.isBroadcast());
        break;
      }
    }
  }
}
/**
 * Applies a "+N" upgrade to an item: raises the numeric level on the FIRST lore line
 * starting with '+' by {@code upgradeBonus} and prepends "+N " (in the item's leading
 * color) to the display name. Items with no '+' lore line are returned unchanged.
 * Bonuses above 6 also add an enchantment glow (hidden behind item flags).
 *
 * NOTE(review): the name prefix is "+upgradeBonus" while the lore becomes
 * "loreLevel + upgradeBonus" — these only agree when the item starts at +0; confirm
 * callers never pass pre-upgraded items.
 *
 * @param his          the item to upgrade (mutated in place)
 * @param upgradeBonus number of upgrade levels to add
 * @return the same ItemStack instance, for chaining
 */
private static ItemStack upgradeItem(ItemStack his, int upgradeBonus) {
  boolean succeed = false;
  List<String> lore = ItemStackExtensionsKt.getLore(his);
  for (int i = 0; i < lore.size(); i++) {
    String s = lore.get(i);
    String ss = ChatColor.stripColor(s);
    if (!ss.startsWith("+")) {
      continue;
    }
    succeed = true;
    // Extract the (possibly negative) numeric level from the stripped lore line.
    String loreLev = CharMatcher.digit().or(CharMatcher.is('-')).retainFrom(ss);
    int loreLevel = NumberUtils.toInt(loreLev);
    lore.set(i, s.replace("+" + loreLevel, "+" + (loreLevel + upgradeBonus)));
    String name = getFirstColor(ItemStackExtensionsKt.getDisplayName(his)) +
        ("+" + upgradeBonus) + " " + ItemStackExtensionsKt.getDisplayName(his);
    ItemStackExtensionsKt.setDisplayName(his, name);
    break;
  }
  if (succeed) {
    ItemStackExtensionsKt.setLore(his, lore);
    if (upgradeBonus > 6) {
      // High upgrades get an enchant glow; flags hide the fake enchantment details.
      his.addUnsafeEnchantment(Enchantment.DURABILITY, 1);
      ItemStackExtensionsKt.addItemFlags(his, ItemFlag.HIDE_ATTRIBUTES, ItemFlag.HIDE_ENCHANTS);
    }
  }
  return his;
}
/**
 * Applies a quality bonus to an item: raises the numeric level on the FIRST lore line
 * starting with '+' by {@code upgradeBonus} and prepends the configured quality title
 * ("language.quality.N") to the display name. Items with no '+' lore line are
 * returned unchanged.
 *
 * @param his          the item to upgrade (mutated in place)
 * @param upgradeBonus quality tier to apply (also added to the lore level)
 * @return the same ItemStack instance, for chaining
 */
private static ItemStack upgradeItemQuality(ItemStack his, int upgradeBonus) {
  boolean succeed = false;
  List<String> lore = ItemStackExtensionsKt.getLore(his);
  for (int i = 0; i < lore.size(); i++) {
    String s = lore.get(i);
    String ss = ChatColor.stripColor(s);
    if (!ss.startsWith("+")) {
      continue;
    }
    succeed = true;
    // Extract the (possibly negative) numeric level from the stripped lore line.
    String loreLev = CharMatcher.digit().or(CharMatcher.is('-')).retainFrom(ss);
    int loreLevel = NumberUtils.toInt(loreLev);
    lore.set(i, s.replace("+" + loreLevel, "+" + (loreLevel + upgradeBonus)));
    // Quality title comes from the language config, keyed by bonus tier.
    String qualityEnhanceName = plugin.getSettings()
        .getString("language.quality." + upgradeBonus, "");
    String name = getFirstColor(ItemStackExtensionsKt.getDisplayName(his)) +
        qualityEnhanceName + " " + ItemStackExtensionsKt.getDisplayName(his);
    ItemStackExtensionsKt.setDisplayName(his, name);
    break;
  }
  if (succeed) {
    ItemStackExtensionsKt.setLore(his, lore);
  }
  return his;
}
/**
 * Drops an item at the given location with the default ticks-lived value (0, i.e. unchanged).
 *
 * @param loc       where to drop the item
 * @param itemStack the item to drop
 * @param looter    player credited with the drop; may be null (no owner metadata, no broadcast)
 * @param broadcast whether to announce the find to the looter
 */
private static void dropItem(Location loc, ItemStack itemStack, Player looter, boolean broadcast) {
dropItem(loc, itemStack, looter, 0, broadcast);
}
/**
 * Naturally drops {@code itemStack} into the world at {@code loc} and, when a looter is
 * given, tags the dropped entity with ownership metadata and optionally broadcasts the find.
 *
 * @param loc        where to drop the item (its world must be non-null)
 * @param itemStack  the item to drop
 * @param looter     player credited with the drop; may be null to skip metadata/broadcast
 * @param ticksLived pre-aged lifetime for the drop; 0 leaves the entity's age unchanged
 * @param broadcast  whether to announce the find (only applies when looter is non-null)
 */
private static void dropItem(Location loc, ItemStack itemStack, Player looter, int ticksLived,
    boolean broadcast) {
    Item dropped = Objects.requireNonNull(loc.getWorld()).dropItemNaturally(loc, itemStack);
    if (ticksLived != 0) {
        dropped.setTicksLived(ticksLived);
    }
    if (looter == null) {
        return;
    }
    applyOwnerMeta(dropped, looter.getUniqueId());
    if (broadcast) {
        broadcast(looter, itemStack, itemFoundFormat);
    }
}
/**
 * Tags a dropped item entity with loot-ownership metadata.
 *
 * "loot-owner" carries the looter's UUID and "loot-time" the drop timestamp in ms —
 * presumably consumed by a pickup/protection listener elsewhere; confirm against that listener.
 *
 * @param drop  the dropped item entity to tag
 * @param owner UUID of the player who owns the drop
 */
private static void applyOwnerMeta(Item drop, UUID owner) {
drop.setMetadata("loot-owner", new FixedMetadataValue(plugin, owner));
drop.setMetadata("loot-time", new FixedMetadataValue(plugin, System.currentTimeMillis()));
}
/**
 * Rolls whether a special stat should be added, based on per-entity-type and per-world
 * configured chances. Each applicable chance gets its own independent random roll.
 *
 * @param entityType type of the killed entity
 * @param worldName  name of the world the kill happened in
 * @return true when either the entity-type roll or the world roll succeeds
 */
private static boolean addSpecialStat(EntityType entityType, String worldName) {
    // Entity-type chance wins first; the world roll only happens if it fails.
    if (specialStatEntities.containsKey(entityType)
        && random.nextDouble() < specialStatEntities.get(entityType)) {
        return true;
    }
    return specialStatWorlds.containsKey(worldName)
        && random.nextDouble() < specialStatWorlds.get(worldName);
}
/**
 * Picks a tier for a drop. With probability {@code 1 - customizedTierChance} a fully random
 * tier is returned; otherwise the tier is chosen uniformly from the tiers associated with
 * the materials the killer is currently wearing/holding, falling back to a random tier when
 * none of the worn materials map to a tier group.
 *
 * @param killer the player whose equipment biases the tier selection
 * @return the selected tier
 */
public static Tier getTier(Player killer) {
    // Above the customized-tier chance: ignore equipment and pick a random tier.
    if (customizedTierChance < random.nextDouble()) {
        return plugin.getTierManager().getRandomTier();
    }
    List<Tier> candidates = new ArrayList<>();
    for (Material worn : getWornMaterials(killer)) {
        Set<Tier> group = plugin.getItemGroupManager().getMaterialGroup(worn);
        if (group != null) {
            candidates.addAll(group);
        }
    }
    if (candidates.isEmpty()) {
        return plugin.getTierManager().getRandomTier();
    }
    return candidates.get(random.nextIntRange(0, candidates.size()));
}
/**
 * Collects the material types of everything the player currently has equipped: all non-empty
 * armor slots, the main-hand item, and the off-hand item.
 *
 * @param player the player whose equipment is inspected
 * @return a mutable list of worn/held materials (possibly empty, never null); may contain
 *         duplicates when the same material appears in several slots
 */
private static List<Material> getWornMaterials(Player player) {
    List<Material> materials = new ArrayList<>();
    for (ItemStack stack : player.getEquipment().getArmorContents()) {
        if (stack == null || stack.getType() == Material.AIR) {
            continue; // empty armor slot
        }
        materials.add(stack.getType());
    }
    ItemStack handItem = player.getEquipment().getItemInMainHand();
    if (handItem.getType() != Material.AIR) {
        materials.add(handItem.getType());
    }
    // BUG FIX: this previously read getItemInMainHand() a second time, so the off-hand
    // item was never counted (and the main-hand item could be counted twice).
    ItemStack offItem = player.getEquipment().getItemInOffHand();
    if (offItem.getType() != Material.AIR) {
        materials.add(offItem.getType());
    }
    return materials;
}
}
| |
package org.jivesoftware.util;
import org.apache.commons.pool2.BasePooledObjectFactory;
import org.apache.commons.pool2.ObjectPool;
import org.apache.commons.pool2.PooledObject;
import org.apache.commons.pool2.impl.DefaultPooledObject;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.jsmpp.bean.*;
import org.jsmpp.extra.NegativeResponseException;
import org.jsmpp.session.BindParameter;
import org.jsmpp.session.SMPPSession;
import org.jsmpp.util.AbsoluteTimeFormatter;
import org.jsmpp.util.TimeFormatter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* A service to send SMS messages.<p>
*
* This class is configured with a set of Jive properties. Note that each service provider can require a different set
* of properties to be set.
* <ul>
* <li>{@code sms.smpp.connections.maxAmount} -- the maximum amount of connections. The default value is one.
* <li>{@code sms.smpp.connections.idleMillis} -- time (in ms) after which idle connections are allowed to be evicted. Defaults to two minutes.
* <li>{@code sms.smpp.host} -- the host name of your SMPP Server or SMSC, i.e. smsc.example.org. The default value is "localhost".
* <li>{@code sms.smpp.port} -- the port on which the SMSC is listening. Defaults to 2775.
* <li>{@code sms.smpp.systemId} -- the 'user name' to use when connecting to the SMSC.
* <li>{@code sms.smpp.password} -- the password that authenticates the systemId value when connecting to the SMSC.
* <li>{@code sms.smpp.systemType} -- an optional system type, which, if defined, will be used when connecting to the SMSC.
* <li>{@code sms.smpp.receive.ton} -- The type-of-number value for 'receiving' SMS messages. Defaults to 'UNKNOWN'.
* <li>{@code sms.smpp.receive.npi} -- The number-plan-indicator value for 'receiving' SMS messages. Defaults to 'UNKNOWN'.
* <li>{@code sms.smpp.source.ton} -- The type-of-number value for the source of SMS messages. Defaults to 'UNKNOWN'.
* <li>{@code sms.smpp.source.npi} -- The number-plan-indicator value for the source of SMS messages. Defaults to 'UNKNOWN'.
* <li>{@code sms.smpp.source.address} -- The source address of SMS messages.
* <li>{@code sms.smpp.destination.ton} -- The type-of-number value for the destination of SMS messages. Defaults to 'UNKNOWN'.
* <li>{@code sms.smpp.destination.npi} -- The number-plan-indicator value for the destination of SMS messages. Defaults to 'UNKNOWN'.
* </ul>
*
* @author Guus der Kinderen, guus@goodbytes.nl
*/
public class SmsService
{
private static final Logger Log = LoggerFactory.getLogger( SmsService.class );
private static TimeFormatter timeFormatter = new AbsoluteTimeFormatter();
private static SmsService INSTANCE;
public static synchronized SmsService getInstance()
{
if ( INSTANCE == null )
{
INSTANCE = new SmsService();
}
return INSTANCE;
}
/**
* Pool of SMPP sessions that is used to transmit messages to the SMSC.
*/
private final SMPPSessionPool sessionPool;
private SmsService()
{
sessionPool = new SMPPSessionPool();
PropertyEventDispatcher.addListener( sessionPool );
}
/**
* Causes a new SMS message to be sent.
*
* Note that the message is sent asynchronously. This method does not block. A successful invocation does not
* guarantee successful delivery
*
* @param message The body of the message (cannot be null or empty).
* @param recipient The address / phone number to which the message is to be send (cannot be null or empty).
*/
public void send( String message, String recipient )
{
if ( message == null || message.isEmpty() )
{
throw new IllegalArgumentException( "Argument 'message' cannot be null or an empty String." );
}
if ( recipient == null || recipient.isEmpty() )
{
throw new IllegalArgumentException( "Argument 'recipient' cannot be null or an empty String." );
}
TaskEngine.getInstance().submit( new SmsTask( sessionPool, message, recipient ) );
}
/**
* Causes a new SMS message to be sent.
*
* This method differs from {@link #send(String, String)} in that the message is sent before this method returns,
* rather than queueing the messages to be sent later (in an async fashion). As a result, any exceptions that occur
* while sending the message are thrown by this method (which can be useful to test the configuration of this
* service).
*
* @param message The body of the message (cannot be null or empty).
* @param recipient The address / phone number to which the message is to be send (cannot be null or empty).
* @throws Exception On any problem.
*/
public void sendImmediately( String message, String recipient ) throws Exception
{
if ( message == null || message.isEmpty() )
{
throw new IllegalArgumentException( "Argument 'message' cannot be null or an empty String." );
}
if ( recipient == null || recipient.isEmpty() )
{
throw new IllegalArgumentException( "Argument 'recipient' cannot be null or an empty String." );
}
try
{
new SmsTask( sessionPool, message, recipient ).sendMessage();
}
catch ( Exception e )
{
Log.error( "An exception occurred while sending a SMS message (to '{}')", recipient, e );
throw e;
}
}
/**
* Checks if an exception in the chain of the provided throwable contains a 'command status' that can be
* translated in a somewhat more helpful error message.
*
* The list of error messages was taken from http://www.smssolutions.net/tutorials/smpp/smpperrorcodes/
*
* @param ex The exception in which to search for a command status.
* @return a human readable error message.
*/
public static String getDescriptiveMessage( Throwable ex )
{
if ( ex instanceof NegativeResponseException )
{
final Map<Integer, String> errors = new HashMap<>();
errors.put( 0x00000000, "No Error" );
errors.put( 0x00000001, "Message too long" );
errors.put( 0x00000002, "Command length is invalid" );
errors.put( 0x00000003, "Command ID is invalid or not supported" );
errors.put( 0x00000004, "Incorrect bind status for given command" );
errors.put( 0x00000005, "Already bound" );
errors.put( 0x00000006, "Invalid Priority Flag" );
errors.put( 0x00000007, "Invalid registered delivery flag" );
errors.put( 0x00000008, "System error" );
errors.put( 0x0000000A, "Invalid source address" );
errors.put( 0x0000000B, "Invalid destination address" );
errors.put( 0x0000000C, "Message ID is invalid" );
errors.put( 0x0000000D, "Bind failed" );
errors.put( 0x0000000E, "Invalid password" );
errors.put( 0x0000000F, "Invalid System ID" );
errors.put( 0x00000011, "Cancelling message failed" );
errors.put( 0x00000013, "Message recplacement failed" );
errors.put( 0x00000014, "Message queue full" );
errors.put( 0x00000015, "Invalid service type" );
errors.put( 0x00000033, "Invalid number of destinations" );
errors.put( 0x00000034, "Invalid distribution list name" );
errors.put( 0x00000040, "Invalid destination flag" );
errors.put( 0x00000042, "Invalid submit with replace request" );
errors.put( 0x00000043, "Invalid esm class set" );
errors.put( 0x00000044, "Invalid submit to ditribution list" );
errors.put( 0x00000045, "Submitting message has failed" );
errors.put( 0x00000048, "Invalid source address type of number ( TON )" );
errors.put( 0x00000049, "Invalid source address numbering plan ( NPI )" );
errors.put( 0x00000050, "Invalid destination address type of number ( TON )" );
errors.put( 0x00000051, "Invalid destination address numbering plan ( NPI )" );
errors.put( 0x00000053, "Invalid system type" );
errors.put( 0x00000054, "Invalid replace_if_present flag" );
errors.put( 0x00000055, "Invalid number of messages" );
errors.put( 0x00000058, "Throttling error" );
errors.put( 0x00000061, "Invalid scheduled delivery time" );
errors.put( 0x00000062, "Invalid Validty Period value" );
errors.put( 0x00000063, "Predefined message not found" );
errors.put( 0x00000064, "ESME Receiver temporary error" );
errors.put( 0x00000065, "ESME Receiver permanent error" );
errors.put( 0x00000066, "ESME Receiver reject message error" );
errors.put( 0x00000067, "Message query request failed" );
errors.put( 0x000000C0, "Error in the optional part of the PDU body" );
errors.put( 0x000000C1, "TLV not allowed" );
errors.put( 0x000000C2, "Invalid parameter length" );
errors.put( 0x000000C3, "Expected TLV missing" );
errors.put( 0x000000C4, "Invalid TLV value" );
errors.put( 0x000000FE, "Transaction delivery failure" );
errors.put( 0x000000FF, "Unknown error" );
errors.put( 0x00000100, "ESME not authorised to use specified servicetype" );
errors.put( 0x00000101, "ESME prohibited from using specified operation" );
errors.put( 0x00000102, "Specified servicetype is unavailable" );
errors.put( 0x00000103, "Specified servicetype is denied" );
errors.put( 0x00000104, "Invalid data coding scheme" );
errors.put( 0x00000105, "Invalid source address subunit" );
errors.put( 0x00000106, "Invalid destination address subunit" );
errors.put( 0x0000040B, "Insufficient credits to send message" );
errors.put( 0x0000040C, "Destination address blocked by the ActiveXperts SMPP Demo Server" );
String error = errors.get( ( (NegativeResponseException) ex ).getCommandStatus() );
if ( ex.getMessage() != null && !ex.getMessage().isEmpty() )
{
error += " (exception message: '" + ex.getMessage() + "')";
}
return error;
}
else if ( ex.getCause() != null )
{
return getDescriptiveMessage( ex.getCause() );
}
return ex.getMessage();
}
/**
* Runnable that allows an SMS to be sent in a different thread.
*/
private static class SmsTask implements Runnable
{
private final ObjectPool<SMPPSession> sessionPool;
// Settings that apply to source of an SMS message.
private final TypeOfNumber sourceTon = JiveGlobals.getEnumProperty( "sms.smpp.source.ton", TypeOfNumber.class, TypeOfNumber.UNKNOWN );
private final NumberingPlanIndicator sourceNpi = JiveGlobals.getEnumProperty( "sms.smpp.source.npi", NumberingPlanIndicator.class, NumberingPlanIndicator.UNKNOWN );
private final String sourceAddress = JiveGlobals.getProperty( "sms.smpp.source.address" );
// Settings that apply to destination of an SMS message.
private final TypeOfNumber destinationTon = JiveGlobals.getEnumProperty( "sms.smpp.destination.ton", TypeOfNumber.class, TypeOfNumber.UNKNOWN );
private final NumberingPlanIndicator destinationNpi = JiveGlobals.getEnumProperty( "sms.smpp.destination.npi", NumberingPlanIndicator.class, NumberingPlanIndicator.UNKNOWN );
private final String destinationAddress;
private final byte[] message;
// Non-configurable defaults (for now - TODO?)
private final ESMClass esm = new ESMClass();
private final byte protocolId = 0;
private final byte priorityFlag = 1;
private final String serviceType = "CMT";
private final String scheduleDeliveryTime = timeFormatter.format( new Date() );
private final String validityPeriod = null;
private final RegisteredDelivery registeredDelivery = new RegisteredDelivery( SMSCDeliveryReceipt.DEFAULT );
private final byte replaceIfPresentFlag = 0;
private final DataCoding dataCoding = new GeneralDataCoding( Alphabet.ALPHA_DEFAULT, MessageClass.CLASS1, false );
private final byte smDefaultMsgId = 0;
SmsTask( ObjectPool<SMPPSession> sessionPool, String message, String destinationAddress )
{
this.sessionPool = sessionPool;
this.message = message.getBytes();
this.destinationAddress = destinationAddress;
}
@Override
public void run()
{
try
{
sendMessage();
}
catch ( Exception e )
{
Log.error( "An exception occurred while sending a SMS message (to '{}')", destinationAddress, e );
}
}
public void sendMessage() throws Exception
{
final SMPPSession session = sessionPool.borrowObject();
try
{
final String messageId = session.submitShortMessage(
serviceType,
sourceTon, sourceNpi, sourceAddress,
destinationTon, destinationNpi, destinationAddress,
esm, protocolId, priorityFlag,
scheduleDeliveryTime, validityPeriod, registeredDelivery, replaceIfPresentFlag,
dataCoding, smDefaultMsgId, message );
Log.debug( "Message submitted, message_id is '{}'.", messageId );
}
finally
{
sessionPool.returnObject( session );
}
}
}
/**
* A factory of SMPPSession instances that are used in an object pool.
*
* @author Guus der Kinderen, guus.der.kinderen@gmail.com
*/
private static class SMPPSessionFactory extends BasePooledObjectFactory<SMPPSession>
{
private static final Logger Log = LoggerFactory.getLogger( SMPPSessionFactory.class );
@Override
public SMPPSession create() throws Exception
{
// SMSC connection settings
final String host = JiveGlobals.getProperty( "sms.smpp.host", "localhost" );
final int port = JiveGlobals.getIntProperty( "sms.smpp.port", 2775 );
final String systemId = JiveGlobals.getProperty( "sms.smpp.systemId" );
final String password = JiveGlobals.getProperty( "sms.smpp.password" );
final String systemType = JiveGlobals.getProperty( "sms.smpp.systemType" );
// Settings that apply to 'receiving' SMS. Should not apply to this implementation, as we're not receiving anything..
final TypeOfNumber receiveTon = JiveGlobals.getEnumProperty( "sms.smpp.receive.ton", TypeOfNumber.class, TypeOfNumber.UNKNOWN );
final NumberingPlanIndicator receiveNpi = JiveGlobals.getEnumProperty( "sms.smpp.receive.npi", NumberingPlanIndicator.class, NumberingPlanIndicator.UNKNOWN );
Log.debug( "Creating a new sesssion (host: '{}', port: '{}', systemId: '{}'.", host, port, systemId );
final SMPPSession session = new SMPPSession();
session.connectAndBind( host, port, new BindParameter( BindType.BIND_TX, systemId, password, systemType, receiveTon, receiveNpi, null ) );
Log.debug( "Created a new session with ID '{}'.", session.getSessionId() );
return session;
}
@Override
public boolean validateObject( PooledObject<SMPPSession> pooledObject )
{
final SMPPSession session = pooledObject.getObject();
final boolean isValid = session.getSessionState().isTransmittable(); // updated by the SMPPSession internal enquireLink timer.
Log.debug( "Ran a check to see if session with ID '{}' is valid. Outcome: {}", session.getSessionId(), isValid );
return isValid;
}
@Override
public void destroyObject( PooledObject<SMPPSession> pooledObject ) throws Exception
{
final SMPPSession session = pooledObject.getObject();
Log.debug( "Destroying a pooled session with ID '{}'.", session.getSessionId() );
session.unbindAndClose();
}
@Override
public PooledObject<SMPPSession> wrap( SMPPSession smppSession )
{
return new DefaultPooledObject<>( smppSession );
}
}
/**
* Implementation of an Object pool that manages instances of SMPPSession. The intend of this pool is to have a
* single session, that's allowed to be idle for at least two minutes before being closed.
*
* The pool reacts to Openfire property changes, clearing all (inactive) sessions when a property used to create
* a session is modified. Note that sessions that are borrowed from the pool are not affected by such a change. When
* a property change occurs while a session is borrowed, a warning is logged (the property change will be applied
* when that session is eventually rotated out of the pool by the eviction strategy.
*
* @author Guus der Kinderen, guus.der.kinderen@gmail.com
*/
private static class SMPPSessionPool extends GenericObjectPool<SMPPSession> implements PropertyEventListener
{
private static final Logger Log = LoggerFactory.getLogger( SMPPSessionPool.class );
SMPPSessionPool()
{
super( new SMPPSessionFactory() );
setMaxTotal( JiveGlobals.getIntProperty( "sms.smpp.connections.maxAmount", 1 ) );
setNumTestsPerEvictionRun( getMaxTotal() );
setMinEvictableIdleTimeMillis( JiveGlobals.getLongProperty( "sms.smpp.connections.idleMillis", 1000 * 60 * 2 ) );
if ( getMinEvictableIdleTimeMillis() > 0 )
{
setTimeBetweenEvictionRunsMillis( getMinEvictableIdleTimeMillis() / 10 );
}
setTestOnBorrow( true );
setTestWhileIdle( true );
}
void processPropertyChange( String propertyName )
{
final Set<String> ofInterest = new HashSet<>();
ofInterest.add( "sms.smpp.host" );
ofInterest.add( "sms.smpp.port" );
ofInterest.add( "sms.smpp.systemId" );
ofInterest.add( "sms.smpp.password" );
ofInterest.add( "sms.smpp.systemType" );
ofInterest.add( "sms.smpp.receive.ton" );
ofInterest.add( "sms.smpp.receive.npi" );
if ( ofInterest.contains( propertyName ) )
{
Log.debug( "Property change for '{}' detected. Clearing all (inactive) sessions.", propertyName );
if ( getNumActive() > 0 )
{
// This can occur when an SMS is being sent while the property is being updated at the same time.
Log.warn( "Note that property change for '{}' will not affect one or more sessions that are currently actively used (although changes will be applied after the session is rotated out, due to time-based eviction)." );
}
clear();
}
// No need to clear the sessions for these properties:
if ( propertyName.equals( "sms.smpp.connections.maxAmount" ) )
{
setMaxTotal( JiveGlobals.getIntProperty( "sms.smpp.connections.maxAmount", 1 ) );
setNumTestsPerEvictionRun( getMaxTotal() );
}
if ( propertyName.equals( "sms.smpp.connections.idleMillis" ) )
{
setMinEvictableIdleTimeMillis( JiveGlobals.getLongProperty( "sms.smpp.connections.idleMillis", 1000 * 60 * 2 ) );
if ( getMinEvictableIdleTimeMillis() > 0 )
{
setTimeBetweenEvictionRunsMillis( getMinEvictableIdleTimeMillis() / 10 );
}
}
}
@Override
public void propertySet( String property, Map<String, Object> params )
{
processPropertyChange( property );
}
@Override
public void propertyDeleted( String property, Map<String, Object> params )
{
processPropertyChange( property );
}
@Override
public void xmlPropertySet( String property, Map<String, Object> params )
{
processPropertyChange( property );
}
@Override
public void xmlPropertyDeleted( String property, Map<String, Object> params )
{
processPropertyChange( property );
}
}
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.snapshots;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotIndexStatus;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.repositories.RepositoryData;
import org.elasticsearch.repositories.ShardSnapshotResult;
import org.elasticsearch.snapshots.mockstore.MockRepository;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot;
import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots;
import org.elasticsearch.index.snapshots.blobstore.SnapshotFiles;
import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.RepositoryShardId;
import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
import java.nio.file.Path;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0)
public class CloneSnapshotIT extends AbstractSnapshotIntegTestCase {
/**
 * Clones a single shard snapshot directly on the repository level and verifies that the clone
 * references the same files as the source, and that repeated cloning is idempotent.
 */
public void testShardClone() throws Exception {
internalCluster().startMasterOnlyNode();
internalCluster().startDataOnlyNode();
final String repoName = "repo-name";
final Path repoPath = randomRepoPath();
createRepository(repoName, "fs", repoPath);
// Randomly exercise the old (pre-shard-generations) snapshot format as well.
final boolean useBwCFormat = randomBoolean();
if (useBwCFormat) {
initWithSnapshotVersion(repoName, repoPath, SnapshotsService.OLD_SNAPSHOT_FORMAT);
// Re-create repo to clear repository data cache
assertAcked(clusterAdmin().prepareDeleteRepository(repoName).get());
createRepository(repoName, "fs", repoPath);
}
final String indexName = "test-index";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String sourceSnapshot = "source-snapshot";
final SnapshotInfo sourceSnapshotInfo = createFullSnapshot(repoName, sourceSnapshot);
final BlobStoreRepository repository = getRepositoryOnMaster(repoName);
final RepositoryData repositoryData = getRepositoryData(repoName);
final IndexId indexId = repositoryData.resolveIndexId(indexName);
final int shardId = 0;
final RepositoryShardId repositoryShardId = new RepositoryShardId(indexId, shardId);
final SnapshotId targetSnapshotId = new SnapshotId("target-snapshot", UUIDs.randomBase64UUID(random()));
// The old format has no shard generations, so none is passed to the clone.
final String currentShardGen;
if (useBwCFormat) {
currentShardGen = null;
} else {
currentShardGen = repositoryData.shardGenerations().getShardGen(indexId, shardId);
}
final ShardSnapshotResult shardSnapshotResult = PlainActionFuture.get(f -> repository.cloneShardSnapshot(
sourceSnapshotInfo.snapshotId(), targetSnapshotId, repositoryShardId, currentShardGen, f));
final String newShardGeneration = shardSnapshotResult.getGeneration();
if (useBwCFormat) {
final long gen = Long.parseLong(newShardGeneration);
assertEquals(gen, 1L); // Initial snapshot brought it to 0, clone increments it to 1
}
final BlobStoreIndexShardSnapshot targetShardSnapshot = readShardSnapshot(repository, repositoryShardId, targetSnapshotId);
final BlobStoreIndexShardSnapshot sourceShardSnapshot =
readShardSnapshot(repository, repositoryShardId, sourceSnapshotInfo.snapshotId());
// The clone must reuse every file of the source: nothing copied incrementally.
assertThat(targetShardSnapshot.incrementalFileCount(), is(0));
final List<BlobStoreIndexShardSnapshot.FileInfo> sourceFiles = sourceShardSnapshot.indexFiles();
final List<BlobStoreIndexShardSnapshot.FileInfo> targetFiles = targetShardSnapshot.indexFiles();
final int fileCount = sourceFiles.size();
assertEquals(fileCount, targetFiles.size());
for (int i = 0; i < fileCount; i++) {
assertTrue(sourceFiles.get(i).isSame(targetFiles.get(i)));
}
// The new shard generation must list both snapshots, pointing at identical files.
final BlobStoreIndexShardSnapshots shardMetadata = readShardGeneration(repository, repositoryShardId, newShardGeneration);
final List<SnapshotFiles> snapshotFiles = shardMetadata.snapshots();
assertThat(snapshotFiles, hasSize(2));
assertTrue(snapshotFiles.get(0).isSame(snapshotFiles.get(1)));
// verify that repeated cloning is idempotent
final ShardSnapshotResult shardSnapshotResult2 = PlainActionFuture.get(f -> repository.cloneShardSnapshot(
sourceSnapshotInfo.snapshotId(), targetSnapshotId, repositoryShardId, newShardGeneration, f));
assertEquals(newShardGeneration, shardSnapshotResult2.getGeneration());
assertEquals(shardSnapshotResult.getSegmentCount(), shardSnapshotResult2.getSegmentCount());
assertEquals(shardSnapshotResult.getSize(), shardSnapshotResult2.getSize());
}
/**
 * Clones an index from one snapshot into a new snapshot via the clone API and verifies that
 * source and target report identical file counts and sizes for that index.
 */
public void testCloneSnapshotIndex() throws Exception {
internalCluster().startMasterOnlyNode();
internalCluster().startDataOnlyNode();
final String repoName = "repo-name";
createRepository(repoName, "fs");
final String indexName = "index-1";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String sourceSnapshot = "source-snapshot";
createFullSnapshot(repoName, sourceSnapshot);
// Change (and possibly delete) the live index after the snapshot: the clone must be
// based on the snapshot contents, not on the current cluster state.
indexRandomDocs(indexName, randomIntBetween(20, 100));
if (randomBoolean()) {
assertAcked(admin().indices().prepareDelete(indexName));
}
final String targetSnapshot = "target-snapshot";
assertAcked(startClone(repoName, sourceSnapshot, targetSnapshot, indexName).get());
final List<SnapshotStatus> status = clusterAdmin().prepareSnapshotStatus(repoName)
.setSnapshots(sourceSnapshot, targetSnapshot).get().getSnapshots();
assertThat(status, hasSize(2));
final SnapshotIndexStatus status1 = status.get(0).getIndices().get(indexName);
final SnapshotIndexStatus status2 = status.get(1).getIndices().get(indexName);
assertEquals(status1.getStats().getTotalFileCount(), status2.getStats().getTotalFileCount());
assertEquals(status1.getStats().getTotalSize(), status2.getStats().getTotalSize());
}
/**
 * Verifies that a snapshot cannot be deleted while it is being cloned: the delete must fail
 * with a ConcurrentSnapshotExecutionException, and the clone must complete successfully
 * once unblocked.
 */
public void testClonePreventsSnapshotDelete() throws Exception {
final String masterName = internalCluster().startMasterOnlyNode();
internalCluster().startDataOnlyNode();
final String repoName = "repo-name";
createRepository(repoName, "mock");
final String indexName = "index-1";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String sourceSnapshot = "source-snapshot";
createFullSnapshot(repoName, sourceSnapshot);
indexRandomDocs(indexName, randomIntBetween(20, 100));
final String targetSnapshot = "target-snapshot";
// Stall the clone by blocking the master's repository file access.
blockNodeOnAnyFiles(repoName, masterName);
final ActionFuture<AcknowledgedResponse> cloneFuture = startClone(repoName, sourceSnapshot, targetSnapshot, indexName);
waitForBlock(masterName, repoName);
assertFalse(cloneFuture.isDone());
// Deleting the clone's source while the clone is in progress must be rejected.
ConcurrentSnapshotExecutionException ex = expectThrows(ConcurrentSnapshotExecutionException.class,
() -> startDeleteSnapshot(repoName, sourceSnapshot).actionGet());
assertThat(ex.getMessage(), containsString("cannot delete snapshot while it is being cloned"));
unblockNode(repoName, masterName);
assertAcked(cloneFuture.get());
// Source and clone must report identical file counts and sizes for the index.
final List<SnapshotStatus> status = clusterAdmin().prepareSnapshotStatus(repoName)
.setSnapshots(sourceSnapshot, targetSnapshot).get().getSnapshots();
assertThat(status, hasSize(2));
final SnapshotIndexStatus status1 = status.get(0).getIndices().get(indexName);
final SnapshotIndexStatus status2 = status.get(1).getIndices().get(indexName);
assertEquals(status1.getStats().getTotalFileCount(), status2.getStats().getTotalFileCount());
assertEquals(status1.getStats().getTotalSize(), status2.getStats().getTotalSize());
}
/**
 * Verifies that a clone can be started while another snapshot is in progress (blocked on the
 * data node) and that both operations complete successfully.
 */
public void testConcurrentCloneAndSnapshot() throws Exception {
internalCluster().startMasterOnlyNode();
final String dataNode = internalCluster().startDataOnlyNode();
final String repoName = "repo-name";
createRepository(repoName, "mock");
final String indexName = "index-1";
createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
final String sourceSnapshot = "source-snapshot";
createFullSnapshot(repoName, sourceSnapshot);
indexRandomDocs(indexName, randomIntBetween(20, 100));
final String targetSnapshot = "target-snapshot";
// Start a second snapshot that stalls on the data node, then start the clone on top of it.
final ActionFuture<CreateSnapshotResponse> snapshot2Future =
startFullSnapshotBlockedOnDataNode("snapshot-2", repoName, dataNode);
waitForBlock(dataNode, repoName);
final ActionFuture<AcknowledgedResponse> cloneFuture = startClone(repoName, sourceSnapshot, targetSnapshot, indexName);
// Both the blocked snapshot and the clone must be tracked as in-progress concurrently.
awaitNumberOfSnapshotsInProgress(2);
unblockNode(repoName, dataNode);
assertAcked(cloneFuture.get());
assertSuccessful(snapshot2Future);
}
/**
 * Verifies that a long-running clone (blocked on the master's shard-clone step) does not
 * prevent an unrelated snapshot from starting and finishing in the meantime.
 */
public void testLongRunningCloneAllowsConcurrentSnapshot() throws Exception {
// large snapshot pool so blocked snapshot threads from cloning don't prevent concurrent snapshot finalizations
final String masterNode = internalCluster().startMasterOnlyNode(LARGE_SNAPSHOT_POOL_SETTINGS);
internalCluster().startDataOnlyNode();
final String repoName = "test-repo";
createRepository(repoName, "mock");
final String indexSlow = "index-slow";
createIndexWithContent(indexSlow);
final String sourceSnapshot = "source-snapshot";
createFullSnapshot(repoName, sourceSnapshot);
final String targetSnapshot = "target-snapshot";
// Stall the clone at the shard-clone step on the master.
blockMasterOnShardClone(repoName);
final ActionFuture<AcknowledgedResponse> cloneFuture = startClone(repoName, sourceSnapshot, targetSnapshot, indexSlow);
waitForBlock(masterNode, repoName);
// A snapshot of a different index must be able to run to completion while the clone hangs.
final String indexFast = "index-fast";
createIndexWithRandomDocs(indexFast, randomIntBetween(20, 100));
assertSuccessful(clusterAdmin().prepareCreateSnapshot(repoName, "fast-snapshot")
.setIndices(indexFast).setWaitForCompletion(true).execute());
assertThat(cloneFuture.isDone(), is(false));
unblockNode(repoName, masterNode);
assertAcked(cloneFuture.get());
}
/**
 * Mirror image of {@code testLongRunningCloneAllowsConcurrentSnapshot}: a snapshot blocked on
 * the data node must not prevent a clone of an unrelated index from completing.
 */
public void testLongRunningSnapshotAllowsConcurrentClone() throws Exception {
internalCluster().startMasterOnlyNode();
final String dataNode = internalCluster().startDataOnlyNode();
final String repoName = "test-repo";
createRepository(repoName, "mock");
final String indexSlow = "index-slow";
createIndexWithContent(indexSlow);
final String sourceSnapshot = "source-snapshot";
createFullSnapshot(repoName, sourceSnapshot);
final String indexFast = "index-fast";
createIndexWithRandomDocs(indexFast, randomIntBetween(20, 100));
// Stall the snapshot of index-fast on the data node.
blockDataNode(repoName, dataNode);
final ActionFuture<CreateSnapshotResponse> snapshotFuture = clusterAdmin()
.prepareCreateSnapshot(repoName, "fast-snapshot").setIndices(indexFast).setWaitForCompletion(true).execute();
waitForBlock(dataNode, repoName);
// The clone of index-slow must complete while the snapshot is still blocked.
final String targetSnapshot = "target-snapshot";
assertAcked(startClone(repoName, sourceSnapshot, targetSnapshot, indexSlow).get());
assertThat(snapshotFuture.isDone(), is(false));
unblockNode(repoName, dataNode);
assertSuccessful(snapshotFuture);
}
/**
 * Verifies that attempting to clone from a snapshot that is currently being deleted fails
 * with a {@link ConcurrentSnapshotExecutionException} and does not interfere with the delete.
 */
public void testDeletePreventsClone() throws Exception {
    final String masterName = internalCluster().startMasterOnlyNode();
    internalCluster().startDataOnlyNode();
    final String repoName = "repo-name";
    createRepository(repoName, "mock");
    final String indexName = "index-1";
    createIndexWithRandomDocs(indexName, randomIntBetween(5, 10));
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    indexRandomDocs(indexName, randomIntBetween(20, 100));
    final String targetSnapshot = "target-snapshot";
    // Block the master's repository so the delete of the source snapshot stays in progress.
    blockNodeOnAnyFiles(repoName, masterName);
    final ActionFuture<AcknowledgedResponse> deleteFuture = startDeleteSnapshot(repoName, sourceSnapshot);
    waitForBlock(masterName, repoName);
    assertFalse(deleteFuture.isDone());
    // Starting a clone from the snapshot under deletion must be rejected immediately.
    ConcurrentSnapshotExecutionException ex = expectThrows(ConcurrentSnapshotExecutionException.class, () ->
        startClone(repoName, sourceSnapshot, targetSnapshot, indexName).actionGet());
    assertThat(ex.getMessage(), containsString("cannot clone from snapshot that is being deleted"));
    unblockNode(repoName, masterName);
    assertAcked(deleteFuture.get());
}
/**
 * Verifies that multiple queued clones of an index that has been deleted from the cluster —
 * optionally interleaved with regular snapshots — all complete once the initially blocked
 * clone is unblocked on the master.
 */
public void testBackToBackClonesForIndexNotInCluster() throws Exception {
    // large snapshot pool so blocked snapshot threads from cloning don't prevent concurrent snapshot finalizations
    final String masterNode = internalCluster().startMasterOnlyNode(LARGE_SNAPSHOT_POOL_SETTINGS);
    internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String indexBlocked = "index-blocked";
    createIndexWithContent(indexBlocked);
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    // Delete the index so all clones operate on an index that no longer exists in the cluster.
    assertAcked(admin().indices().prepareDelete(indexBlocked).get());
    final String targetSnapshot1 = "target-snapshot";
    // Block the first clone at the shard-clone step on the master.
    blockMasterOnShardClone(repoName);
    final ActionFuture<AcknowledgedResponse> cloneFuture1 = startClone(repoName, sourceSnapshot, targetSnapshot1, indexBlocked);
    waitForBlock(masterNode, repoName);
    assertThat(cloneFuture1.isDone(), is(false));
    // Queue up additional clones behind the blocked one; optionally also slow their init step.
    final int extraClones = randomIntBetween(1, 5);
    final List<ActionFuture<AcknowledgedResponse>> extraCloneFutures = new ArrayList<>(extraClones);
    final boolean slowInitClones = extraClones > 1 && randomBoolean();
    if (slowInitClones) {
        blockMasterOnReadIndexMeta(repoName);
    }
    for (int i = 0; i < extraClones; i++) {
        extraCloneFutures.add(startClone(repoName, sourceSnapshot, "target-snapshot-" + i, indexBlocked));
    }
    awaitNumberOfSnapshotsInProgress(1 + extraClones);
    for (ActionFuture<AcknowledgedResponse> extraCloneFuture : extraCloneFutures) {
        assertFalse(extraCloneFuture.isDone());
    }
    // Optionally queue regular snapshots behind the clones as well.
    final int extraSnapshots = randomIntBetween(0, 5);
    if (extraSnapshots > 0) {
        createIndexWithContent(indexBlocked);
    }
    final List<ActionFuture<CreateSnapshotResponse>> extraSnapshotFutures = new ArrayList<>(extraSnapshots);
    for (int i = 0; i < extraSnapshots; i++) {
        extraSnapshotFutures.add(startFullSnapshot(repoName, "extra-snap-" + i));
    }
    awaitNumberOfSnapshotsInProgress(1 + extraClones + extraSnapshots);
    for (ActionFuture<CreateSnapshotResponse> extraSnapshotFuture : extraSnapshotFutures) {
        assertFalse(extraSnapshotFuture.isDone());
    }
    // Releasing the block must let the first clone and everything queued behind it finish.
    unblockNode(repoName, masterNode);
    assertAcked(cloneFuture1.get());
    for (ActionFuture<AcknowledgedResponse> extraCloneFuture : extraCloneFutures) {
        assertAcked(extraCloneFuture.get());
    }
    for (ActionFuture<CreateSnapshotResponse> extraSnapshotFuture : extraSnapshotFutures) {
        assertSuccessful(extraSnapshotFuture);
    }
}
/**
 * Verifies repository consistency when the master fails over while a clone is blocked on
 * reading index metadata (the first step of a clone). The clone may either fail or, if the
 * request is retried across the failover, succeed — both outcomes must leave only
 * successful snapshots in the repository.
 */
public void testMasterFailoverDuringCloneStep1() throws Exception {
    internalCluster().startMasterOnlyNodes(3);
    internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String testIndex = "index-test";
    createIndexWithContent(testIndex);
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    // Block the master while it reads index metadata for the clone, then restart it.
    blockMasterOnReadIndexMeta(repoName);
    final String cloneName = "target-snapshot";
    final ActionFuture<AcknowledgedResponse> cloneFuture =
        startCloneFromDataNode(repoName, sourceSnapshot, cloneName, testIndex);
    awaitNumberOfSnapshotsInProgress(1);
    final String masterNode = internalCluster().getMasterName();
    waitForBlock(masterNode, repoName);
    internalCluster().restartNode(masterNode);
    boolean cloneSucceeded = false;
    try {
        cloneFuture.actionGet(TimeValue.timeValueSeconds(30L));
        cloneSucceeded = true;
    } catch (SnapshotException sne) {
        // ignored, most of the time we will throw here but we could randomly run into a situation where the data node retries the
        // snapshot on disconnect slowly enough for it to work out
    }
    awaitNoMoreRunningOperations();
    // Check if the clone operation worked out by chance as a result of the clone request being retried because of the master failover
    cloneSucceeded = cloneSucceeded ||
        getRepositoryData(repoName).getSnapshotIds().stream().anyMatch(snapshotId -> snapshotId.getName().equals(cloneName));
    assertAllSnapshotsSuccessful(getRepositoryData(repoName), cloneSucceeded ? 2 : 1);
}
/**
 * Verifies that cloning an index name that is not contained in the source snapshot fails
 * with an {@link IndexNotFoundException}.
 */
public void testFailsOnCloneMissingIndices() {
    internalCluster().startMasterOnlyNode();
    internalCluster().startDataOnlyNode();
    final String repoName = "repo-name";
    final Path repoPath = randomRepoPath();
    // Randomly snapshot either an empty cluster or one index; neither contains "does-not-exist".
    if (randomBoolean()) {
        createIndexWithContent("test-idx");
    }
    createRepository(repoName, "fs", repoPath);
    final String snapshotName = "snapshot";
    createFullSnapshot(repoName, snapshotName);
    expectThrows(IndexNotFoundException.class,
        () -> startClone(repoName, snapshotName, "target-snapshot", "does-not-exist").actionGet());
}
/**
 * Verifies repository consistency when the master fails over while a clone is blocked on the
 * shard-clone step (the second step of a clone). The clone request fails, but the repository
 * must end up containing two successful snapshots (source and the retried/finished clone).
 */
public void testMasterFailoverDuringCloneStep2() throws Exception {
    // large snapshot pool so blocked snapshot threads from cloning don't prevent concurrent snapshot finalizations
    internalCluster().startMasterOnlyNodes(3, LARGE_SNAPSHOT_POOL_SETTINGS);
    internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String testIndex = "index-test";
    createIndexWithContent(testIndex);
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    final String targetSnapshot = "target-snapshot";
    // Block the master on writing shard-level metadata, then restart it mid-clone.
    blockMasterOnShardClone(repoName);
    final ActionFuture<AcknowledgedResponse> cloneFuture = startCloneFromDataNode(repoName, sourceSnapshot, targetSnapshot, testIndex);
    awaitNumberOfSnapshotsInProgress(1);
    final String masterNode = internalCluster().getMasterName();
    waitForBlock(masterNode, repoName);
    internalCluster().restartNode(masterNode);
    expectThrows(SnapshotException.class, cloneFuture::actionGet);
    awaitNoMoreRunningOperations();
    assertAllSnapshotsSuccessful(getRepositoryData(repoName), 2);
}
/**
 * Verifies that a failure injected while finalizing a clone leaves the repository consistent:
 * the clone request throws, only the source snapshot remains, and it can still be deleted.
 */
public void testExceptionDuringShardClone() throws Exception {
    // large snapshot pool so blocked snapshot threads from cloning don't prevent concurrent snapshot finalizations
    internalCluster().startMasterOnlyNodes(3, LARGE_SNAPSHOT_POOL_SETTINGS);
    internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String testIndex = "index-test";
    createIndexWithContent(testIndex);
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    final String targetSnapshot = "target-snapshot";
    // Make the master fail when writing the snap- file during finalization.
    blockMasterFromFinalizingSnapshotOnSnapFile(repoName);
    final ActionFuture<AcknowledgedResponse> cloneFuture = startCloneFromDataNode(repoName, sourceSnapshot, targetSnapshot, testIndex);
    awaitNumberOfSnapshotsInProgress(1);
    final String masterNode = internalCluster().getMasterName();
    waitForBlock(masterNode, repoName);
    unblockNode(repoName, masterNode);
    expectThrows(SnapshotException.class, cloneFuture::actionGet);
    awaitNoMoreRunningOperations();
    // Only the source snapshot must remain, and the repository must still be fully usable.
    assertAllSnapshotsSuccessful(getRepositoryData(repoName), 1);
    assertAcked(startDeleteSnapshot(repoName, sourceSnapshot).get());
}
/**
 * Verifies that a clone is rejected when the shard to clone did not snapshot successfully in
 * the source snapshot (the source ends up PARTIAL after the data node is restarted mid-snapshot).
 */
public void testDoesNotStartOnBrokenSourceSnapshot() throws Exception {
    internalCluster().startMasterOnlyNode();
    final String dataNode = internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String testIndex = "index-test";
    createIndexWithContent(testIndex);
    final String sourceSnapshot = "source-snapshot";
    // Block the data node and restart it so the source snapshot completes as PARTIAL.
    blockDataNode(repoName, dataNode);
    final Client masterClient = internalCluster().masterClient();
    final ActionFuture<CreateSnapshotResponse> sourceSnapshotFuture = masterClient.admin().cluster()
        .prepareCreateSnapshot(repoName, sourceSnapshot).setWaitForCompletion(true).execute();
    awaitNumberOfSnapshotsInProgress(1);
    waitForBlock(dataNode, repoName);
    internalCluster().restartNode(dataNode);
    assertThat(sourceSnapshotFuture.get().getSnapshotInfo().state(), is(SnapshotState.PARTIAL));
    final SnapshotException sne = expectThrows(SnapshotException.class, () -> startClone(masterClient, repoName, sourceSnapshot,
        "target-snapshot", testIndex).actionGet(TimeValue.timeValueSeconds(30L)));
    assertThat(sne.getMessage(), containsString("Can't clone index [" + getRepositoryData(repoName).resolveIndexId(testIndex) +
        "] because its snapshot was not successful."));
}
/**
 * Verifies that a snapshot started while a clone (whose shard work finished) is blocked on
 * finalization still completes, and that the clone itself finishes successfully once unblocked.
 */
public void testStartSnapshotWithSuccessfulShardClonePendingFinalization() throws Exception {
    final String masterName = internalCluster().startMasterOnlyNode(LARGE_SNAPSHOT_POOL_SETTINGS);
    final String dataNode = internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String indexName = "test-idx";
    createIndexWithContent(indexName);
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    // Block the clone at the very end: writing the repository index file during finalization.
    blockMasterOnWriteIndexFile(repoName);
    final String cloneName = "clone-blocked";
    final ActionFuture<AcknowledgedResponse> blockedClone = startClone(repoName, sourceSnapshot, cloneName, indexName);
    waitForBlock(masterName, repoName);
    awaitNumberOfSnapshotsInProgress(1);
    // Also block the data node so the concurrent snapshot stays in progress until released.
    blockNodeOnAnyFiles(repoName, dataNode);
    final ActionFuture<CreateSnapshotResponse> otherSnapshot = startFullSnapshot(repoName, "other-snapshot");
    awaitNumberOfSnapshotsInProgress(2);
    assertFalse(blockedClone.isDone());
    unblockNode(repoName, masterName);
    awaitNumberOfSnapshotsInProgress(1);
    awaitMasterFinishRepoOperations();
    unblockNode(repoName, dataNode);
    assertAcked(blockedClone.get());
    assertEquals(getSnapshot(repoName, cloneName).state(), SnapshotState.SUCCESS);
    assertSuccessful(otherSnapshot);
}
/**
 * Verifies that a second clone started while a first clone (whose shard work finished) is
 * blocked on finalization completes, and both clones end up successful once unblocked.
 */
public void testStartCloneWithSuccessfulShardClonePendingFinalization() throws Exception {
    final String masterName = internalCluster().startMasterOnlyNode();
    internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String indexName = "test-idx";
    createIndexWithContent(indexName);
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    // Block the first clone while it writes the repository index file during finalization.
    blockMasterOnWriteIndexFile(repoName);
    final String cloneName = "clone-blocked";
    final ActionFuture<AcknowledgedResponse> blockedClone = startClone(repoName, sourceSnapshot, cloneName, indexName);
    waitForBlock(masterName, repoName);
    awaitNumberOfSnapshotsInProgress(1);
    // Queue a second clone behind the blocked one.
    final String otherCloneName = "other-clone";
    final ActionFuture<AcknowledgedResponse> otherClone = startClone(repoName, sourceSnapshot, otherCloneName, indexName);
    awaitNumberOfSnapshotsInProgress(2);
    assertFalse(blockedClone.isDone());
    unblockNode(repoName, masterName);
    awaitNoMoreRunningOperations(masterName);
    awaitMasterFinishRepoOperations();
    assertAcked(blockedClone.get());
    assertAcked(otherClone.get());
    assertEquals(getSnapshot(repoName, cloneName).state(), SnapshotState.SUCCESS);
    assertEquals(getSnapshot(repoName, otherCloneName).state(), SnapshotState.SUCCESS);
}
/**
 * Verifies that a clone started while a regular snapshot (whose shard work finished) is
 * blocked on finalization gets fully initialized with assigned shard clones, and that both
 * operations complete once the block is released.
 */
public void testStartCloneWithSuccessfulShardSnapshotPendingFinalization() throws Exception {
    final String masterName = internalCluster().startMasterOnlyNode(LARGE_SNAPSHOT_POOL_SETTINGS);
    internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String indexName = "test-idx";
    createIndexWithContent(indexName);
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    // Block the snapshot while it writes the repository index file during finalization.
    blockMasterOnWriteIndexFile(repoName);
    final ActionFuture<CreateSnapshotResponse> blockedSnapshot = startFullSnapshot(repoName, "snap-blocked");
    waitForBlock(masterName, repoName);
    awaitNumberOfSnapshotsInProgress(1);
    final String cloneName = "clone";
    final ActionFuture<AcknowledgedResponse> clone = startClone(repoName, sourceSnapshot, cloneName, indexName);
    logger.info("--> wait for clone to start fully with shards assigned in the cluster state");
    try {
        // The clone entry (second in the list) must have its per-shard clones populated.
        awaitClusterState(clusterState -> {
            final List<SnapshotsInProgress.Entry> entries =
                clusterState.custom(SnapshotsInProgress.TYPE, SnapshotsInProgress.EMPTY).entries();
            return entries.size() == 2 && entries.get(1).clones().isEmpty() == false;
        });
        assertFalse(blockedSnapshot.isDone());
    } finally {
        // Always release the block so the cluster can wind down even if the assertion fails.
        unblockNode(repoName, masterName);
    }
    awaitNoMoreRunningOperations();
    awaitMasterFinishRepoOperations();
    assertSuccessful(blockedSnapshot);
    assertAcked(clone.get());
    assertEquals(getSnapshot(repoName, cloneName).state(), SnapshotState.SUCCESS);
}
/**
 * Verifies that a clone started while an unrelated snapshot delete is running is queued with
 * all of its shard clones in state UNASSIGNED_QUEUED, and that both the delete and the clone
 * complete once the delete is unblocked.
 */
public void testStartCloneDuringRunningDelete() throws Exception {
    final String masterName = internalCluster().startMasterOnlyNode(LARGE_SNAPSHOT_POOL_SETTINGS);
    internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String indexName = "test-idx";
    createIndexWithContent(indexName);
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    final List<String> snapshotNames = createNSnapshots(repoName, randomIntBetween(1, 5));
    // Block the master so the delete of a random snapshot stays in progress.
    blockMasterOnWriteIndexFile(repoName);
    final ActionFuture<AcknowledgedResponse> deleteFuture = startDeleteSnapshot(repoName, randomFrom(snapshotNames));
    waitForBlock(masterName, repoName);
    awaitNDeletionsInProgress(1);
    final ActionFuture<AcknowledgedResponse> cloneFuture = startClone(repoName, sourceSnapshot, "target-snapshot", indexName);
    logger.info("--> waiting for snapshot clone to be fully initialized");
    awaitClusterState(state -> {
        for (SnapshotsInProgress.Entry entry : state.custom(SnapshotsInProgress.TYPE, SnapshotsInProgress.EMPTY).entries()) {
            if (entry.clones().isEmpty() == false) {
                assertEquals(sourceSnapshot, entry.source().getName());
                // While the delete runs, every shard clone must be queued, not started.
                for (ObjectCursor<SnapshotsInProgress.ShardSnapshotStatus> value : entry.clones().values()) {
                    assertSame(value.value, SnapshotsInProgress.ShardSnapshotStatus.UNASSIGNED_QUEUED);
                }
                return true;
            }
        }
        return false;
    });
    unblockNode(repoName, masterName);
    assertAcked(deleteFuture.get());
    assertAcked(cloneFuture.get());
}
/**
 * Verifies that two concurrent clones complete even when the second clone finishes its work
 * before the first (the first is blocked reading SnapshotInfo and writing the index file),
 * i.e. clones may start and complete out of submission order.
 */
public void testManyConcurrentClonesStartOutOfOrder() throws Exception {
    // large snapshot pool to allow for concurrently finishing clone while another clone is blocked on trying to load SnapshotInfo
    final String masterName = internalCluster().startMasterOnlyNode(LARGE_SNAPSHOT_POOL_SETTINGS);
    internalCluster().startDataOnlyNode();
    final String repoName = "test-repo";
    createRepository(repoName, "mock");
    final String testIndex = "test-idx";
    createIndexWithContent(testIndex);
    final String sourceSnapshot = "source-snapshot";
    createFullSnapshot(repoName, sourceSnapshot);
    // Delete the index so the clones only involve repository-side work.
    assertAcked(admin().indices().prepareDelete(testIndex).get());
    final MockRepository repo = getRepositoryOnMaster(repoName);
    // Block the first clone on reading SnapshotInfo and any clone on writing the index file.
    repo.setBlockOnceOnReadSnapshotInfoIfAlreadyBlocked();
    repo.setBlockOnWriteIndexFile();
    final ActionFuture<AcknowledgedResponse> clone1 = startClone(repoName, sourceSnapshot, "target-snapshot-1", testIndex);
    // wait for this snapshot to show up in the cluster state
    awaitNumberOfSnapshotsInProgress(1);
    waitForBlock(masterName, repoName);
    final ActionFuture<AcknowledgedResponse> clone2 = startClone(repoName, sourceSnapshot, "target-snapshot-2", testIndex);
    awaitNumberOfSnapshotsInProgress(2);
    // The second clone must reach a completed state while the first is still blocked.
    awaitClusterState(state -> state.custom(SnapshotsInProgress.TYPE, SnapshotsInProgress.EMPTY)
        .entries().stream().anyMatch(entry -> entry.state().completed()));
    repo.unblock();
    assertAcked(clone1.get());
    assertAcked(clone2.get());
}
/**
 * Starts a clone request through a client connected to a data node instead of the
 * default test client.
 */
private ActionFuture<AcknowledgedResponse> startCloneFromDataNode(
        String repoName, String sourceSnapshot, String targetSnapshot, String... indices) {
    return startClone(dataNodeClient(), repoName, sourceSnapshot, targetSnapshot, indices);
}
/**
 * Starts a clone request through the default test client.
 */
private ActionFuture<AcknowledgedResponse> startClone(
        String repoName, String sourceSnapshot, String targetSnapshot, String... indices) {
    return startClone(client(), repoName, sourceSnapshot, targetSnapshot, indices);
}
/**
 * Issues a clone-snapshot request via the given client, cloning {@code indices} from
 * {@code sourceSnapshot} into {@code targetSnapshot}, and returns the request future.
 */
private static ActionFuture<AcknowledgedResponse> startClone(
        Client client, String repoName, String sourceSnapshot, String targetSnapshot, String... indices) {
    return client.admin()
        .cluster()
        .prepareCloneSnapshot(repoName, sourceSnapshot, targetSnapshot)
        .setIndices(indices)
        .execute();
}
/**
 * Makes the mock repository on the current master block when reading index metadata blobs.
 */
private void blockMasterOnReadIndexMeta(String repoName) {
    final MockRepository masterRepository = AbstractSnapshotIntegTestCase.<MockRepository>getRepositoryOnMaster(repoName);
    masterRepository.setBlockOnReadIndexMeta();
}
/**
 * Makes the mock repository on the current master block when writing shard-level metadata,
 * i.e. during the shard-clone step.
 */
private void blockMasterOnShardClone(String repoName) {
    final MockRepository masterRepository = AbstractSnapshotIntegTestCase.<MockRepository>getRepositoryOnMaster(repoName);
    masterRepository.setBlockOnWriteShardLevelMeta();
}
/**
 * Asserts that the given {@link RepositoryData} contains exactly
 * {@code successfulSnapshotCount} snapshots and that every one of them completed
 * with state {@link SnapshotState#SUCCESS}.
 */
private static void assertAllSnapshotsSuccessful(RepositoryData repositoryData, int successfulSnapshotCount) {
    final Collection<SnapshotId> snapshotIds = repositoryData.getSnapshotIds();
    assertThat(snapshotIds, hasSize(successfulSnapshotCount));
    snapshotIds.forEach(
        snapshotId -> assertThat(repositoryData.getSnapshotState(snapshotId), is(SnapshotState.SUCCESS)));
}
/**
 * Reads the {@link BlobStoreIndexShardSnapshots} blob for the given shard and generation,
 * executing the blob read on the repository's generic thread pool and blocking the caller
 * via {@link PlainActionFuture#get} until it completes.
 */
private static BlobStoreIndexShardSnapshots readShardGeneration(BlobStoreRepository repository, RepositoryShardId repositoryShardId,
                                                                String generation) {
    return PlainActionFuture.get(f -> repository.threadPool().generic().execute(ActionRunnable.supply(f,
        () -> BlobStoreRepository.INDEX_SHARD_SNAPSHOTS_FORMAT.read(repository.shardContainer(repositoryShardId.index(),
            repositoryShardId.shardId()), generation, NamedXContentRegistry.EMPTY, MockBigArrays.NON_RECYCLING_INSTANCE))));
}
/**
 * Loads the {@link BlobStoreIndexShardSnapshot} for the given shard and snapshot id,
 * executing the read on the repository's generic thread pool and blocking the caller
 * via {@link PlainActionFuture#get} until it completes.
 */
private static BlobStoreIndexShardSnapshot readShardSnapshot(BlobStoreRepository repository, RepositoryShardId repositoryShardId,
                                                             SnapshotId snapshotId) {
    return PlainActionFuture.get(f -> repository.threadPool().generic().execute(ActionRunnable.supply(f,
        () -> repository.loadShardSnapshot(repository.shardContainer(repositoryShardId.index(), repositoryShardId.shardId()),
            snapshotId))));
}
}
| |
/*
* The Apache Software License, Version 1.1
*
* Copyright (c) 1999, 2000 The Apache Software Foundation. All rights
* reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The end-user documentation included with the redistribution, if
* any, must include the following acknowlegement:
* "This product includes software developed by the
* Apache Software Foundation (http://www.apache.org/)."
* Alternately, this acknowlegement may appear in the software itself,
* if and wherever such third-party acknowlegements normally appear.
*
* 4. The names "Ant" and "Apache Software
* Foundation" must not be used to endorse or promote products derived
* from this software without prior written permission. For written
* permission, please contact apache@apache.org.
*
* 5. Products derived from this software may not be called "Apache"
* nor may "Apache" appear in their names without prior written
* permission of the Apache Group.
*
* THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*/
package org.apache.tools.ant.gui.xml;
import org.w3c.dom.Attr;
import org.w3c.dom.CDATASection;
import org.w3c.dom.Comment;
import org.w3c.dom.DOMException;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentFragment;
import org.w3c.dom.DocumentType;
import org.w3c.dom.EntityReference;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
/**
* <code>DOMDocument</code> represents an abstraction of
* <code>org.w3c.dom.Document</code>.
*
* @version $Revision: 1.1.1.1 $
* @author Nick Davis<a href="mailto:nick_home_account@yahoo.com">nick_home_account@yahoo.com</a>
*/
/**
 * <code>DOMDocument</code> is a thin abstraction over
 * <code>org.w3c.dom.Document</code>. Most calls are forwarded verbatim to the
 * wrapped implementation; methods that return DOM nodes wrap the raw result in
 * the corresponding abstraction type through the document's
 * {@link DOMNodeFactory}.
 *
 * @version $Revision: 1.1.1.1 $
 * @author Nick Davis<a href="mailto:nick_home_account@yahoo.com">nick_home_account@yahoo.com</a>
 */
public class DOMDocument {

    /** The wrapped <code>org.w3c.dom.Document</code>. */
    private Document _impl;
    /** Factory used to wrap raw DOM nodes into abstraction objects. */
    private DOMNodeFactory _factory;
    /** Container associated with this document. */
    private DOMNodeContainer _container;
    /** true once any node in the document has been modified. */
    private boolean _modified = false;

    /**
     * Creates a new, empty DOMDocument. The implementation, factory and
     * container are supplied later via their setters.
     */
    public DOMDocument() {
    }

    /**
     * @return the node factory used to wrap returned nodes
     */
    public DOMNodeFactory getFactory() {
        return _factory;
    }

    /**
     * @param factory the node factory to use for wrapping returned nodes
     */
    void setFactory(DOMNodeFactory factory) {
        _factory = factory;
    }

    /**
     * @return the node container
     */
    public DOMNodeContainer getContainer() {
        return _container;
    }

    /**
     * @param container the node container
     */
    void setContainer(DOMNodeContainer container) {
        _container = container;
    }

    /**
     * Sets the wrapped document implementation.
     *
     * @param impl the <code>org.w3c.dom.Document</code> object to delegate to
     */
    public void setImpl(Document impl) {
        _impl = impl;
    }

    /**
     * Forwards to the implementation.
     * @return the document type
     */
    public DocumentType getDoctype() {
        return _impl.getDoctype();
    }

    /**
     * Forwards to the implementation.
     */
    public ProcessingInstruction createProcessingInstruction(String target, String data)
        throws DOMException {
        return _impl.createProcessingInstruction(target, data);
    }

    /**
     * Forwards to the implementation.
     */
    public EntityReference createEntityReference(String name)
        throws DOMException {
        return _impl.createEntityReference(name);
    }

    /**
     * Forwards to the implementation.
     */
    public Text createTextNode(String data) {
        return _impl.createTextNode(data);
    }

    /**
     * Forwards to the implementation.
     */
    public CDATASection createCDATASection(String data) throws DOMException {
        return _impl.createCDATASection(data);
    }

    /**
     * Forwards to the implementation and wraps the returned element.
     * @return a <code>DOMElement</code> object
     */
    public DOMElement getDocumentElement() {
        final org.w3c.dom.Element root = _impl.getDocumentElement();
        return (DOMElement) _factory.createDOMNode(root);
    }

    /**
     * Forwards to the implementation.
     * @return a <code>DOMImplementation</code> object
     */
    public DOMImplementation getImplementation() {
        return _impl.getImplementation();
    }

    /**
     * Forwards to the implementation.
     */
    public Attr createAttribute(String name) throws DOMException {
        return _impl.createAttribute(name);
    }

    /**
     * Forwards to the implementation.
     */
    public Comment createComment(String data) {
        return _impl.createComment(data);
    }

    /**
     * Forwards to the implementation.
     */
    public DocumentFragment createDocumentFragment() {
        return _impl.createDocumentFragment();
    }

    /**
     * Forwards to the implementation and wraps the returned node list.
     * @return a <code>DOMNodeList</code> object
     */
    public DOMNodeList getElementsByTagName(String tagName) {
        return new DOMNodeList(_factory, _impl.getElementsByTagName(tagName));
    }

    /**
     * Forwards to the implementation and wraps the returned element.
     * @return a <code>DOMElement</code> object
     */
    public DOMElement createElement(String tagName) throws DOMException {
        final org.w3c.dom.Element element = _impl.createElement(tagName);
        return (DOMElement) _factory.createDOMNode(element);
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode getPreviousSibling() {
        final org.w3c.dom.Node sibling = _impl.getPreviousSibling();
        return _factory.createDOMNode(sibling);
    }

    /**
     * Forwards to the implementation.
     */
    public void setNodeValue(String value) throws DOMException {
        _impl.setNodeValue(value);
    }

    /**
     * Forwards to the implementation.
     */
    public String getNodeValue() throws DOMException {
        return _impl.getNodeValue();
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode insertBefore(DOMNode newChild, DOMNode refChild) throws DOMException {
        final org.w3c.dom.Node inserted =
            _impl.insertBefore(newChild.getImpl(), refChild.getImpl());
        return _factory.createDOMNode(inserted);
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode getParentNode() {
        final org.w3c.dom.Node parent = _impl.getParentNode();
        return _factory.createDOMNode(parent);
    }

    /**
     * Forwards to the implementation.
     */
    public boolean hasChildNodes() {
        return _impl.hasChildNodes();
    }

    /**
     * Forwards to the implementation.
     */
    public String getNodeName() {
        return _impl.getNodeName();
    }

    /**
     * Forwards to the implementation and wraps the returned attribute map.
     * @return a <code>NamedDOMNodeMap</code> object
     */
    public NamedDOMNodeMap getAttributes() {
        return new NamedDOMNodeMap(_factory, _impl.getAttributes());
    }

    /**
     * Forwards to the implementation.
     */
    public short getNodeType() {
        return _impl.getNodeType();
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode removeChild(DOMNode oldChild) throws DOMException {
        final org.w3c.dom.Node removed = _impl.removeChild(oldChild.getImpl());
        return _factory.createDOMNode(removed);
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode appendChild(DOMNode newChild) throws DOMException {
        final org.w3c.dom.Node appended = _impl.appendChild(newChild.getImpl());
        return _factory.createDOMNode(appended);
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode getNextSibling() {
        final org.w3c.dom.Node sibling = _impl.getNextSibling();
        return _factory.createDOMNode(sibling);
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode getLastChild() {
        final org.w3c.dom.Node child = _impl.getLastChild();
        return _factory.createDOMNode(child);
    }

    /**
     * Forwards to the implementation and wraps the returned node list.
     * @return a <code>DOMNodeList</code> object
     */
    public DOMNodeList getChildNodes() {
        return new DOMNodeList(_factory, _impl.getChildNodes());
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode getFirstChild() {
        final org.w3c.dom.Node child = _impl.getFirstChild();
        return _factory.createDOMNode(child);
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode cloneNode(boolean deep) {
        final org.w3c.dom.Node copy = _impl.cloneNode(deep);
        return _factory.createDOMNode(copy);
    }

    /**
     * Forwards to the implementation.
     */
    public Document getOwnerDocument() {
        return _impl.getOwnerDocument();
    }

    /**
     * Forwards to the implementation and wraps the returned node.
     * @return a <code>DOMNode</code> object
     */
    public DOMNode replaceChild(DOMNode newChild, DOMNode oldChild) throws DOMException {
        final org.w3c.dom.Node replaced =
            _impl.replaceChild(newChild.getImpl(), oldChild.getImpl());
        return _factory.createDOMNode(replaced);
    }

    /**
     * @return true if any node in the document has been modified
     */
    public boolean isModified() {
        return _modified;
    }

    /**
     * Sets the modified flag.
     *
     * @param modified the new value
     */
    public void setModified(boolean modified) {
        _modified = modified;
    }
}
| |
package voldemort.store.bdb.stats;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicLong;
import voldemort.VoldemortException;
import voldemort.annotations.jmx.JmxGetter;
import voldemort.annotations.jmx.JmxOperation;
import voldemort.utils.CachedCallable;
import voldemort.utils.Utils;
import com.sleepycat.je.Database;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.DatabaseStats;
import com.sleepycat.je.Environment;
import com.sleepycat.je.EnvironmentConfig;
import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.EnvironmentStats;
import com.sleepycat.je.LockTimeoutException;
import com.sleepycat.je.StatsConfig;
public class BdbEnvironmentStats {
// Don't fetch entry count/btree stats more than twice a day
private final static long INVASIVE_STATS_TTL_MS = 12 * 3600 * 1000;
private final Environment environment;
private final Database database;
private final CachedCallable<EnvironmentStats> fastStats;
private final CachedCallable<SpaceUtilizationStats> fastSpaceStats;
private final CachedCallable<Long> entryCount;
private final CachedCallable<DatabaseStats> btreeStats;
private final boolean exposeSpaceStats;
private final AtomicLong numExceptions;
private final AtomicLong numLockTimeoutExceptions;
private final AtomicLong numEnvironmentFailureExceptions;
/**
 * Creates a stats holder for the given BDB environment and database.
 * Cheap ("fast") environment and space statistics are cached for {@code ttlMs}
 * milliseconds; the invasive entry-count and btree statistics are cached for
 * {@link #INVASIVE_STATS_TTL_MS} instead.
 *
 * @param environment the BDB environment to report on
 * @param database the BDB database to report on
 * @param ttlMs cache TTL in milliseconds for the cheap statistics
 * @param exposeSpaceUtil whether space-utilization statistics are exposed
 */
public BdbEnvironmentStats(Environment environment,
                           Database database,
                           long ttlMs,
                           boolean exposeSpaceUtil) {
    this.environment = environment;
    this.database = database;
    this.exposeSpaceStats = exposeSpaceUtil;

    // Cheap environment stats, cached with the caller-supplied TTL.
    final Callable<EnvironmentStats> envStatsSource = new Callable<EnvironmentStats>() {

        public EnvironmentStats call() throws Exception {
            return getEnvironmentStats(true);
        }
    };
    fastStats = new CachedCallable<EnvironmentStats>(envStatsSource, ttlMs);

    // Space-utilization stats, cached with the caller-supplied TTL.
    final Callable<SpaceUtilizationStats> spaceStatsSource = new Callable<SpaceUtilizationStats>() {

        public SpaceUtilizationStats call() throws Exception {
            return getSpaceUtilizationStats();
        }
    };
    fastSpaceStats = new CachedCallable<SpaceUtilizationStats>(spaceStatsSource, ttlMs);

    // Entry count is invasive to compute, so it uses the long invasive TTL.
    final Callable<Long> entryCountSource = new Callable<Long>() {

        public Long call() throws Exception {
            return getEntryCountUncached();
        }
    };
    entryCount = new CachedCallable<Long>(entryCountSource, INVASIVE_STATS_TTL_MS);

    // Btree stats are invasive to compute, so they use the long invasive TTL too.
    final Callable<DatabaseStats> btreeStatsSource = new Callable<DatabaseStats>() {

        public DatabaseStats call() throws Exception {
            return getBtreeStatsUncached();
        }
    };
    btreeStats = new CachedCallable<DatabaseStats>(btreeStatsSource, INVASIVE_STATS_TTL_MS);

    numExceptions = new AtomicLong(0);
    numLockTimeoutExceptions = new AtomicLong(0);
    numEnvironmentFailureExceptions = new AtomicLong(0);
}
/**
 * Fetches statistics straight from the BDB environment.
 *
 * @param fast if true, restricts collection to the statistics that are cheap to gather
 */
private EnvironmentStats getEnvironmentStats(boolean fast) {
    return environment.getStats(new StatsConfig().setFast(fast));
}
/** Computes a fresh (uncached) space-utilization view of the environment. */
private SpaceUtilizationStats getSpaceUtilizationStats() {
    return new SpaceUtilizationStats(environment);
}
/**
 * Returns the cached space-utilization stats, recomputing them when the cache TTL has
 * expired. Any failure is rethrown wrapped in a {@link VoldemortException}.
 */
private SpaceUtilizationStats getFastSpaceUtilizationStats() {
    try {
        return fastSpaceStats.call();
    } catch(Exception e) {
        throw new VoldemortException(e);
    }
}
/**
 * Returns the cached fast environment stats, recomputing them when the cache TTL has
 * expired. Any failure is rethrown wrapped in a {@link VoldemortException}.
 */
private EnvironmentStats getFastStats() {
    try {
        return fastStats.call();
    } catch(Exception e) {
        throw new VoldemortException(e);
    }
}
private Long getEntryCountUncached() {
return database.count();
}
/**
 * Collects detailed (non-fast) BTree statistics straight from the database.
 * Fast stats do not provide the detailed BTree structure; this scan is
 * invasive and will affect performance, so callers normally use the cached
 * {@code btreeStats} wrapper.
 */
public DatabaseStats getBtreeStatsUncached() throws Exception {
    StatsConfig fullScanConfig = new StatsConfig();
    fullScanConfig.setFast(false);
    return database.getStats(fullScanConfig);
}
/**
 * Tallies an exception raised by the BDB layer. Every exception bumps the
 * overall counter; lock-timeout and environment-failure exceptions are also
 * bucketed into their dedicated counters.
 *
 * @param de the database exception to record
 */
public void reportException(DatabaseException de) {
    numExceptions.incrementAndGet();
    final boolean isLockTimeout = de instanceof LockTimeoutException;
    if(isLockTimeout) {
        numLockTimeoutExceptions.incrementAndGet();
    } else if(de instanceof EnvironmentFailureException) {
        numEnvironmentFailureExceptions.incrementAndGet();
    }
}
// Full fast-stats dump, mainly for ad-hoc inspection via JMX.
@JmxGetter(name = "FastStatsAsString")
public String getFastStatsAsString() {
    return getFastStats().toString();
}

// 1. Caching
@JmxGetter(name = "NumCacheMiss")
public long getNumCacheMiss() {
    return getFastStats().getNCacheMiss();
}
@JmxGetter(name = "NumNotResident")
public long getNumNotResident() {
    return getFastStats().getNNotResident();
}
// Bytes used in the shared (cross-environment) cache.
@JmxGetter(name = "TotalCacheSize")
public long getTotalCacheSize() {
    return getFastStats().getSharedCacheTotalBytes();
}
// Bytes allotted to this environment's cache.
@JmxGetter(name = "AllotedCacheSize")
public long getAllotedCacheSize() {
    return getFastStats().getCacheTotalBytes();
}
// NOTE(review): method name says "EvictedLNs" but both the JMX name and the
// underlying metric are eviction *passes* — confirm before relying on either.
@JmxGetter(name = "EvictionPasses")
public long getEvictedLNs() {
    return getFastStats().getNEvictPasses();
}
@JmxGetter(name = "BINFetches")
public long getBINFetches() {
    return getFastStats().getNBINsFetch();
}
@JmxGetter(name = "BINFetchMisses")
public long getBINFetchMisses() {
    return getFastStats().getNBINsFetchMiss();
}
@JmxGetter(name = "INFetches")
public long getINFetches() {
    return getFastStats().getNUpperINsFetch();
}
@JmxGetter(name = "INFetchMisses")
public long getINFetchMisses() {
    return getFastStats().getNUpperINsFetchMiss();
}
@JmxGetter(name = "LNFetches")
public long getLNFetches() {
    return getFastStats().getNLNsFetch();
}
@JmxGetter(name = "LNFetchMisses")
public long getLNFetchMisses() {
    return getFastStats().getNLNsFetchMiss();
}
@JmxGetter(name = "CachedBINs")
public long getCachedBINs() {
    return getFastStats().getNCachedBINs();
}
@JmxGetter(name = "CachedINs")
public long getCachedUpperINs() {
    return getFastStats().getNCachedUpperINs();
}
// Evictions are summed across all eviction sources (cache-mode, critical,
// daemon, manual).
@JmxGetter(name = "EvictedBINs")
public long getEvictedBINs() {
    EnvironmentStats stats = getFastStats();
    return stats.getNBINsEvictedCacheMode() + stats.getNBINsEvictedCritical()
           + stats.getNBINsEvictedDaemon() + stats.getNBINsEvictedManual();
}
@JmxGetter(name = "EvictedINs")
public long getEvictedINs() {
    EnvironmentStats stats = getFastStats();
    return stats.getNUpperINsEvictedCacheMode() + stats.getNUpperINsEvictedCritical()
           + stats.getNUpperINsEvictedDaemon() + stats.getNUpperINsEvictedManual();
}

// 2. IO
@JmxGetter(name = "NumRandomWrites")
public long getNumRandomWrites() {
    return getFastStats().getNRandomWrites();
}
@JmxGetter(name = "NumRandomWriteBytes")
public long getNumRandomWriteBytes() {
    return getFastStats().getNRandomWriteBytes();
}
@JmxGetter(name = "NumRandomReads")
public long getNumRandomReads() {
    return getFastStats().getNRandomReads();
}
@JmxGetter(name = "NumRandomReadBytes")
public long getNumRandomReadBytes() {
    return getFastStats().getNRandomReadBytes();
}
@JmxGetter(name = "NumSequentialWrites")
public long getNumSequentialWrites() {
    return getFastStats().getNSequentialWrites();
}
@JmxGetter(name = "NumSequentialWriteBytes")
public long getNumSequentialWriteBytes() {
    return getFastStats().getNSequentialWriteBytes();
}
@JmxGetter(name = "NumSequentialReads")
public long getNumSequentialReads() {
    return getFastStats().getNSequentialReads();
}
@JmxGetter(name = "NumSequentialReadBytes")
public long getNumSequentialReadBytes() {
    return getFastStats().getNSequentialReadBytes();
}
@JmxGetter(name = "NumFSyncs")
public long getNumFSyncs() {
    return getFastStats().getNFSyncs();
}

// 3. Cleaning & Checkpointing
@JmxGetter(name = "NumCleanerEntriesRead")
public long getNumCleanerEntriesRead() {
    return getFastStats().getNCleanerEntriesRead();
}
@JmxGetter(name = "FileDeletionBacklog")
public long getFileDeletionBacklog() {
    return getFastStats().getFileDeletionBacklog();
}
// Approximates the backlog in bytes by multiplying the backlog file count by
// the configured maximum log-file size.
@JmxGetter(name = "FileDeletionBacklogBytes")
public long getFileDeletionBacklogBytes() {
    String logFileMaxStr = environment.getConfig()
                                      .getConfigParam(EnvironmentConfig.LOG_FILE_MAX);
    long logFileMax = Long.parseLong(logFileMaxStr);
    return getFileDeletionBacklog() * logFileMax;
}
@JmxGetter(name = "CleanerBacklog")
public long getCleanerBacklog() {
    return getFastStats().getCleanerBacklog();
}
@JmxGetter(name = "NumCleanerRuns")
public long getNumCleanerRuns() {
    return getFastStats().getNCleanerRuns();
}
@JmxGetter(name = "NumCleanerDeletions")
public long getNumCleanerDeletions() {
    return getFastStats().getNCleanerDeletions();
}
@JmxGetter(name = "NumCheckpoints")
public long getNumCheckpoints() {
    return getFastStats().getNCheckpoints();
}
// Space getters return 0 when space stats are disabled, since computing them
// is expensive (see exposeSpaceStats).
@JmxGetter(name = "TotalSpace")
public long getTotalSpace() {
    if(this.exposeSpaceStats)
        return getFastSpaceUtilizationStats().getTotalSpaceUsed();
    else
        return 0;
}
@JmxGetter(name = "TotalSpaceUtilized")
public long getTotalSpaceUtilized() {
    if(this.exposeSpaceStats)
        return getFastSpaceUtilizationStats().getTotalSpaceUtilized();
    else
        return 0;
}
// NOTE(review): unlike the two getters above, this one is NOT guarded by
// exposeSpaceStats — confirm whether that is intentional.
@JmxGetter(name = "UtilizationSummary", description = "Displays the disk space utilization for an environment.")
public String getUtilizationSummaryAsString() {
    return getFastSpaceUtilizationStats().getSummariesAsString();
}

// 4. Latching/Locking
// NOTE(review): JMX name "BtreeLatches" vs. metric "relatches required" —
// verify which semantics consumers expect.
@JmxGetter(name = "BtreeLatches")
public long getBtreeLatches() {
    return getFastStats().getRelatchesRequired();
}
@JmxGetter(name = "NumAcquiresWithContention")
public long getNumAcquiresWithContention() {
    return getFastStats().getNAcquiresWithContention();
}
@JmxGetter(name = "NumAcquiresNoWaiters")
public long getNumAcquiresNoWaiters() {
    return getFastStats().getNAcquiresNoWaiters();
}

// 5. Exceptions & general statistics
@JmxGetter(name = "numExceptions")
public long getNumExceptions() {
    return numExceptions.longValue();
}
@JmxGetter(name = "numLockTimeoutExceptions")
public long getNumLockTimeoutExceptions() {
    return numLockTimeoutExceptions.longValue();
}
@JmxGetter(name = "numEnvironmentFailureExceptions")
public long getNumEnvironmentFailureExceptions() {
    return numEnvironmentFailureExceptions.longValue();
}
// Operations (not getters) because they may be expensive; results are cached
// with the invasive-stats TTL.
@JmxOperation(description = "Obtain the number of k-v entries in the store")
public long getEntryCount() throws Exception {
    return entryCount.call();
}
@JmxOperation(description = "Obtain statistics about the BTree Index for a store")
public String getBtreeStats() throws Exception {
    return btreeStats.call().toString();
}

// Compound statistics derived from raw statistics
@JmxGetter(name = "NumWritesTotal")
public long getNumWritesTotal() {
    return getNumRandomWrites() + getNumSequentialWrites();
}
@JmxGetter(name = "NumWriteBytesTotal")
public long getNumWriteBytesTotal() {
    return getNumSequentialWriteBytes() + getNumRandomWriteBytes();
}
@JmxGetter(name = "NumReadsTotal")
public long getNumReadsTotal() {
    return getNumRandomReads() + getNumSequentialReads();
}
@JmxGetter(name = "NumReadBytesTotal")
public long getNumReadBytesTotal() {
    return getNumRandomReadBytes() + getNumSequentialReadBytes();
}
@JmxGetter(name = "PercentRandomWrites")
public double getPercentRandomWrites() {
    return Utils.safeGetPercentage(getNumRandomWrites(), getNumWritesTotal());
}
@JmxGetter(name = "PercentageRandomWriteBytes")
public double getPercentageRandomWriteBytes() {
    return Utils.safeGetPercentage(getNumRandomWriteBytes(), getNumRandomWriteBytes()
                                   + getNumSequentialWriteBytes());
}
@JmxGetter(name = "PercentageRandomReads")
public double getPercentageRandomReads() {
    return Utils.safeGetPercentage(getNumRandomReads(), getNumReadsTotal());
}
/**
 * Fraction of read bytes that came from random (vs. sequential) reads.
 * Fix: the numerator previously used {@code getNumRandomWriteBytes()},
 * mixing write bytes into a read-bytes ratio; it now uses
 * {@code getNumRandomReadBytes()}, mirroring getPercentageRandomWriteBytes().
 */
@JmxGetter(name = "PercentageRandomReadBytes")
public double getPercentageRandomReadBytes() {
    return Utils.safeGetPercentage(getNumRandomReadBytes(), getNumRandomReadBytes()
                                   + getNumSequentialReadBytes());
}
// Fraction of total operations that were reads.
@JmxGetter(name = "PercentageReads")
public double getPercentageReads() {
    return Utils.safeGetPercentage(getNumReadsTotal(), getNumReadsTotal() + getNumWritesTotal());
}
@JmxGetter(name = "PercentageReadBytes")
public double getPercentageReadBytes() {
    return Utils.safeGetPercentage(getNumReadBytesTotal(), getNumWriteBytesTotal()
                                   + getNumReadBytesTotal());
}
// NOTE(review): assumes safeGetPercentage returns a 0..1 fraction, so the
// complement is taken against 1.0 — confirm against Utils.safeGetPercentage.
@JmxGetter(name = "PercentageCacheHits")
public double getPercentageCacheHits() {
    return 1.0d - getPercentageCacheMisses();
}
// NOTE(review): denominator is total read+write operations, not total cache
// fetches — verify this is the intended miss ratio.
@JmxGetter(name = "PercentageCacheMisses")
public double getPercentageCacheMisses() {
    return Utils.safeGetPercentage(getNumCacheMiss(), getNumReadsTotal() + getNumWritesTotal());
}
// Share of latch acquisitions that saw contention.
@JmxGetter(name = "PercentageContended")
public double getPercentageContended() {
    return Utils.safeGetPercentage(getNumAcquiresWithContention(),
                                   getNumAcquiresWithContention() + getNumAcquiresNoWaiters());
}
// Utilized / total disk space (both 0 when space stats are disabled).
@JmxGetter(name = "PercentageUtilization")
public double getPercentageUtilization() {
    return Utils.safeGetPercentage(getTotalSpaceUtilized(), getTotalSpace());
}
// Fetch-miss ratios per node type (BIN / upper IN / LN).
@JmxGetter(name = "PercentageBINMiss")
public double getPercentageBINMiss() {
    return Utils.safeGetPercentage(getBINFetchMisses(), getBINFetches());
}
@JmxGetter(name = "PercentageINMiss")
public double getPercentageINMiss() {
    return Utils.safeGetPercentage(getINFetchMisses(), getINFetches());
}
@JmxGetter(name = "PercentageLNMiss")
public double getPercentageLNMiss() {
    return Utils.safeGetPercentage(getLNFetchMisses(), getLNFetches());
}
}
| |
package acidhax.cordova.chromecast;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.api.CallbackContext;
import org.apache.cordova.api.CordovaInterface;
import org.apache.cordova.api.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.SharedPreferences;
import android.support.v7.media.MediaRouteSelector;
import android.support.v7.media.MediaRouter;
import android.support.v7.media.MediaRouter.RouteInfo;
import android.util.Log;
import com.google.android.gms.cast.CastMediaControlIntent;
public class Chromecast extends CordovaPlugin implements ChromecastOnMediaUpdatedListener, ChromecastOnSessionUpdatedListener {
// SharedPreferences file used to persist the last session/app ids across runs.
private static final String SETTINGS_NAME= "CordovaChromecastSettings";
// Android MediaRouter used to discover Cast routes; (re)fetched on the UI thread.
private MediaRouter mMediaRouter;
private MediaRouteSelector mMediaRouteSelector;
// volatile: route callbacks arrive on the UI thread while Cordova invokes
// plugin methods from other threads.
private volatile ChromecastMediaRouterCallback mMediaRouterCallback = new ChromecastMediaRouterCallback();
// Receiver application id used for ALL session requests (set in initialize()).
private String appId;
// True when origin_scoped auto-join should silently rejoin the previous session.
private boolean autoConnect = false;
private String lastSessionId = null;
private String lastAppId = null;
private SharedPreferences settings;
// The one active Cast session, or null; volatile for cross-thread visibility.
private volatile ChromecastSession currentSession;
/**
 * Mirrors a debug message into the webview's JS console.
 * Fix: the message is now escaped before being embedded in the generated
 * console.log('...') statement — previously any single quote or backslash in
 * the message produced broken (or injectable) JavaScript.
 */
private void log(String s) {
    String escaped = s.replace("\\", "\\\\").replace("'", "\\'");
    this.webView.sendJavascript("console.log('" + escaped + "');");
}
/**
 * Cordova plugin lifecycle hook: restores the last session/app ids from
 * SharedPreferences so origin_scoped auto-join can work after a restart.
 */
public void initialize(final CordovaInterface cordova, CordovaWebView webView) {
    super.initialize(cordova, webView);
    // Restore preferences
    this.settings = this.cordova.getActivity().getSharedPreferences(SETTINGS_NAME, 0);
    this.lastSessionId = settings.getString("lastSessionId", "");
    this.lastAppId = settings.getString("lastAppId", "");
}

// Plugin teardown. The session kill is intentionally left disabled so the
// receiver app keeps running when the Cordova activity is destroyed.
public void onDestroy() {
    super.onDestroy();
    if (this.currentSession != null) {
        // this.currentSession.kill(new ChromecastSessionCallback() {
        // void onSuccess(Object object) { }
        // void onError(String reason) {}
        // });
    }
}
/**
 * Reflection-based dispatcher: finds a public method on this class whose name
 * matches {@code action} and whose parameter list matches the JS argument
 * types (plus a trailing CallbackContext slot), then invokes it.
 */
@Override
public boolean execute(String action, JSONArray args, CallbackContext cbContext) throws JSONException {
    try {
        Log.d("CordCast",action +", args: "+ args.toString());
        Method[] list = this.getClass().getMethods();
        Method methodToExecute = null;
        for (Method method : list) {
            if (method.getName().equals(action)) {
                //Log.d("CordCast"," -> Method found: "+ method.getName());
                Type[] types = method.getGenericParameterTypes();
                if (args.length() + 1 == types.length) { // +1 is the cbContext
                    //Log.d("CordCast"," -> Args length OK");
                    // if (action.equals("setMediaVolume")) { Log.d("CordCast"," -> Arg: "+ args.get(0).getClass()); }
                    boolean isValid = true;
                    for (int i = 0; i < args.length(); i++) {
                        // NOTE(review): exact Class identity match — relies on JSON
                        // values boxing to exactly the declared parameter types,
                        // hence the Integer/Double overloads elsewhere in the class.
                        Class arg = args.get(i).getClass();
                        if (types[i] == arg) {
                            isValid = true;
                        } else {
                            isValid = false;
                            break;
                        }
                    }
                    if (isValid) {
                        methodToExecute = method;
                        break;
                    }
                }
            }
        }
        if (methodToExecute != null) {
            Type[] types = methodToExecute.getGenericParameterTypes();
            Object[] variableArgs = new Object[types.length];
            for (int i = 0; i < args.length(); i++) {
                variableArgs[i] = args.get(i);
            }
            // The CallbackContext always rides in the last parameter slot.
            variableArgs[variableArgs.length-1] = cbContext;
            Class<?> r = methodToExecute.getReturnType();
            if (r == boolean.class) {
                return (Boolean) methodToExecute.invoke(this, variableArgs);
            } else {
                methodToExecute.invoke(this, variableArgs);
                return true;
            }
        } else {
            Log.e("CordCast","Could not execute: "+action);
            return false;
        }
    } catch (IllegalAccessException e) {
        e.printStackTrace();
        return false;
    } catch (IllegalArgumentException e) {
        e.printStackTrace();
        return false;
    } catch (InvocationTargetException e) {
        e.printStackTrace();
        return false;
    }
}
/**
 * Records sessionId as the last-joined session, both in memory and in
 * SharedPreferences so it survives app restarts.
 */
private void setLastSessionId(String sessionId) {
    this.lastSessionId = sessionId;
    SharedPreferences.Editor editor = this.settings.edit();
    editor.putString("lastSessionId", sessionId);
    editor.apply();
}
/**
 * Do everything you need to for "setup" - calling back sets the isAvailable and lets every function on the
 * javascript side actually do stuff.
 * @param callbackContext
 */
public boolean setup (CallbackContext callbackContext) {
    // No native setup is currently required; succeeding immediately unblocks
    // the JS API.
    callbackContext.success();
    return true;
}
/**
 * Initialize all of the MediaRouter stuff with the AppId.
 * For now, ignore the autoJoinPolicy and defaultActionPolicy; those will come later.
 * @param appId The appId we're going to use for ALL session requests
 * @param autoJoinPolicy tab_and_origin_scoped | origin_scoped | page_scoped
 * @param defaultActionPolicy create_session | cast_this_tab
 * @param callbackContext resolved once the MediaRouter callback is registered
 */
public boolean initialize (final String appId, String autoJoinPolicy, String defaultActionPolicy, final CallbackContext callbackContext) {
    final Activity activity = cordova.getActivity();
    final Chromecast that = this;
    this.appId = appId;
    log("initialize " + autoJoinPolicy + " " + appId + " " + this.lastAppId);
    if (autoJoinPolicy.equals("origin_scoped") && appId.equals(this.lastAppId)) {
        log("lastAppId " + lastAppId);
        autoConnect = true;
    } else if (autoJoinPolicy.equals("origin_scoped")) {
        // Fix: previously only the SharedPreferences copy was updated (and the
        // stale value was logged); keep the in-memory field in sync as well so
        // a same-run re-initialize can auto-connect.
        log("setting lastAppId " + appId);
        this.lastAppId = appId;
        this.settings.edit().putString("lastAppId", appId).apply();
    }
    // MediaRouter must be touched from the UI thread.
    activity.runOnUiThread(new Runnable() {
        public void run() {
            mMediaRouter = MediaRouter.getInstance(activity.getApplicationContext());
            mMediaRouteSelector = new MediaRouteSelector.Builder()
                .addControlCategory(CastMediaControlIntent.categoryForCast(appId))
                .build();
            mMediaRouterCallback.registerCallbacks(that);
            mMediaRouter.addCallback(mMediaRouteSelector, mMediaRouterCallback, MediaRouter.CALLBACK_FLAG_PERFORM_ACTIVE_SCAN);
            callbackContext.success();
            // Push current receiver availability and known routes down to JS.
            Chromecast.this.checkReceiverAvailable();
            Chromecast.this.emitAllRoutes(null);
        }
    });
    return true;
}
/**
 * Request the session for the previously sent appId
 * THIS IS WHAT LAUNCHES THE CHROMECAST PICKER
 * NOTE: Make a request session that is automatic - it'll do most of this code - refactor will be required
 * @param callbackContext
 */
public boolean requestSession (final CallbackContext callbackContext) {
    // Already connected: hand the existing session straight back to JS.
    if (this.currentSession != null) {
        callbackContext.success(this.currentSession.createSessionObject());
        return true;
    }
    this.setLastSessionId("");
    final Activity activity = cordova.getActivity();
    activity.runOnUiThread(new Runnable() {
        public void run() {
            mMediaRouter = MediaRouter.getInstance(activity.getApplicationContext());
            final List<RouteInfo> routeList = mMediaRouter.getRoutes();
            AlertDialog.Builder builder = new AlertDialog.Builder(activity);
            builder.setTitle("Choose a Chromecast");
            //CharSequence[] seq = new CharSequence[routeList.size() -1];
            // Parallel lists: display names shown in the dialog, and the
            // matching indices back into routeList.
            ArrayList<String> seq_tmp1 = new ArrayList<String>();
            final ArrayList<Integer> seq_tmp_cnt_final = new ArrayList<Integer>();
            // NOTE(review): starts at n = 1, presumably assuming route 0 is the
            // local "Phone" route — confirm; a Cast route at index 0 would be
            // silently skipped.
            for (int n = 1; n < routeList.size(); n++) {
                RouteInfo route = routeList.get(n);
                if (!route.getName().equals("Phone") && route.getId().indexOf("Cast") > -1) {
                    seq_tmp1.add(route.getName());
                    seq_tmp_cnt_final.add(n);
                    //seq[n-1] = route.getName();
                }
            }
            CharSequence[] seq;
            seq = seq_tmp1.toArray(new CharSequence[seq_tmp1.size()]);
            builder.setNegativeButton("cancel", new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    dialog.dismiss();
                    callbackContext.error("cancel");
                }
            });
            builder.setItems(seq, new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    // Map the dialog position back to the routeList index.
                    which = seq_tmp_cnt_final.get(which);
                    RouteInfo selectedRoute = routeList.get(which);
                    //RouteInfo selectedRoute = routeList.get(which + 1);
                    Chromecast.this.createSession(selectedRoute, callbackContext);
                }
            });
            builder.show();
        }
    });
    return true;
}
/**
 * Selects a route by its id and opens a session on it. If a session is
 * already active, that session is returned instead.
 * @param routeId id of the MediaRouter route to connect to
 * @param callbackContext receives the session object or "No route found"
 * @return always true (result is delivered via the callback)
 */
public boolean selectRoute (final String routeId, final CallbackContext callbackContext) {
    if (this.currentSession != null) {
        callbackContext.success(this.currentSession.createSessionObject());
        return true;
    }
    this.setLastSessionId("");
    final Activity activity = cordova.getActivity();
    activity.runOnUiThread(new Runnable() {
        public void run() {
            mMediaRouter = MediaRouter.getInstance(activity.getApplicationContext());
            RouteInfo match = null;
            for (RouteInfo candidate : mMediaRouter.getRoutes()) {
                if (candidate.getId().equals(routeId)) {
                    match = candidate;
                    break;
                }
            }
            if (match == null) {
                callbackContext.error("No route found");
            } else {
                Chromecast.this.createSession(match, callbackContext);
            }
        }
    });
    return true;
}
/**
 * Helper for the creating of a session! The user-selected RouteInfo needs to be passed to a new ChromecastSession
 * @param routeInfo
 * @param callbackContext null when invoked from onRouteSelected; the session
 *        is then announced to JS via sessionJoined instead of the callback
 */
private void createSession(RouteInfo routeInfo, final CallbackContext callbackContext) {
    this.currentSession = new ChromecastSession(routeInfo, this.cordova, this, this);
    // Launch the app.
    this.currentSession.launch(this.appId, new ChromecastSessionCallback() {
        @Override
        void onSuccess(Object object) {
            ChromecastSession session = (ChromecastSession) object;
            if (object == null) {
                onError("unknown");
            } else if (session == Chromecast.this.currentSession){
                // NOTE(review): if the launched session is no longer the current
                // one (e.g. replaced meanwhile), neither success nor error is
                // reported — confirm this is intended.
                Chromecast.this.setLastSessionId(Chromecast.this.currentSession.getSessionId());
                if (callbackContext != null) {
                    callbackContext.success(session.createSessionObject());
                } else {
                    Chromecast.this.webView.sendJavascript("chrome.cast._.sessionJoined(" + Chromecast.this.currentSession.createSessionObject().toString() + ");");
                }
            }
        }
        @Override
        void onError(String reason) {
            if (reason != null) {
                Chromecast.this.log("createSession onError " + reason);
                if (callbackContext != null) {
                    callbackContext.error(reason);
                }
            } else {
                if (callbackContext != null) {
                    callbackContext.error("unknown");
                }
            }
        }
    });
}

// Attempts to silently rejoin the previously saved session (auto-connect path).
// Only adopts the joined session if no current session appeared meanwhile.
private void joinSession(RouteInfo routeInfo) {
    ChromecastSession sessionJoinAttempt = new ChromecastSession(routeInfo, this.cordova, this, this);
    sessionJoinAttempt.join(this.appId, this.lastSessionId, new ChromecastSessionCallback() {
        @Override
        void onSuccess(Object object) {
            if (Chromecast.this.currentSession == null) {
                try {
                    Chromecast.this.currentSession = (ChromecastSession) object;
                    Chromecast.this.setLastSessionId(Chromecast.this.currentSession.getSessionId());
                    Chromecast.this.webView.sendJavascript("chrome.cast._.sessionJoined(" + Chromecast.this.currentSession.createSessionObject().toString() + ");");
                } catch (Exception e) {
                    log("wut.... " + e.getMessage() + e.getStackTrace());
                }
            }
        }
        @Override
        void onError(String reason) {
            log("sessionJoinAttempt error " +reason);
        }
    });
}
/**
 * Set the volume level on the receiver - this is a Chromecast volume, not a Media volume
 * @param newLevel
 */
public boolean setReceiverVolumeLevel (Double newLevel, CallbackContext callbackContext) {
    if (this.currentSession != null) {
        this.currentSession.setVolume(newLevel, genericCallback(callbackContext));
    } else {
        callbackContext.error("session_error");
    }
    return true;
}

// Integer overload: the reflection dispatcher in execute() matches argument
// classes exactly, and JS whole numbers arrive as Integer.
public boolean setReceiverVolumeLevel (Integer newLevel, CallbackContext callbackContext) {
    return this.setReceiverVolumeLevel(newLevel.doubleValue(), callbackContext);
}

/**
 * Sets the muted boolean on the receiver - this is a Chromecast mute, not a Media mute
 * @param muted
 * @param callbackContext
 */
public boolean setReceiverMuted (Boolean muted, CallbackContext callbackContext) {
    if (this.currentSession != null) {
        this.currentSession.setMute(muted, genericCallback(callbackContext));
    } else {
        callbackContext.error("session_error");
    }
    return true;
}

/**
 * Stop the session! Disconnect! All of that jazz!
 * @param callbackContext always receives "not_implemented" (see sessionStop)
 */
public boolean stopSession(CallbackContext callbackContext) {
    callbackContext.error("not_implemented");
    return true;
}
/**
 * Send a custom message to the receiver - we don't need this just yet... it was just simple to implement on the js side
 * @param namespace
 * @param message
 * @param callbackContext
 */
public boolean sendMessage (String namespace, String message, final CallbackContext callbackContext) {
    if (this.currentSession != null) {
        this.currentSession.sendMessage(namespace, message, new ChromecastSessionCallback() {
            @Override
            void onSuccess(Object object) {
                callbackContext.success();
            }
            @Override
            void onError(String reason) {
                callbackContext.error(reason);
            }
        });
    } else {
        // Fix: consistent with the other receiver/media commands — report the
        // missing session instead of leaving the JS callback hanging forever.
        callbackContext.error("session_error");
    }
    return true;
}

/**
 * Adds a listener to a specific namespace
 * @param namespace
 * @param callbackContext
 * @return always true (result delivered via the callback)
 */
public boolean addMessageListener(String namespace, CallbackContext callbackContext) {
    if (this.currentSession != null) {
        this.currentSession.addMessageListener(namespace);
        callbackContext.success();
    } else {
        // Fix: same missing-session reporting as above.
        callbackContext.error("session_error");
    }
    return true;
}
/**
 * Loads some media on the Chromecast using the media APIs
 * @param contentId The URL of the media item
 * @param contentType MIME type of the content
 * @param duration Duration of the content
 * @param streamType buffered | live | other
 * @param autoPlay Whether or not to automatically start playing the media
 * @param currentTime Where to begin playing from
 * @param metadata media metadata forwarded to the receiver
 * @param callbackContext
 */
public boolean loadMedia (String contentId, String contentType, Integer duration, String streamType, Boolean autoPlay, Double currentTime, JSONObject metadata, final CallbackContext callbackContext) {
    if (this.currentSession != null) {
        return this.currentSession.loadMedia(contentId, contentType, duration, streamType, autoPlay, currentTime, metadata,
            new ChromecastSessionCallback() {
                @Override
                void onSuccess(Object object) {
                    if (object == null) {
                        onError("unknown");
                    } else {
                        callbackContext.success((JSONObject) object);
                    }
                }
                @Override
                void onError(String reason) {
                    callbackContext.error(reason);
                }
            });
    } else {
        callbackContext.error("session_error");
        return false;
    }
}
// Integer-currentTime overload: JS whole numbers arrive boxed as Integer and
// the reflection dispatcher matches classes exactly.
public boolean loadMedia (String contentId, String contentType, Integer duration, String streamType, Boolean autoPlay, Integer currentTime, JSONObject metadata, final CallbackContext callbackContext) {
    // Double.valueOf replaces the deprecated boxing constructor new Double(...).
    return this.loadMedia (contentId, contentType, duration, streamType, autoPlay, Double.valueOf(currentTime.doubleValue()), metadata, callbackContext);
}
/**
 * Play on the current media in the current session
 * @param callbackContext
 * @return
 */
public boolean mediaPlay(CallbackContext callbackContext) {
    if (currentSession != null) {
        currentSession.mediaPlay(genericCallback(callbackContext));
    } else {
        callbackContext.error("session_error");
    }
    return true;
}
/**
 * Pause on the current media in the current session
 * @param callbackContext
 * @return
 */
public boolean mediaPause(CallbackContext callbackContext) {
    if (currentSession != null) {
        currentSession.mediaPause(genericCallback(callbackContext));
    } else {
        callbackContext.error("session_error");
    }
    return true;
}
/**
 * Seeks the current media in the current session
 * @param seekTime seek position in seconds (converted to ms below)
 * @param resumeState
 * @param callbackContext
 * @return
 */
public boolean mediaSeek(Integer seekTime, String resumeState, CallbackContext callbackContext) {
    if (currentSession != null) {
        // JS sends seconds; the Cast media API expects milliseconds.
        currentSession.mediaSeek(seekTime.longValue() * 1000, resumeState, genericCallback(callbackContext));
    } else {
        callbackContext.error("session_error");
    }
    return true;
}
/**
 * Set the volume on the media
 * @param level
 * @param callbackContext
 * @return
 */
public boolean setMediaVolume(Double level, CallbackContext callbackContext) {
    if (currentSession != null) {
        currentSession.mediaSetVolume(level, genericCallback(callbackContext));
    } else {
        callbackContext.error("session_error");
    }
    return true;
}
// Integer overload because JS thinks 1.0 == an integer, and the reflection
// dispatcher matches argument classes exactly.
public boolean setMediaVolume(Integer level, CallbackContext callbackContext) {
    return setMediaVolume((double)level,callbackContext);
}
/**
 * Set the muted on the media
 * @param muted
 * @param callbackContext
 * @return
 */
public boolean setMediaMuted(Boolean muted, CallbackContext callbackContext) {
    if (currentSession != null) {
        currentSession.mediaSetMuted(muted, genericCallback(callbackContext));
    } else {
        callbackContext.error("session_error");
    }
    return true;
}
/**
 * Stops the current media!
 * @param callbackContext
 * @return
 */
public boolean mediaStop(CallbackContext callbackContext) {
    if (currentSession != null) {
        currentSession.mediaStop(genericCallback(callbackContext));
    } else {
        callbackContext.error("session_error");
    }
    return true;
}
/**
 * Stops the session
 * @param callbackContext
 * @return
 */
public boolean sessionStop (CallbackContext callbackContext) {
    if (this.currentSession != null) {
        // Kill the receiver app, drop our reference, and forget the saved id
        // so we don't try to auto-rejoin a dead session.
        this.currentSession.kill(genericCallback(callbackContext));
        this.currentSession = null;
        this.setLastSessionId("");
    } else {
        callbackContext.success();
    }
    return true;
}
// Re-announces every currently-known Cast route to the JS side via routeAdded.
// callbackContext may be null (internal callers); note it is resolved
// immediately, before the UI-thread work actually runs.
public boolean emitAllRoutes(CallbackContext callbackContext) {
    final Activity activity = cordova.getActivity();
    activity.runOnUiThread(new Runnable() {
        public void run() {
            mMediaRouter = MediaRouter.getInstance(activity.getApplicationContext());
            List<RouteInfo> routeList = mMediaRouter.getRoutes();
            for (RouteInfo route : routeList) {
                // Same Cast-route filter used throughout: skip the local
                // "Phone" route and anything whose id doesn't mention "Cast".
                if (!route.getName().equals("Phone") && route.getId().indexOf("Cast") > -1) {
                    Chromecast.this.webView.sendJavascript("chrome.cast._.routeAdded(" + routeToJSON(route) + ")");
                }
            }
        }
    });
    if (callbackContext != null) {
        callbackContext.success();
    }
    return true;
}

/**
 * Checks to see how many receivers are available - emits the receiver status down to Javascript
 */
private void checkReceiverAvailable() {
    final Activity activity = cordova.getActivity();
    activity.runOnUiThread(new Runnable() {
        public void run() {
            mMediaRouter = MediaRouter.getInstance(activity.getApplicationContext());
            List<RouteInfo> routeList = mMediaRouter.getRoutes();
            boolean available = false;
            for (RouteInfo route: routeList) {
                if (!route.getName().equals("Phone") && route.getId().indexOf("Cast") > -1) {
                    available = true;
                    break;
                }
            }
            // A connected session counts as "available" even with no routes.
            if (available || (Chromecast.this.currentSession != null && Chromecast.this.currentSession.isConnected())) {
                Chromecast.this.webView.sendJavascript("chrome.cast._.receiverAvailable()");
            } else {
                Chromecast.this.webView.sendJavascript("chrome.cast._.receiverUnavailable()");
            }
        }
    });
}
/**
 * Builds a ChromecastSessionCallback that simply forwards success/failure to
 * the given Cordova CallbackContext (no result payload).
 *
 * @param callbackContext the Cordova callback to resolve
 * @return the forwarding callback adapter
 */
private ChromecastSessionCallback genericCallback (final CallbackContext callbackContext) {
    ChromecastSessionCallback forwarder = new ChromecastSessionCallback() {
        @Override
        public void onSuccess(Object object) {
            callbackContext.success();
        }
        @Override
        public void onError(String reason) {
            callbackContext.error(reason);
        }
    };
    return forwarder;
}
/**
 * Called when a route is discovered
 * @param router
 * @param route
 */
protected void onRouteAdded(MediaRouter router, final RouteInfo route) {
    // Auto-join path: only when origin_scoped matched the saved appId and no
    // session exists yet.
    if (this.autoConnect && this.currentSession == null && !route.getName().equals("Phone")) {
        log("Attempting to join route " + route.getName());
        this.joinSession(route);
    } else {
        log("For some reason, not attempting to join route " + route.getName() + ", " + this.currentSession + ", " + this.autoConnect);
    }
    if (!route.getName().equals("Phone") && route.getId().indexOf("Cast") > -1) {
        this.webView.sendJavascript("chrome.cast._.routeAdded(" + routeToJSON(route) + ")");
    }
    this.checkReceiverAvailable();
}
/**
 * Called when a discovered route is lost
 * @param router
 * @param route
 */
protected void onRouteRemoved(MediaRouter router, RouteInfo route) {
    this.checkReceiverAvailable();
    if (!route.getName().equals("Phone") && route.getId().indexOf("Cast") > -1) {
        this.webView.sendJavascript("chrome.cast._.routeRemoved(" + routeToJSON(route) + ")");
    }
}
/**
 * Called when a route is selected through the MediaRouter
 * @param router
 * @param route
 */
protected void onRouteSelected(MediaRouter router, RouteInfo route) {
    // null callbackContext: the session is announced via sessionJoined instead.
    this.createSession(route, null);
}
/**
 * Called when a route is unselected through the MediaRouter
 * @param router
 * @param route
 */
protected void onRouteUnselected(MediaRouter router, RouteInfo route) {}
/**
 * Converts a MediaRouter route into the {name, id} JSON object that the
 * javascript side expects.
 *
 * @param route the route to serialize
 * @return JSON object with "name" and "id" (empty on JSON failure)
 */
private JSONObject routeToJSON(RouteInfo route) {
    JSONObject json = new JSONObject();
    try {
        json.put("name", route.getName())
            .put("id", route.getId());
    } catch (JSONException e) {
        e.printStackTrace();
    }
    return json;
}
// Forwards media status updates from the session into the JS API.
@Override
public void onMediaUpdated(JSONObject media) {
    this.webView.sendJavascript("chrome.cast._.mediaUpdated(" + media.toString() +");");
}
// Forwards session liveness changes; clears currentSession when it dies so a
// new requestSession/selectRoute can proceed.
@Override
public void onSessionUpdated(boolean isAlive, JSONObject session) {
    if (isAlive) {
        this.webView.sendJavascript("chrome.cast._.sessionUpdated(true, " + session.toString() + ");");
    } else {
        log("SESSION DESTROYYYY");
        this.webView.sendJavascript("chrome.cast._.sessionUpdated(false, " + session.toString() + ");");
        this.currentSession = null;
    }
}
@Override
public void onMediaLoaded(JSONObject media) {
    this.webView.sendJavascript("chrome.cast._.mediaLoaded(" + media.toString() +");");
}
// NOTE(review): message/namespace are interpolated unescaped into a JS string
// literal — a quote in the receiver message would break (or inject into) the
// generated script; confirm messages are trusted.
@Override
public void onMessage(ChromecastSession session, String namespace, String message) {
    this.webView.sendJavascript("chrome.cast._.onMessage('" + session.getSessionId() +"', '" + namespace + "', '" + message + "')");
}
}
| |
/////////////////////////////////////////////////////////////////////////////////////////
//
// The MIT License (MIT)
//
// Copyright (c) 2014-2015 Keld Oelykke
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
/////////////////////////////////////////////////////////////////////////////////////////
package starkcoder.failfast.unit.objects.enums;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import starkcoder.failfast.FailFast;
import starkcoder.failfast.IFailFast;
import starkcoder.failfast.SFailFast;
import starkcoder.failfast.checks.Checker;
import starkcoder.failfast.checks.IChecker;
import starkcoder.failfast.contractors.CallContractor;
import starkcoder.failfast.contractors.ICallContractor;
import starkcoder.failfast.fails.FailFastException;
import starkcoder.failfast.fails.Failer;
import starkcoder.failfast.fails.IFailer;
import starkcoder.failfast.templates.comparables.IComparableLessOrEqualsTest;
/**
 * Fail-fast unit test of {@link IObjectEnumLessOrEqualsCheck} and
 * {@link IObjectEnumLessOrEqualsFail}.
 *
 * @author Keld Oelykke
 */
public class EnumLessOrEqualsTest implements IComparableLessOrEqualsTest<EBar>
{
  // Checker/failer pair fetched from the static fail-fast accessors in setUp().
  private IChecker checker;
  private IFailer failer;
  // "TestClass.testMethod" of the currently running test; exposed via toString()
  // so the fail-fast library can report this object as the caller.
  private String toString = null;
  @Override
  public String toString()
  {
    return this.toString;
  }
  // Captures the running test's class and method name for toString().
  @Rule
  public TestWatcher watcher = new TestWatcher()
  {
    protected void starting(Description description)
    {
      toString = description.getTestClass().getSimpleName() + "." + description.getMethodName();
    }
  };
  /**
   * Setup FailFast instances.
   */
  @Before
  public void setUp()
  {
    // this would be in your application startup section
    ICallContractor callContractor = new CallContractor();
    IFailFast failFastOrNull = new FailFast(new Checker(callContractor),
        new Failer(callContractor), callContractor);
    SFailFast.setFailFastOrNull(failFastOrNull);
    this.checker = SFailFast.getChecker();
    this.failer = SFailFast.getFailer();
  }
  /**
   * Clear FailFast instances.
   */
  @After
  public void tearDown()
  {
    // this would be in your application shutdown section
    SFailFast.setFailFastOrNull(null);
    this.checker = null;
    this.failer = null;
  }

  // 1st - caller checks

  // A null caller reference must be rejected by the checker.
  @Test(expected = IllegalArgumentException.class)
  public void testComparableLessOrEqualsCheckerCallerIsNull()
  {
    EBar valueA = EBar.VALUE_A;
    EBar valueB = EBar.VALUE_B;
    if (checker.isEnumLessOrEquals(null, valueA, valueB))
    {
      failer.failEnumLessOrEquals(this, "valueA", "valueB");
    }
  }
  // A null caller reference must be rejected by the failer.
  @Test(expected = IllegalArgumentException.class)
  public void testComparableLessOrEqualsFailerCallerIsNull()
  {
    EBar valueA = EBar.VALUE_A;
    EBar valueB = EBar.VALUE_B;
    if (checker.isEnumLessOrEquals(this, valueA, valueB))
    {
      failer.failEnumLessOrEquals(null, "valueA", "valueB");
    }
  }
  // The failer must reject a caller object different from the one that checked.
  // new String(...) is deliberate: two distinct caller object identities are required.
  @Test(expected = IllegalStateException.class)
  public void testComparableLessOrEqualsFailerCallerIsWrong()
  {
    EBar valueA = EBar.VALUE_A;
    EBar valueB = EBar.VALUE_B;
    if (checker.isEnumLessOrEquals(new String("Foo"), valueA, valueB))
    {
      failer.failEnumLessOrEquals(new String("Bar"), "valueA", "valueB");
    }
  }

  // 2nd - mismatch calls

  // Two check calls in a row without an intervening fail call breach the call contract.
  @Test(expected = IllegalStateException.class)
  public void testComparableLessOrEqualsMismatchCheckCheck()
  {
    EBar valueA = EBar.VALUE_A;
    EBar valueB = EBar.VALUE_B;
    if (checker.isEnumLessOrEquals(this, valueA, valueB))
    {
      checker.isEnumLessOrEquals(this, valueA, valueB);
    }
  }
  // A fail call with no preceding check call breaches the call contract.
  @Test(expected = IllegalStateException.class)
  public void testComparableLessOrEqualsMismatchFail()
  {
    failer.failEnumLessOrEquals(this, "valueA", "valueB");
  }
  // The fail call must match the kind of check that preceded it.
  @Test(expected = IllegalStateException.class)
  public void testComparableLessOrEqualsMismatchWrongCheck()
  {
    EBar valueA = EBar.VALUE_C;
    EBar valueB = EBar.VALUE_B;
    if (checker.isEnumGreater(this, valueA, valueB)) // wrong call
    {
      failer.failEnumLessOrEquals(this, "valueA", "valueB");
    }
  }
  @Test(expected = IllegalStateException.class)
  public void testComparableLessOrEqualsMismatchWrongFail()
  {
    EBar valueA = EBar.VALUE_A;
    EBar valueB = EBar.VALUE_B;
    if (checker.isEnumLessOrEquals(this, valueA, valueB))
    {
      failer.failEnumGreater(this, "valueA", "valueB"); // wrong call
    }
  }

  // 3rd - normal cases

  // valueA < valueB: check passes, failer throws and registers the exception.
  @Test(expected = FailFastException.class)
  public void testComparableLessFailNoMessage()
  {
    EBar valueA = EBar.VALUE_A;
    EBar valueB = EBar.VALUE_B;
    try
    {
      if (checker.isEnumLessOrEquals(this, valueA, valueB))
      {
        failer.failEnumLessOrEquals(this, "valueA", "valueB");
      }
    }
    catch (FailFastException failFastException)
    {
      assertEquals("Expected registered exception in failer", failFastException,
          failer.getFailFastExceptionOrNull());
      System.out.println(failFastException.getMessage());
      throw failFastException;
    }
  }
  // Same as above, but with an extra user message in the failure.
  @Test(expected = FailFastException.class)
  public void testComparableLessFailMessage()
  {
    EBar valueA = EBar.VALUE_A;
    EBar valueB = EBar.VALUE_B;
    try
    {
      if (checker.isEnumLessOrEquals(this, valueA, valueB))
      {
        failer.failEnumLessOrEquals(this, "valueA", "valueB", "Extra info goes here");
      }
    }
    catch (FailFastException failFastException)
    {
      assertEquals("Expected registered exception in failer", failFastException,
          failer.getFailFastExceptionOrNull());
      System.out.println(failFastException.getMessage());
      throw failFastException;
    }
  }
  // valueA == valueB: less-or-equals also holds, so the failer throws.
  @Test(expected = FailFastException.class)
  public void testComparableEqualsFailNoMessage()
  {
    EBar valueA = EBar.VALUE_C;
    EBar valueB = EBar.VALUE_C;
    try
    {
      if (checker.isEnumLessOrEquals(this, valueA, valueB))
      {
        failer.failEnumLessOrEquals(this, "valueA", "valueB");
      }
    }
    catch (FailFastException failFastException)
    {
      assertEquals("Expected registered exception in failer", failFastException,
          failer.getFailFastExceptionOrNull());
      System.out.println(failFastException.getMessage());
      throw failFastException;
    }
  }
  @Test(expected = FailFastException.class)
  public void testComparableEqualsFailMessage()
  {
    EBar valueA = EBar.VALUE_C;
    EBar valueB = EBar.VALUE_C;
    try
    {
      if (checker.isEnumLessOrEquals(this, valueA, valueB))
      {
        failer.failEnumLessOrEquals(this, "valueA", "valueB", "Extra info goes here");
      }
    }
    catch (FailFastException failFastException)
    {
      assertEquals("Expected registered exception in failer", failFastException,
          failer.getFailFastExceptionOrNull());
      System.out.println(failFastException.getMessage());
      throw failFastException;
    }
  }
  // valueA > valueB: the check is false, no failure is registered.
  @Test
  public void testComparableLessOrEqualsNoFail()
  {
    EBar valueA = EBar.VALUE_C;
    EBar valueB = EBar.VALUE_B;
    if (checker.isEnumLessOrEquals(this, valueA, valueB))
    {
      failer.failEnumLessOrEquals(this, "valueA", "valueB");
    }
    assertTrue("Expected valueA & valueB to pass the equals check", true);
    assertNull("Expected no registered exception in failer", failer.getFailFastExceptionOrNull());
  }
}
| |
/*
Copyright (C) 2001, 2008 United States Government as represented by
the Administrator of the National Aeronautics and Space Administration.
All Rights Reserved.
*/
package gov.nasa.worldwind.applications.gio.csw;
import gov.nasa.worldwind.applications.gio.xml.ElementParser;
import gov.nasa.worldwind.util.Logging;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
/**
* @author dcollins
* @version $Id: SearchResultsParser.java 5517 2008-07-15 23:36:34Z dcollins $
*/
public class SearchResultsParser extends ElementParser implements SearchResults
{
    /** Records parsed from the response, in document order. Never null. */
    private final List<Object> recordList;
    /** Attribute values captured from the SearchResults element; null/-1 if absent. */
    private String resultSetId;
    private ElementSetType elementSet;
    private String recordSchema;
    private int numberOfRecordsMatched;
    private int numberOfRecordsReturned;
    private int nextRecord;
    private String expires;

    public static final String ELEMENT_NAME = "SearchResults";
    private static final String RESULT_SET_ID_ATTRIBUTE_NAME = "resultSetId";
    private static final String ELEMENT_SET_ATTRIBUTE_NAME = "elementSet";
    private static final String RECORD_SCHEMA_ATTRIBUTE_NAME = "recordSchema";
    private static final String NUMBER_OF_RECORDS_MATCHED_ATTRIBUTE_NAME = "numberOfRecordsMatched";
    private static final String NUMBER_OF_RECORDS_RETURNED_ATTRIBUTE_NAME = "numberOfRecordsReturned";
    private static final String NEXT_RECORD_ATTRIBUTE_NAME = "nextRecord";
    private static final String EXPIRES_ATTRIBUTE_NAME = "expires";

    /**
     * Creates a parser for a CSW SearchResults element, capturing its known
     * attributes (attribute name comparison is case-insensitive).
     *
     * @param elementName the SAX element name, passed through to {@link ElementParser}
     * @param attributes  the element's SAX attributes
     * @throws IllegalArgumentException if attributes is null
     */
    public SearchResultsParser(String elementName, org.xml.sax.Attributes attributes)
    {
        super(elementName, attributes);
        if (attributes == null)
        {
            String message = Logging.getMessage("nullValue.AttributesIsNull");
            Logging.logger().severe(message);
            throw new IllegalArgumentException(message);
        }
        this.recordList = new ArrayList<Object>();
        for (int i = 0; i < attributes.getLength(); i++)
        {
            String attribName = attributes.getLocalName(i);
            if (RESULT_SET_ID_ATTRIBUTE_NAME.equalsIgnoreCase(attribName))
                this.resultSetId = attributes.getValue(i);
            else if (ELEMENT_SET_ATTRIBUTE_NAME.equalsIgnoreCase(attribName))
                this.elementSet = parseElementSetType(attributes.getValue(i));
            else if (RECORD_SCHEMA_ATTRIBUTE_NAME.equalsIgnoreCase(attribName))
                this.recordSchema = attributes.getValue(i);
            else if (NUMBER_OF_RECORDS_MATCHED_ATTRIBUTE_NAME.equalsIgnoreCase(attribName))
                this.numberOfRecordsMatched = parseInt(attributes.getValue(i));
            else if (NUMBER_OF_RECORDS_RETURNED_ATTRIBUTE_NAME.equalsIgnoreCase(attribName))
                this.numberOfRecordsReturned = parseInt(attributes.getValue(i));
            else if (NEXT_RECORD_ATTRIBUTE_NAME.equalsIgnoreCase(attribName))
                this.nextRecord = parseInt(attributes.getValue(i));
            else if (EXPIRES_ATTRIBUTE_NAME.equalsIgnoreCase(attribName))
                this.expires = attributes.getValue(i);
        }
    }

    /**
     * Maps an "elementSet" attribute value to its {@link ElementSetType}
     * (case-insensitive). Returns null for unrecognized or null input.
     */
    protected static ElementSetType parseElementSetType(String s)
    {
        ElementSetType type = null;
        if (ElementSetType.BRIEF.getType().equalsIgnoreCase(s))
            type = ElementSetType.BRIEF;
        else if (ElementSetType.FULL.getType().equalsIgnoreCase(s))
            type = ElementSetType.FULL;
        else if (ElementSetType.SUMMARY.getType().equalsIgnoreCase(s))
            type = ElementSetType.SUMMARY;
        return type;
    }

    /**
     * Parses a decimal integer attribute value.
     * Returns -1 when the value is null or not a valid integer; a parse
     * failure is logged rather than propagated so one bad attribute does
     * not abort the whole response parse.
     */
    protected static int parseInt(String s)
    {
        int i = -1;
        try
        {
            if (s != null)
                i = Integer.parseInt(s);
        }
        catch (NumberFormatException e) // only failure Integer.parseInt can raise here
        {
            String message = "csw.ErrorParsingValue " + s;
            Logging.logger().log(java.util.logging.Level.SEVERE, message, e);
        }
        return i;
    }

    // --- Record list access. Index-based mutators validate bounds explicitly
    // --- so callers get a logged, localized IllegalArgumentException.

    public int getRecordCount()
    {
        return this.recordList.size();
    }

    public int getIndex(Object o)
    {
        return this.recordList.indexOf(o);
    }

    public Object getRecord(int index)
    {
        if (index < 0 || index >= this.recordList.size())
        {
            String message = Logging.getMessage("generic.ArgumentOutOfRange", index);
            Logging.logger().severe(message);
            throw new IllegalArgumentException(message);
        }
        return this.recordList.get(index);
    }

    public void setRecord(int index, Object o)
    {
        if (index < 0 || index >= this.recordList.size())
        {
            String message = Logging.getMessage("generic.ArgumentOutOfRange", index);
            Logging.logger().severe(message);
            throw new IllegalArgumentException(message);
        }
        this.recordList.set(index, o);
    }

    public void addRecord(int index, Object o)
    {
        // index == size() is legal here: it appends.
        if (index < 0 || index > this.recordList.size())
        {
            String message = Logging.getMessage("generic.ArgumentOutOfRange", index);
            Logging.logger().severe(message);
            throw new IllegalArgumentException(message);
        }
        this.recordList.add(index, o);
    }

    public void addRecord(Object o)
    {
        this.recordList.add(o);
    }

    public void addRecords(Collection<?> c)
    {
        if (c == null)
        {
            String message = Logging.getMessage("nullValue.CollectionIsNull");
            Logging.logger().severe(message);
            throw new IllegalArgumentException(message);
        }
        this.recordList.addAll(c);
    }

    public void removeRecord(int index)
    {
        if (index < 0 || index >= this.recordList.size())
        {
            String message = Logging.getMessage("generic.ArgumentOutOfRange", index);
            Logging.logger().severe(message);
            throw new IllegalArgumentException(message);
        }
        this.recordList.remove(index);
    }

    public void clearRecords()
    {
        this.recordList.clear();
    }

    public Iterator<Object> iterator()
    {
        return this.recordList.iterator();
    }

    // --- Plain accessors for the SearchResults attributes. ---

    public String getResultSetId()
    {
        return this.resultSetId;
    }

    public void setResultSetId(String resultSetId)
    {
        this.resultSetId = resultSetId;
    }

    public ElementSetType getElementSet()
    {
        return this.elementSet;
    }

    public void setElementSet(ElementSetType elementSet)
    {
        this.elementSet = elementSet;
    }

    public String getRecordSchema()
    {
        return this.recordSchema;
    }

    public void setRecordSchema(String recordSchema)
    {
        this.recordSchema = recordSchema;
    }

    public int getNumberOfRecordsMatched()
    {
        return this.numberOfRecordsMatched;
    }

    public void setNumberOfRecordsMatched(int numberOfRecordsMatched)
    {
        this.numberOfRecordsMatched = numberOfRecordsMatched;
    }

    public int getNumberOfRecordsReturned()
    {
        return this.numberOfRecordsReturned;
    }

    public void setNumberOfRecordsReturned(int numberOfRecordsReturned)
    {
        this.numberOfRecordsReturned = numberOfRecordsReturned;
    }

    public int getNextRecord()
    {
        return this.nextRecord;
    }

    public void setNextRecord(int nextRecord)
    {
        this.nextRecord = nextRecord;
    }

    public String getExpires()
    {
        return this.expires;
    }

    public void setExpires(String expires)
    {
        this.expires = expires;
    }
}
| |
package io.github.privacystreams.utils;
import android.content.Context;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.support.v4.content.FileProvider;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* A helper class to access storage-related functions.
*/
public class StorageUtils {
    private static final String LOG_TAG = "StorageUtils - ";

    /** Guards every file read/write performed through this class. */
    public static final Object fileRWMutex = new Object();

    /**
     * Get the directory for the public directory.
     *
     * @param dirPath the path of dir, relative to external storage root
     * @return the directory in public dir, or null if it cannot be created
     */
    public static File getPublicDir(String dirPath) {
        String fullDirPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/" + dirPath;
        File targetDir = new File(fullDirPath);
        if (!targetDir.exists() && !targetDir.mkdirs()) {
            Logging.warn(LOG_TAG + "fail to create dir: " + targetDir);
            return null;
        }
        return targetDir;
    }

    /**
     * Get the relative path of a given file from the public directory.
     * Falls back to the absolute path when the file is not under external storage.
     *
     * @param file the file to get path
     * @return the relative path, or the absolute path as a fallback
     */
    public static String getPublicRelativePath(File file) {
        File publicDir = Environment.getExternalStorageDirectory();
        String relativePath = getRelativePath(file, publicDir);
        return relativePath == null ? file.getAbsolutePath() : relativePath;
    }

    /**
     * Get the relative path of a given file from the app's private files directory.
     * Falls back to the absolute path when the file is not under that directory.
     *
     * @param context a Context instance
     * @param file    the file to get path
     * @return the relative path, or the absolute path as a fallback
     */
    public static String getPrivateRelativePath(Context context, File file) {
        File privateDir = context.getFilesDir();
        String relativePath = getRelativePath(file, privateDir);
        return relativePath == null ? file.getAbsolutePath() : relativePath;
    }

    /**
     * Get the directory for the user's private directory.
     *
     * @param context a Context instance
     * @param dirPath the path of dir, relative to the app's files dir
     * @return the directory in private dir, or null if it cannot be created
     */
    public static File getPrivateDir(Context context, String dirPath) {
        String fullDirPath = context.getFilesDir().getAbsolutePath() + "/" + dirPath;
        File targetDir = new File(fullDirPath);
        if (!targetDir.exists() && !targetDir.mkdirs()) {
            Logging.warn(LOG_TAG + "fail to create dir: " + targetDir);
            return null;
        }
        return targetDir;
    }

    /**
     * Get a valid file of a given file path.
     *
     * @param context  a Context instance
     * @param filePath the original file path
     * @param isPublic if true, the valid path will be in external storage (sdcard);
     *                 if false, the valid path will be in internal storage.
     * @return the valid file
     */
    public static File getValidFile(Context context, String filePath, boolean isPublic) {
        String dirPath, fileName;
        int lastPathSepIndex = filePath.lastIndexOf('/');
        if (lastPathSepIndex < 0) {
            dirPath = "";
            fileName = filePath;
        } else {
            dirPath = filePath.substring(0, lastPathSepIndex);
            fileName = filePath.substring(lastPathSepIndex + 1);
        }
        File dirFile = isPublic ? getPublicDir(dirPath) : getPrivateDir(context, dirPath);
        // NOTE(review): dirFile may be null if the dir could not be created; File(null, name)
        // then yields a relative file, preserving the original behavior.
        return new File(dirFile, fileName);
    }

    /**
     * Write a string to a file, under the file read/write mutex.
     * Errors are logged, not propagated (best-effort semantics preserved).
     *
     * @param content   the text to write
     * @param validFile the destination file
     * @param append    if true, the configured separator plus content is appended;
     *                  otherwise the file is truncated and overwritten
     */
    public static void writeToFile(String content, File validFile, boolean append) {
        try {
            synchronized (fileRWMutex) {
                String contentToWrite = append ? Globals.StorageConfig.fileAppendSeparator + content : content;
                // try-with-resources: the previous code leaked the stream if write() threw.
                try (FileOutputStream fileOutputStream = new FileOutputStream(validFile, append)) {
                    // NOTE(review): getBytes() uses the platform default charset — confirm UTF-8 is intended.
                    fileOutputStream.write(contentToWrite.getBytes());
                }
            }
        } catch (IOException e) {
            Logging.warn("error writing data to file.");
            e.printStackTrace();
        }
    }

    /**
     * Get the input stream of a file and delete the file.
     * The file content is copied into memory first, so the returned stream
     * remains readable after the file is gone.
     *
     * @param file the file to read
     * @return the InputStream, or null if reading failed
     */
    public static InputStream getInputStreamAndDelete(File file) {
        ByteArrayOutputStream tempOutStream = new ByteArrayOutputStream();
        InputStream resultInputStream = null;
        try {
            synchronized (StorageUtils.fileRWMutex) {
                // try-with-resources: the previous code leaked the stream if read() threw.
                try (FileInputStream inputStream = new FileInputStream(file)) {
                    byte[] buffer = new byte[1024];
                    int len;
                    while ((len = inputStream.read(buffer)) > -1) {
                        tempOutStream.write(buffer, 0, len);
                    }
                    tempOutStream.flush();
                }
                StorageUtils.safeDelete(file);
                resultInputStream = new ByteArrayInputStream(tempOutStream.toByteArray());
            }
        } catch (IOException e) {
            Logging.warn("error getting data from file.");
            e.printStackTrace();
        }
        return resultInputStream;
    }

    /**
     * Get the Uri of a file. Uses FileProvider on N+ where file:// Uris are rejected.
     *
     * @param context a Context instance
     * @param file    the file
     * @return the Uri
     */
    private static Uri getUri(Context context, File file) {
        Uri uri;
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
            uri = Uri.fromFile(file);
        } else {
            // NOTE(review): requires a matching <provider> with authority "<pkg>.provider" in the manifest.
            uri = FileProvider.getUriForFile(context, context.getApplicationContext().getPackageName() + ".provider", file);
        }
        return uri;
    }

    /**
     * Get the relative path from a file to a directory.
     * returns null if the file doesn't belong to the folder.
     *
     * @param file   the file to get relative path to.
     * @param folder the directory to get relative path from.
     * @return the relative path ("" if file and folder are the same path),
     *         or null if the file doesn't belong to the folder.
     */
    public static String getRelativePath(File file, File folder) {
        String filePath = file.getAbsolutePath();
        String folderPath = folder.getAbsolutePath();
        if (filePath.equals(folderPath)) {
            // Previously this case threw StringIndexOutOfBoundsException.
            return "";
        }
        // Match on a full path component: a bare startsWith() wrongly matched
        // e.g. "/a/bcd" against folder "/a/b" and returned a corrupted path.
        String prefix = folderPath.endsWith(File.separator) ? folderPath : folderPath + File.separator;
        if (filePath.startsWith(prefix)) {
            return filePath.substring(prefix.length());
        }
        return null;
    }

    /**
     * Delete a file without throw any exception.
     *
     * @param file the file to delete.
     */
    public static void safeDelete(File file) {
        try {
            // If immediate deletion fails, schedule deletion at JVM exit as a fallback.
            if (!file.delete()) file.deleteOnExit();
        } catch (Exception ignored) {
            Logging.warn("Failed to delete file: " + file.getAbsolutePath());
        }
    }
}
| |
package org.apache.maven;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.ArtifactUtils;
import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.artifact.resolver.ArtifactResolutionRequest;
import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
import org.apache.maven.artifact.resolver.MultipleArtifactsNotFoundException;
import org.apache.maven.artifact.resolver.ResolutionErrorHandler;
import org.apache.maven.artifact.resolver.filter.CumulativeScopeArtifactFilter;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.artifact.ProjectArtifact;
import org.apache.maven.repository.RepositorySystem;
/**
* @deprecated As of 3.2.2, and there is no direct replacement. This is an internal class which was not marked as such,
* but should have been.
*
*/
@Deprecated
@Named
@Singleton
public class DefaultProjectDependenciesResolver
    implements ProjectDependenciesResolver
{
    // Legacy artifact resolution engine; performs the actual dependency walk.
    @Inject
    private RepositorySystem repositorySystem;

    // Converts an ArtifactResolutionResult's recorded errors into thrown exceptions.
    @Inject
    private ResolutionErrorHandler resolutionErrorHandler;

    /**
     * Resolves the dependencies of a single project for the given resolution scopes.
     */
    public Set<Artifact> resolve( MavenProject project, Collection<String> scopesToResolve, MavenSession session )
        throws ArtifactResolutionException, ArtifactNotFoundException
    {
        return resolve( Collections.singleton( project ), scopesToResolve, session );
    }

    /**
     * Resolves a single project's dependencies, collecting (without downloading)
     * the scopes in scopesToCollect and fully resolving those in scopesToResolve.
     * Reactor siblings of the project are tolerated as missing artifacts.
     */
    public Set<Artifact> resolve( MavenProject project, Collection<String> scopesToCollect,
                                  Collection<String> scopesToResolve, MavenSession session )
        throws ArtifactResolutionException, ArtifactNotFoundException
    {
        Set<MavenProject> mavenProjects = Collections.singleton( project );
        return resolveImpl( mavenProjects, scopesToCollect, scopesToResolve, session,
                            getIgnorableArtifacts( mavenProjects ) );
    }

    /**
     * Resolves the dependencies of several projects at once; every project in
     * the collection is treated as ignorable when reported missing.
     */
    public Set<Artifact> resolve( Collection<? extends MavenProject> projects, Collection<String> scopesToResolve,
                                  MavenSession session )
        throws ArtifactResolutionException, ArtifactNotFoundException
    {
        return resolveImpl( projects, null, scopesToResolve, session, getIgnorableArtifacts( projects ) );
    }

    /**
     * As above, but with an explicit caller-supplied set of artifacts whose
     * absence should not fail the resolution (e.g. reactor artifacts not yet built).
     */
    public Set<Artifact> resolve( MavenProject project, Collection<String> scopesToCollect,
                                  Collection<String> scopesToResolve, MavenSession session,
                                  Set<Artifact> ignoreableArtifacts )
        throws ArtifactResolutionException, ArtifactNotFoundException
    {
        return resolveImpl( Collections.singleton( project ), scopesToCollect, scopesToResolve, session,
                            getIgnorableArtifacts( ignoreableArtifacts ) );
    }

    /**
     * Shared implementation: builds one ArtifactResolutionRequest from the session,
     * then reuses it for each project, mutating the per-project fields in the loop.
     *
     * @param projectIds ignorable artifact keys (groupId:artifactId:version); missing
     *        artifacts matching these keys are filtered out of "not found" failures
     */
    private Set<Artifact> resolveImpl( Collection<? extends MavenProject> projects, Collection<String> scopesToCollect,
                                       Collection<String> scopesToResolve, MavenSession session,
                                       Set<String> projectIds )
        throws ArtifactResolutionException, ArtifactNotFoundException
    {
        Set<Artifact> resolved = new LinkedHashSet<>();
        if ( projects == null || projects.isEmpty() )
        {
            return resolved;
        }
        // Nothing to collect and nothing to resolve: short-circuit.
        if ( ( scopesToCollect == null || scopesToCollect.isEmpty() )
            && ( scopesToResolve == null || scopesToResolve.isEmpty() ) )
        {
            return resolved;
        }
        /*
        Logic for transitive global exclusions
        List<String> exclusions = new ArrayList<String>();
        for ( Dependency d : project.getDependencies() )
        {
            if ( d.getExclusions() != null )
            {
                for ( Exclusion e : d.getExclusions() )
                {
                    exclusions.add( e.getGroupId() + ":" + e.getArtifactId() );
                }
            }
        }
        ArtifactFilter scopeFilter = new ScopeArtifactFilter( scope );
        ArtifactFilter filter;
        if ( ! exclusions.isEmpty() )
        {
            filter = new AndArtifactFilter( Arrays.asList( new ArtifactFilter[]{
                new ExcludesArtifactFilter( exclusions ), scopeFilter } ) );
        }
        else
        {
            filter = scopeFilter;
        }
        */
        // The collection filter must also accept everything the resolution filter
        // accepts, hence the union of the two scope filters below.
        CumulativeScopeArtifactFilter resolutionScopeFilter = new CumulativeScopeArtifactFilter( scopesToResolve );
        CumulativeScopeArtifactFilter collectionScopeFilter = new CumulativeScopeArtifactFilter( scopesToCollect );
        collectionScopeFilter = new CumulativeScopeArtifactFilter( collectionScopeFilter, resolutionScopeFilter );
        ArtifactResolutionRequest request =
            new ArtifactResolutionRequest().setResolveRoot( false ).setResolveTransitively( true ).setCollectionFilter(
                collectionScopeFilter ).setResolutionFilter( resolutionScopeFilter ).setLocalRepository(
                session.getLocalRepository() ).setOffline( session.isOffline() ).setForceUpdate(
                session.getRequest().isUpdateSnapshots() );
        request.setServers( session.getRequest().getServers() );
        request.setMirrors( session.getRequest().getMirrors() );
        request.setProxies( session.getRequest().getProxies() );
        for ( MavenProject project : projects )
        {
            // Per-project fields of the shared request are overwritten each iteration.
            request.setArtifact( new ProjectArtifact( project ) );
            request.setArtifactDependencies( project.getDependencyArtifacts() );
            request.setManagedVersionMap( project.getManagedVersionMap() );
            request.setRemoteRepositories( project.getRemoteArtifactRepositories() );
            ArtifactResolutionResult result = repositorySystem.resolve( request );
            try
            {
                resolutionErrorHandler.throwErrors( request, result );
            }
            catch ( MultipleArtifactsNotFoundException e )
            {
                // Drop missing artifacts that are ignorable (e.g. reactor projects);
                // rethrow only if genuinely external artifacts are still missing.
                Collection<Artifact> missing = new HashSet<>( e.getMissingArtifacts() );
                for ( Iterator<Artifact> it = missing.iterator(); it.hasNext(); )
                {
                    String key = ArtifactUtils.key( it.next() );
                    if ( projectIds.contains( key ) )
                    {
                        it.remove();
                    }
                }
                if ( !missing.isEmpty() )
                {
                    throw e;
                }
            }
            resolved.addAll( result.getArtifacts() );
        }
        return resolved;
    }

    // Builds the set of groupId:artifactId:version keys for the given projects.
    private Set<String> getIgnorableArtifacts( Collection<? extends MavenProject> projects )
    {
        Set<String> projectIds = new HashSet<>( projects.size() * 2 );
        for ( MavenProject p : projects )
        {
            String key = ArtifactUtils.key( p.getGroupId(), p.getArtifactId(), p.getVersion() );
            projectIds.add( key );
        }
        return projectIds;
    }

    // Builds the set of keys for an explicit collection of ignorable artifacts.
    private Set<String> getIgnorableArtifacts( Iterable<Artifact> artifactIterable )
    {
        Set<String> projectIds = new HashSet<>();
        for ( Artifact artifact : artifactIterable )
        {
            String key = ArtifactUtils.key( artifact );
            projectIds.add( key );
        }
        return projectIds;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.modeler.modules;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;
import org.apache.tomcat.util.DomUtil;
import org.apache.tomcat.util.modeler.AttributeInfo;
import org.apache.tomcat.util.modeler.ManagedBean;
import org.apache.tomcat.util.modeler.NotificationInfo;
import org.apache.tomcat.util.modeler.OperationInfo;
import org.apache.tomcat.util.modeler.ParameterInfo;
import org.apache.tomcat.util.modeler.Registry;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
/**
 * Loads mbeans-descriptors metadata from a DOM document (an InputStream source)
 * and builds the corresponding ManagedBean descriptors.
 */
public class MbeansDescriptorsDOMSource extends ModelerSource
{
    private static Log log = LogFactory.getLog(MbeansDescriptorsDOMSource.class);

    Registry registry;
    String location;
    String type;
    Object source;
    // NOTE(review): raw List kept — the element type is ManagedBean, but the
    // signature appears to be part of the ModelerSource contract; verify before generifying.
    List mbeans=new ArrayList();

    public void setRegistry(Registry reg) {
        this.registry=reg;
    }

    public void setLocation( String loc ) {
        this.location=loc;
    }

    /** Used if a single component is loaded
     *
     * @param type
     */
    public void setType( String type ) {
       this.type=type;
    }

    public void setSource( Object source ) {
        this.source=source;
    }

    /**
     * Configures this source from the given parameters, runs the parse,
     * and returns the accumulated list of ManagedBean descriptors.
     */
    public List loadDescriptors( Registry registry, String location,
                                 String type, Object source)
        throws Exception
    {
        setRegistry(registry);
        setLocation(location);
        setType(type);
        setSource(source);
        execute();
        return mbeans;
    }

    /**
     * Parses the DOM document from {@code source} (expected to be an InputStream)
     * and appends one ManagedBean per &lt;mbean&gt; element to {@code mbeans}.
     * Parse errors are logged, not propagated.
     */
    public void execute() throws Exception {
        if( registry==null ) registry=Registry.getRegistry();
        try {
            InputStream stream=(InputStream)source;
            long t1=System.currentTimeMillis();
            Document doc=DomUtil.readXml(stream);
            // Ignore for now the name of the root element
            Node descriptorsN=doc.getDocumentElement();
            //Node descriptorsN=DomUtil.getChild(doc, "mbeans-descriptors");
            if( descriptorsN == null ) {
                log.error("No descriptors found");
                return;
            }
            // Accept either a bare <mbean> root or a wrapper element containing them.
            Node firstMbeanN=null;
            if( "mbean".equals( descriptorsN.getNodeName() ) ) {
                firstMbeanN=descriptorsN;
            } else {
                firstMbeanN=DomUtil.getChild(descriptorsN, "mbean");
            }
            if( firstMbeanN==null ) {
                log.error(" No mbean tags ");
                return;
            }
            // Process each <mbean> element
            for (Node mbeanN = firstMbeanN; mbeanN != null;
                 mbeanN= DomUtil.getNext(mbeanN))
            {
                // Create a new managed bean info
                ManagedBean managed=new ManagedBean();
                DomUtil.setAttributes(managed, mbeanN);
                Node firstN;
                // Process descriptor subnode
                /*Node mbeanDescriptorN =
                    DomUtil.getChild(mbeanN, "descriptor");
                if (mbeanDescriptorN != null) {
                    Node firstFieldN =
                        DomUtil.getChild(mbeanDescriptorN, "field");
                    for (Node fieldN = firstFieldN; fieldN != null;
                         fieldN = DomUtil.getNext(fieldN)) {
                        FieldInfo fi = new FieldInfo();
                        DomUtil.setAttributes(fi, fieldN);
                        managed.addField(fi);
                    }
                }*/

                // process attribute nodes
                firstN=DomUtil.getChild( mbeanN, "attribute");
                for (Node descN = firstN; descN != null;
                     descN = DomUtil.getNext( descN ))
                {
                    // Create new attribute info
                    AttributeInfo ai=new AttributeInfo();
                    DomUtil.setAttributes(ai, descN);
                    // Process descriptor subnode
                    /*Node descriptorN =
                        DomUtil.getChild(descN, "descriptor");
                    if (descriptorN != null) {
                        Node firstFieldN =
                            DomUtil.getChild(descriptorN, "field");
                        for (Node fieldN = firstFieldN; fieldN != null;
                             fieldN = DomUtil.getNext(fieldN)) {
                            FieldInfo fi = new FieldInfo();
                            DomUtil.setAttributes(fi, fieldN);
                            ai.addField(fi);
                        }
                    }
                    */
                    // Add this info to our managed bean info
                    managed.addAttribute( ai );
                    if (log.isTraceEnabled()) {
                        log.trace("Create attribute " + ai);
                    }
                }

                // process constructor nodes
                /*
                firstN=DomUtil.getChild( mbeanN, "constructor");
                for (Node descN = firstN; descN != null;
                     descN = DomUtil.getNext( descN )) {
                    // Create new constructor info
                    ConstructorInfo ci=new ConstructorInfo();
                    DomUtil.setAttributes(ci, descN);
                    // Process descriptor subnode
                    Node firstDescriptorN =
                        DomUtil.getChild(descN, "descriptor");
                    if (firstDescriptorN != null) {
                        Node firstFieldN =
                            DomUtil.getChild(firstDescriptorN, "field");
                        for (Node fieldN = firstFieldN; fieldN != null;
                             fieldN = DomUtil.getNext(fieldN)) {
                            FieldInfo fi = new FieldInfo();
                            DomUtil.setAttributes(fi, fieldN);
                            ci.addField(fi);
                        }
                    }
                    // Process parameter subnodes
                    Node firstParamN=DomUtil.getChild( descN, "parameter");
                    for (Node paramN = firstParamN; paramN != null;
                         paramN = DomUtil.getNext(paramN))
                    {
                        ParameterInfo pi=new ParameterInfo();
                        DomUtil.setAttributes(pi, paramN);
                        ci.addParameter( pi );
                    }
                    // Add this info to our managed bean info
                    managed.addConstructor( ci );
                    if (log.isTraceEnabled()) {
                        log.trace("Create constructor " + ci);
                    }
                }*/

                // process notification nodes
                firstN=DomUtil.getChild( mbeanN, "notification");
                for (Node descN = firstN; descN != null;
                     descN = DomUtil.getNext( descN ))
                {
                    // Create new notification info
                    NotificationInfo ni=new NotificationInfo();
                    DomUtil.setAttributes(ni, descN);
                    // Process descriptor subnode
                    /*Node firstDescriptorN =
                        DomUtil.getChild(descN, "descriptor");
                    if (firstDescriptorN != null) {
                        Node firstFieldN =
                            DomUtil.getChild(firstDescriptorN, "field");
                        for (Node fieldN = firstFieldN; fieldN != null;
                             fieldN = DomUtil.getNext(fieldN)) {
                            FieldInfo fi = new FieldInfo();
                            DomUtil.setAttributes(fi, fieldN);
                            ni.addField(fi);
                        }
                    }*/
                    // Process notification-type subnodes
                    Node firstParamN=DomUtil.getChild( descN, "notification-type");
                    for (Node paramN = firstParamN; paramN != null;
                         paramN = DomUtil.getNext(paramN))
                    {
                        ni.addNotifType( DomUtil.getContent(paramN) );
                    }
                    // Add this info to our managed bean info
                    managed.addNotification( ni );
                    if (log.isTraceEnabled()) {
                        log.trace("Created notification " + ni);
                    }
                }

                // process operation nodes
                firstN=DomUtil.getChild( mbeanN, "operation");
                for (Node descN = firstN; descN != null;
                     descN = DomUtil.getNext( descN ))
                {
                    // Create new operation info
                    OperationInfo oi=new OperationInfo();
                    DomUtil.setAttributes(oi, descN);
                    // Process descriptor subnode
                    /*Node firstDescriptorN =
                        DomUtil.getChild(descN, "descriptor");
                    if (firstDescriptorN != null) {
                        Node firstFieldN =
                            DomUtil.getChild(firstDescriptorN, "field");
                        for (Node fieldN = firstFieldN; fieldN != null;
                             fieldN = DomUtil.getNext(fieldN)) {
                            FieldInfo fi = new FieldInfo();
                            DomUtil.setAttributes(fi, fieldN);
                            oi.addField(fi);
                        }
                    }*/
                    // Process parameter subnodes
                    Node firstParamN=DomUtil.getChild( descN, "parameter");
                    for (Node paramN = firstParamN; paramN != null;
                         paramN = DomUtil.getNext(paramN))
                    {
                        ParameterInfo pi=new ParameterInfo();
                        DomUtil.setAttributes(pi, paramN);
                        if( log.isTraceEnabled())
                            log.trace("Add param " + pi.getName());
                        oi.addParameter( pi );
                    }
                    // Add this info to our managed bean info
                    managed.addOperation( oi );
                    if( log.isTraceEnabled()) {
                        log.trace("Create operation " + oi);
                    }
                }

                // Add the completed managed bean info to the registry
                //registry.addManagedBean(managed);
                mbeans.add( managed );
            }
            long t2=System.currentTimeMillis();
            log.debug( "Reading descriptors ( dom ) " + (t2-t1));
        } catch( Exception ex ) {
            // Best-effort: a malformed descriptor file is logged, not rethrown.
            log.error( "Error reading descriptors ", ex);
        }
    }
}
| |
/*
* Copyright 2014 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.index.util;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
import com.google.common.collect.Maps;
import org.apache.hadoop.hbase.index.ValueGetter;
import org.apache.hadoop.hbase.index.builder.IndexBuildingFailureException;
import org.apache.hadoop.hbase.index.covered.data.LazyValueGetter;
import org.apache.hadoop.hbase.index.covered.update.ColumnReference;
import org.apache.hadoop.hbase.index.scanner.Scanner;
/**
 * Utility class to help manage indexes
 */
public class IndexManagementUtil {

    private IndexManagementUtil() {
        // private ctor for util classes
    }

    // Don't rely on statically defined classes constants from classes that may not exist
    // in earlier HBase versions
    public static final String INDEX_WAL_EDIT_CODEC_CLASS_NAME = "org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec";
    public static final String HLOG_READER_IMPL_KEY = "hbase.regionserver.hlog.reader.impl";
    public static final String WAL_EDIT_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
    private static final String INDEX_HLOG_READER_CLASS_NAME = "org.apache.hadoop.hbase.regionserver.wal.IndexedHLogReader";

    private static final Log LOG = LogFactory.getLog(IndexManagementUtil.class);

    /**
     * Checks whether the indexed WAL edit codec is usable: the class must be loadable (it can be
     * absent on older HBase versions) and it must be the value configured under
     * {@link #WAL_EDIT_CODEC_CLASS_KEY}.
     *
     * @param conf configuration to inspect
     * @return <tt>true</tt> only if the codec class loads and is the configured WAL codec
     */
    public static boolean isWALEditCodecSet(Configuration conf) {
        // check to see if the WALEditCodec is installed
        try {
            // Use reflection to load the IndexedWALEditCodec, since it may not load with an older version
            // of HBase
            Class.forName(INDEX_WAL_EDIT_CODEC_CLASS_NAME);
        } catch (Throwable t) {
            // any load failure (missing class, linkage error) means the codec isn't usable
            return false;
        }
        if (INDEX_WAL_EDIT_CODEC_CLASS_NAME.equals(conf.get(WAL_EDIT_CODEC_CLASS_KEY, null))) {
            // its installed, and it can handle compression and non-compression cases
            return true;
        }
        return false;
    }

    /**
     * Validates that mutable indexing can work with this configuration: either the indexed WAL
     * edit codec is configured, or the indexed HLog reader is loadable and configured, in which
     * case WAL compression must be disabled.
     *
     * @param conf configuration to validate
     * @throws IllegalStateException if neither indexing hook is correctly configured, or if the
     *             reader-based setup is combined with WAL compression
     */
    public static void ensureMutableIndexingCorrectlyConfigured(Configuration conf) throws IllegalStateException {
        // check to see if the WALEditCodec is installed
        if (isWALEditCodecSet(conf)) { return; }

        // otherwise, we have to install the indexedhlogreader, but it cannot have compression
        String codecClass = INDEX_WAL_EDIT_CODEC_CLASS_NAME;
        String indexLogReaderName = INDEX_HLOG_READER_CLASS_NAME;
        try {
            // Use reflection to load the IndexedHLogReader, since it may not load with an older version
            // of HBase
            Class.forName(indexLogReaderName);
        } catch (ClassNotFoundException e) {
            throw new IllegalStateException(codecClass + " is not installed, but "
                    + indexLogReaderName + " hasn't been installed in hbase-site.xml under " + HLOG_READER_IMPL_KEY);
        }
        // NOTE(review): the lookup defaults to indexLogReaderName, so an *unset* reader key is
        // treated the same as an explicitly configured indexed reader -- confirm this is intended.
        if (indexLogReaderName.equals(conf.get(HLOG_READER_IMPL_KEY, indexLogReaderName))) {
            if (conf.getBoolean(HConstants.ENABLE_WAL_COMPRESSION, false)) { throw new IllegalStateException(
                    "WAL Compression is only supported with " + codecClass
                            + ". You can install in hbase-site.xml, under " + WAL_EDIT_CODEC_CLASS_KEY); }
        } else {
            throw new IllegalStateException(codecClass + " is not installed, but "
                    + indexLogReaderName + " hasn't been installed in hbase-site.xml under " + HLOG_READER_IMPL_KEY);
        }
    }

    /**
     * Builds a {@link ValueGetter} backed by a snapshot of the given pending updates. Each
     * KeyValue's (family, qualifier) pair is mapped to its value; if several KeyValues reference
     * the same column, later entries in the iteration order overwrite earlier ones.
     *
     * @param pendingUpdates updates whose values should be resolvable by column
     * @return getter resolving a {@link ColumnReference} against the snapshot; lookups for columns
     *         not present in the updates yield <tt>null</tt>
     */
    public static ValueGetter createGetterFromKeyValues(Collection<KeyValue> pendingUpdates) {
        final Map<ReferencingColumn, ImmutableBytesPtr> valueMap = Maps.newHashMapWithExpectedSize(pendingUpdates
                .size());
        for (KeyValue kv : pendingUpdates) {
            // create new pointers to each part of the kv
            ImmutableBytesPtr family = new ImmutableBytesPtr(kv.getBuffer(), kv.getFamilyOffset(), kv.getFamilyLength());
            ImmutableBytesPtr qual = new ImmutableBytesPtr(kv.getBuffer(), kv.getQualifierOffset(),
                    kv.getQualifierLength());
            ImmutableBytesPtr value = new ImmutableBytesPtr(kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
            valueMap.put(new ReferencingColumn(family, qual), value);
        }
        return new ValueGetter() {
            @Override
            public ImmutableBytesPtr getLatestValue(ColumnReference ref) throws IOException {
                return valueMap.get(ReferencingColumn.wrap(ref));
            }
        };
    }

    /**
     * Map key pairing a column family and qualifier with value-based equality, so values can be
     * looked up by {@link ColumnReference}.
     */
    private static class ReferencingColumn {
        ImmutableBytesPtr family;
        ImmutableBytesPtr qual;

        // Adapts a ColumnReference into the key type used by the value map above.
        static ReferencingColumn wrap(ColumnReference ref) {
            ImmutableBytesPtr family = new ImmutableBytesPtr(ref.getFamily());
            ImmutableBytesPtr qual = new ImmutableBytesPtr(ref.getQualifier());
            return new ReferencingColumn(family, qual);
        }

        public ReferencingColumn(ImmutableBytesPtr family, ImmutableBytesPtr qual) {
            this.family = family;
            this.qual = qual;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((family == null) ? 0 : family.hashCode());
            result = prime * result + ((qual == null) ? 0 : qual.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) return true;
            if (obj == null) return false;
            if (getClass() != obj.getClass()) return false;
            ReferencingColumn other = (ReferencingColumn)obj;
            if (family == null) {
                if (other.family != null) return false;
            } else if (!family.equals(other.family)) return false;
            if (qual == null) {
                if (other.qual != null) return false;
            } else if (!qual.equals(other.qual)) return false;
            return true;
        }
    }

    /**
     * Builds a {@link ValueGetter} that lazily resolves column values for the given row from the
     * supplied scanner.
     */
    public static ValueGetter createGetterFromScanner(Scanner scanner, byte[] currentRow) {
        return new LazyValueGetter(scanner, currentRow);
    }

    /**
     * check to see if the kvs in the update match any of the passed columns. Generally, this is useful for an index
     * codec to determine if a given update should even be indexed. This assumes that for any index, there is going to
     * be a small number of columns, versus the number of kvs in any one batch.
     */
    public static boolean updateMatchesColumns(Collection<KeyValue> update, List<ColumnReference> columns) {
        // check to see if the kvs in the new update even match any of the columns requested
        // assuming that for any index, there are going to small number of columns, versus the number of
        // kvs in any one batch.
        boolean matches = false;
        outer: for (KeyValue kv : update) {
            for (ColumnReference ref : columns) {
                if (ref.matchesFamily(kv.getFamily()) && ref.matchesQualifier(kv.getQualifier())) {
                    matches = true;
                    // if a single column matches a single kv, we need to build a whole scanner
                    break outer;
                }
            }
        }
        return matches;
    }

    /**
     * Check to see if the kvs in the update match any of the passed columns. Generally, this is useful for an index
     * codec to determine if a given update should even be indexed. This assumes that for any index, there is going to
     * be a small number of kvs, versus the number of columns in any one batch.
     * <p>
     * This employs the same logic as {@link #updateMatchesColumns(Collection, List)}, but flips the iteration logic
     * to search columns before kvs.
     */
    public static boolean columnMatchesUpdate(List<ColumnReference> columns, Collection<KeyValue> update) {
        boolean matches = false;
        outer: for (ColumnReference ref : columns) {
            for (KeyValue kv : update) {
                if (ref.matchesFamily(kv.getFamily()) && ref.matchesQualifier(kv.getQualifier())) {
                    matches = true;
                    // if a single column matches a single kv, we need to build a whole scanner
                    break outer;
                }
            }
        }
        return matches;
    }

    /**
     * Builds a raw, all-versions {@link Scan} over every column family referenced by the given
     * column reference groups, suitable for reading the current local state of a row.
     *
     * @param refsArray groups of column references whose families should be scanned
     * @return configured scan (raw, max versions, families added)
     */
    public static Scan newLocalStateScan(List<? extends Iterable<? extends ColumnReference>> refsArray) {
        Scan s = new Scan();
        s.setRaw(true);
        // add the necessary columns to the scan
        for (Iterable<? extends ColumnReference> refs : refsArray) {
            for (ColumnReference ref : refs) {
                s.addFamily(ref.getFamily());
            }
        }
        s.setMaxVersions();
        return s;
    }

    /**
     * Propagate the given failure as a generic {@link IOException}, if it isn't already
     *
     * @param e
     *            reason indexing failed. If <tt>null</tt>, throws a {@link NullPointerException}, which should unload
     *            the coprocessor.
     */
    public static void rethrowIndexingException(Throwable e) throws IOException {
        try {
            // re-throw so the catch clauses below can dispatch on the runtime type of e
            throw e;
        } catch (IOException e1) {
            LOG.info("Rethrowing " + e);
            throw e1;
        } catch (Throwable e1) {
            LOG.info("Rethrowing " + e1 + " as a " + IndexBuildingFailureException.class.getSimpleName());
            throw new IndexBuildingFailureException("Failed to build index for unexpected reason!", e1);
        }
    }

    /**
     * Sets {@code key} to the given int value only if the configuration has no value for it yet.
     */
    public static void setIfNotSet(Configuration conf, String key, int value) {
        if (conf.get(key) == null) {
            conf.setInt(key, value);
        }
    }
}
| |
/*
Copyright 2012, Jernej Kovacic
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.jkovacic.cryptoutil;
import java.util.*;
import com.jkovacic.util.*;
/**
 * A class with implementation of a bubble - babble encoding.
 * The encoding is defined at http://wiki.yak.net/589 and
 * http://wiki.yak.net/589/Bubble_Babble_Encoding.txt
 *
 * This encoding is often used at SSH (for server verification).
 * Note that this class does not perform SHA-1 hashing so you
 * must do it yourself beforehand.
 *
 * @author Jernej Kovacic
 *
 * @see Base64
 */
public class BubbleBabble
{
    // auxiliary char arrays, used by encoding algorithm
    private static final char[] VOWELS = "aeiouy".toCharArray();
    private static final char[] CONSONANTS = "bcdfghklmnprstvzx".toCharArray();

    // result for an empty input
    private static final char[] EMPTY_INPUT = "xexax".toCharArray();

    /**
     * Calculates a bubble - babble encoding of the input blob
     *
     * @param blob - a blob to be encoded
     * @return bubble - babble encoded blob (an array of exactly the required length is allocated)
     */
    public static char[] encode(byte[] blob)
    {
        // sanity check
        // consider null blob as an empty string, which returns "xexax"
        if ( null == blob )
        {
            return EMPTY_INPUT;
        }

        /*
         required buffer length:
           5 * floor(blob.length/2)  (full 5-letter words)
         + 5                         (additional 5-letter word, composed of a 3 digit tupple)
         + floor(blob.length/2)      (dashes, separating 1+blob.length/2 "words")
         Total: 6 * floor(blob.length/2) + 5
        */
        char[] retVal = new char[6 * (blob.length/2) + 5];
        Arrays.fill(retVal, '\u0000');

        // auxiliary string for a 5 letter "word" and a dash
        char [] auxstr = new char[6];

        // running checksum, updated after each full two-byte tuple
        int seed = 1;
        // current write position inside retVal (position 0 is the leading 'x')
        int pos = 1;
        int byte1;
        int byte2;

        retVal[0] = 'x';
        for ( int i=0 ;; i+=2 )
        {
            // no input bytes left: emit the final checksum-only partial tuple and finish
            if ( i >= blob.length )
            {
                auxstr[0] = VOWELS[seed%6];
                auxstr[1] = CONSONANTS[16];
                auxstr[2] = VOWELS[seed/6];
                System.arraycopy(auxstr, 0, retVal, pos, 3);
                pos += 3;
                break; // out of for i
            }

            // first byte of the pair, encoded as vowel-consonant-vowel (checksum folded into the vowels)
            byte1 = blob[i] & 0xff;
            auxstr[0] = VOWELS[(((byte1>>6)&3)+seed)%6];
            auxstr[1] = CONSONANTS[(byte1>>2)&15];
            auxstr[2] = VOWELS[((byte1&3)+(seed/6))%6];

            // odd-length input: the last tuple has no second byte, emit the partial tuple and finish
            if ( i+1 >= blob.length )
            {
                System.arraycopy(auxstr, 0, retVal, pos, 3);
                pos += 3;
                break; // out of for i
            }

            // second byte of the pair, encoded as two consonants separated by a dash
            byte2 = blob[i+1] & 0xff;
            auxstr[3] = CONSONANTS[(byte2>>4)&15];
            auxstr[4] = '-';
            auxstr[5] = CONSONANTS[byte2&15];
            System.arraycopy(auxstr, 0, retVal, pos, 6);
            pos += 6;

            // update the checksum for the next tuple
            seed = (seed*5 + byte1*7 + byte2)%36;
        } // for i

        // trailing 'x' terminates the encoding
        retVal[pos] = 'x';

        return retVal;
    }

    /**
     * Verifies if the given char sequence COULD represent a bubble-babble encoded
     * blob. Only the sequence's length and "type" of characters (valid vowels or consonants)
     * at each position are checked. Checksums are not checked. As this is a part of the decoding
     * process, decode() should be run for the complete verification.
     *
     * @param bb - char array to be verified
     *
     * @return true/false
     */
    public static boolean validBubbleBabble(char[] bb)
    {
        // sanity check
        if ( null==bb )
        {
            return false;
        }

        /*
         * The valid length is n*6+5:
         * n five-char tuples + '-'
         * plus additional five-char tuple without the '-'
         */
        if ( (bb.length%6)!=5 )
        {
            return false;
        }

        /*
         * The valid bubble babble string always begins and ends with 'x'
         */
        if ( bb[0]!='x' || bb[bb.length-1]!='x' )
        {
            return false;
        }

        boolean retVal = true;

        // check if expected characters are placed to their appropriate positions
        for ( int i=0; i<bb.length; i++ )
        {
            // Positions of 5, 11, 17, etc. are reserved for dashes
            if ( 5 == (i%6) )
            {
                if ( '-' != bb[i] )
                {
                    retVal = false;
                    break; // out of for i
                }
                // proceed to the next character
                continue; // for i
            }

            if ( 0 == i%2 )
            {
                // valid consonants are expected at even-numbered positions (0, 2, 4, 6, 8, etc.)
                if ( LinearSearch.search(CONSONANTS, bb[i]) < 0 )
                {
                    retVal = false;
                    break;
                }
            }
            else if ( LinearSearch.search(VOWELS, bb[i]) < 0 )
            {
                // while valid vowels are expected at odd-numbered positions (1, 3, 7, etc.)
                retVal = false;
                break;
            }
        }

        return retVal;
    }

    /*
     * Decodes the first part (the first 3 characters) of a bubble-babble tuple into a byte value.
     * Parameters represent values of the typical bubble-babble tuple: <a1 a2 a3 a4 - a5>.
     *
     * @param a1 - first value of the bubble-babble tuple
     * @param a2 - second value of the bubble-babble tuple
     * @param a3 - third value of the bubble-babble tuple
     * @param c - bubble-babble checksum, depending on previous tuples
     *
     * @return byte value of the bubble-babble parameters or -1 in case of invalid values
     */
    private static int decodeGroupOf3(int a1, int a2, int a3, int c)
    {
        int retVal = 0;

        // 2 most significant bits of retVal (the "+ 6" keeps the dividend non-negative):
        int first = (a1 - (c%6) + 6) % 6;
        // value check
        if ( first>=4 || a2>16 )
        {
            return -1;
        }

        // 4 central bits of retVal
        int second = a2;

        // and 2 least significant bits of retVal:
        int third = (a3 - (c/6%6) + 6) % 6;
        // value check
        if ( third>=4 )
        {
            return -1;
        }

        // finally compose the retVal
        retVal = first<<6 | second<<2 | third;

        return retVal;
    }

    /*
     * Decodes the second part (the final 2 characters) of a bubble-babble tuple into a byte value.
     * Parameters represent values of the typical bubble-babble tuple: <a1 a2 a3 a4 - a5>.
     *
     * @param a4 - fourth value of the bubble-babble tuple
     * @param a5 - fifth value of the bubble-babble tuple
     *
     * @return byte value of the bubble-babble parameters or -1 in case of invalid values
     */
    private static int decodeGroupOf2(int a4, int a5)
    {
        int retVal = 0;

        // value check
        if ( a4>16 || a5>16 )
        {
            return -1;
        }

        // compose the retVal, i.e. 4 bits from each parameter:
        retVal = (a4<<4) | a5;

        return retVal;
    }

    /**
     * Decodes a char sequence, presumably representing a bubble-babble encoding,
     * into an array of bytes.
     *
     * @param bb - char sequence to be decoded
     *
     * @return corresponding byte array or 'null' in case of invalid 'bb'
     */
    public static byte[] decode(char[] bb)
    {
        // Decoding process is inverse to encoding.

        // sanity check
        if ( null==bb || false==validBubbleBabble(bb) )
        {
            return null;
        }

        /*
         * As derived in encode(), the bubble-babble string's length
         * equals to: bb.length = 6 * floor(blob.length/2) + 5
         * From this, the following relation can be derived:
         * floor(blob.length/2) = (bb.length - 5) / 6
         *
         * Two values of the unknown blob.length solve the equation:
         *   (bb.length - 5) / 3      and
         *   1 + (bb.length - 5) / 3
         *
         * As it will be shown later, the actual solution is determined
         * by one bubble babble character.
         */
        int retLen = (bb.length-5)/3;

        // if the third character from the end equals 'x', no extra byte will be appended
        if ( 'x' != bb[bb.length-3] )
        {
            retLen++;
        }

        // now the total retVal's length is known
        byte[] retVal = new byte[retLen];

        // checksum:
        int checksum = 1;
        int byte1 = 0;
        int byte2 = 0;
        // current position inside retVal:
        int retPos = 0;
        // numeric values of the a <a1 a2 a3 a4 - a5> tuple:
        int a1, a2, a3, a4, a5;

        // convert all complete tuples (i.e. all except the last one)
        // into a pair of bytes:
        for ( int i=0; i<bb.length/6; i++ )
        {
            // get numeric values of the tuple
            // (tuple i occupies positions 6*i+1 .. 6*i+6, with the dash at 6*i+5):
            a1 = LinearSearch.search(VOWELS, bb[i*6+1]);
            a2 = LinearSearch.search(CONSONANTS, bb[i*6+2]);
            a3 = LinearSearch.search(VOWELS, bb[i*6+3]);
            a4 = LinearSearch.search(CONSONANTS, bb[i*6+4]);
            a5 = LinearSearch.search(CONSONANTS, bb[i*6+6]);

            // and decode them into a pair of bytes:
            byte1 = decodeGroupOf3(a1, a2, a3, checksum);
            byte2 = decodeGroupOf2(a4, a5);

            // if a tuple was invalid, the corresponding byte will be set to -1
            if ( byte1<0 || byte2<0 )
            {
                return null;
            }

            // update the checksum:
            checksum = (checksum*5 + byte1*7 + byte2) % 36;

            // and finally "append" the bytes to retVal:
            retVal[retPos++] = (byte) (byte1 & 0xff);
            retVal[retPos++] = (byte) (byte2 & 0xff);
        }

        // The final tuple consists of three characters only and must
        // be processed a bit differently.
        a1 = LinearSearch.search(VOWELS, bb[bb.length-4]);
        a2 = LinearSearch.search(CONSONANTS, bb[bb.length-3]);
        a3 = LinearSearch.search(VOWELS, bb[bb.length-2]);

        // If the third character from the bb's end equals 'x' (its numeric value is 16),
        // no extra character will be appended, just check the checksums:
        if ( 16==a2 )
        {
            if ( a1!= (checksum%6) || a3!=(checksum/6) )
            {
                return null;
            }
        }
        else
        {
            // otherwise decode one more character and append it to retVal
            byte1 = decodeGroupOf3(a1, a2, a3, checksum);
            if ( byte1<0 )
            {
                return null;
            }

            retVal[retPos++] = (byte) (byte1 & 0xff);
        }

        return retVal;
    }

    /*
     A unit testing function that encodes a few test strings
    */
    public static void main(String[] args)
    {
        /*
         * Test vectors, taken from http://wiki.yak.net/589
         * Resulting strings are "xigak-nyryk-humil-bosek-sonax",
         * "xesef-disof-gytuf-katof-movif-baxux" and "xexax", respectively.
         * The bubble-babble encodings are decoded back to original test vectors:
         */
        String[] tests = { "Pineapple", "1234567890", ""};
        byte[] input;
        char[] output;
        String orig;

        for ( String test : tests )
        {
            input = test.getBytes();
            output = BubbleBabble.encode(input);
            // NOTE(review): decode() can return null for invalid input; the test vectors here
            // are always valid, so the String constructor won't see null.
            orig = new String(BubbleBabble.decode(output));
            System.out.print(test + " --> ");
            for ( char ch : output )
            {
                System.out.print(ch);
            }
            System.out.print(" --> ");
            System.out.println(orig);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.view;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.FutureCallback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.ScheduledExecutors;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.db.compaction.CompactionInterruptedException;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.locator.RangesAtEndpoint;
import org.apache.cassandra.locator.Replicas;
import org.apache.cassandra.schema.SystemDistributedKeyspace;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.Pair;
import org.apache.cassandra.utils.concurrent.Future;
import org.apache.cassandra.utils.concurrent.FutureCombiner;
import org.apache.cassandra.utils.concurrent.ImmediateFuture;
import static java.util.stream.Collectors.toList;
/**
 * Builds a materialized view for the local token ranges.
 * <p>
 * The build is split in at least {@link #NUM_TASKS} {@link ViewBuilderTask tasks}, suitable of being parallelized by
 * the {@link CompactionManager} which will execute them.
 */
class ViewBuilder
{
    private static final Logger logger = LoggerFactory.getLogger(ViewBuilder.class);

    // Minimum number of tasks the local ranges are split into
    private static final int NUM_TASKS = Runtime.getRuntime().availableProcessors() * 4;

    private final ColumnFamilyStore baseCfs;
    private final View view;
    private final String ksName;
    private final UUID localHostId = SystemKeyspace.getOrInitializeLocalHostId();
    // Ranges fully built so far
    private final Set<Range<Token>> builtRanges = Sets.newConcurrentHashSet();
    // Ranges currently building, mapped to their progress: (last token built, keys built)
    private final Map<Range<Token>, Pair<Token, Long>> pendingRanges = Maps.newConcurrentMap();
    // All submitted build tasks, kept so they can be stopped
    private final Set<ViewBuilderTask> tasks = Sets.newConcurrentHashSet();
    private volatile long keysBuilt = 0;
    private volatile boolean isStopped = false;
    // Tracks the latest submitted batch of build tasks; starts already-completed so stop() can
    // always wait on it
    private volatile Future<?> future = ImmediateFuture.success(null);

    ViewBuilder(ColumnFamilyStore baseCfs, View view)
    {
        this.baseCfs = baseCfs;
        this.view = view;
        ksName = baseCfs.metadata.keyspace;
    }

    /**
     * Starts the view build, unless the view is already marked as built in the system keyspace,
     * in which case only the distributed build status is updated if it hasn't been replicated yet.
     */
    public void start()
    {
        if (SystemKeyspace.isViewBuilt(ksName, view.name))
        {
            logger.debug("View already marked built for {}.{}", ksName, view.name);
            if (!SystemKeyspace.isViewStatusReplicated(ksName, view.name))
                updateDistributed();
        }
        else
        {
            SystemDistributedKeyspace.startViewBuild(ksName, view.name, localHostId);
            logger.debug("Starting build of view({}.{}). Flushing base table {}.{}",
                         ksName, view.name, ksName, baseCfs.name);
            baseCfs.forceBlockingFlush();
            loadStatusAndBuild();
        }
    }

    // Reloads persisted per-range progress, then (re)starts the build.
    private void loadStatusAndBuild()
    {
        loadStatus();
        build();
    }

    /**
     * Loads the persisted build status: ranges whose last built token equals the range's end are
     * considered built; any other recorded range is considered pending.
     */
    private void loadStatus()
    {
        builtRanges.clear();
        pendingRanges.clear();
        SystemKeyspace.getViewBuildStatus(ksName, view.name)
                      .forEach((range, pair) ->
                               {
                                   Token lastToken = pair.left;
                                   if (lastToken != null && lastToken.equals(range.right))
                                   {
                                       builtRanges.add(range);
                                       keysBuilt += pair.right;
                                   }
                                   else
                                   {
                                       pendingRanges.put(range, pair);
                                   }
                               });
    }

    /**
     * Submits build tasks for any local ranges that are neither built nor already building.
     * Synchronized so the initial start and the task-completion callback can't submit duplicates.
     */
    private synchronized void build()
    {
        if (isStopped)
        {
            logger.debug("Stopped build for view({}.{}) after covering {} keys", ksName, view.name, keysBuilt);
            return;
        }

        // Get the local ranges for which the view hasn't already been built nor it's building
        RangesAtEndpoint replicatedRanges = StorageService.instance.getLocalReplicas(ksName);
        Replicas.temporaryAssertFull(replicatedRanges);
        Set<Range<Token>> newRanges = replicatedRanges.ranges()
                                                      .stream()
                                                      .map(r -> r.subtractAll(builtRanges))
                                                      .flatMap(Set::stream)
                                                      .map(r -> r.subtractAll(pendingRanges.keySet()))
                                                      .flatMap(Set::stream)
                                                      .collect(Collectors.toSet());
        // If there are no new nor pending ranges we should finish the build
        if (newRanges.isEmpty() && pendingRanges.isEmpty())
        {
            finish();
            return;
        }

        // Split the new local ranges and add them to the pending set
        DatabaseDescriptor.getPartitioner()
                          .splitter()
                          .map(s -> s.split(newRanges, NUM_TASKS))
                          .orElse(newRanges)
                          .forEach(r -> pendingRanges.put(r, Pair.<Token, Long>create(null, 0L)));

        // Submit a new view build task for each building range.
        // We keep record of all the submitted tasks to be able of stopping them.
        List<Future<Long>> futures = pendingRanges.entrySet()
                                                  .stream()
                                                  .map(e -> new ViewBuilderTask(baseCfs,
                                                                                view,
                                                                                e.getKey(),
                                                                                e.getValue().left,
                                                                                e.getValue().right))
                                                  .peek(tasks::add)
                                                  .map(CompactionManager.instance::submitViewBuilder)
                                                  .collect(toList());

        // Add a callback to process any eventual new local range and mark the view as built, doing a delayed retry if
        // the tasks don't succeed
        Future<List<Long>> future = FutureCombiner.allOf(futures);
        future.addCallback(new FutureCallback<List<Long>>()
        {
            public void onSuccess(List<Long> result)
            {
                keysBuilt += result.stream().mapToLong(x -> x).sum();
                builtRanges.addAll(pendingRanges.keySet());
                pendingRanges.clear();
                // Re-run to pick up any ranges that became local while building, or to finish
                build();
            }

            public void onFailure(Throwable t)
            {
                if (t instanceof CompactionInterruptedException)
                {
                    internalStop(true);
                    keysBuilt = tasks.stream().mapToLong(ViewBuilderTask::keysBuilt).sum();
                    logger.info("Interrupted build for view({}.{}) after covering {} keys", ksName, view.name, keysBuilt);
                }
                else
                {
                    // Unexpected failure: retry the whole load-and-build cycle after a delay
                    ScheduledExecutors.nonPeriodicTasks.schedule(() -> loadStatusAndBuild(), 5, TimeUnit.MINUTES);
                    logger.warn("Materialized View failed to complete, sleeping 5 minutes before restarting", t);
                }
            }
        });
        this.future = future;
    }

    // Marks the view as built locally and propagates the status to the distributed keyspace.
    private void finish()
    {
        logger.debug("Marking view({}.{}) as built after covering {} keys ", ksName, view.name, keysBuilt);
        SystemKeyspace.finishViewBuildStatus(ksName, view.name);

        updateDistributed();
    }

    /**
     * Records the successful build in the distributed system keyspace and marks the status as
     * replicated locally, retrying after a delay on failure.
     */
    private void updateDistributed()
    {
        try
        {
            SystemDistributedKeyspace.successfulViewBuild(ksName, view.name, localHostId);
            SystemKeyspace.setViewBuiltReplicated(ksName, view.name);
        }
        catch (Exception e)
        {
            ScheduledExecutors.nonPeriodicTasks.schedule(this::updateDistributed, 5, TimeUnit.MINUTES);
            logger.warn("Failed to update the distributed status of view, sleeping 5 minutes before retrying", e);
        }
    }

    /**
     * Stops the view building.
     */
    void stop()
    {
        boolean wasStopped;
        synchronized (this)
        {
            wasStopped = isStopped;
            internalStop(false);
        }
        // TODO: very unclear what the goal is here. why do we wait only if we were the first to invoke stop?
        // but we wait outside the synchronized block to avoid a deadlock with `build` in the future callback
        if (!wasStopped)
            FBUtilities.waitOnFuture(future);
    }

    // Flags the builder as stopped and forwards the stop request to every submitted task.
    private void internalStop(boolean isCompactionInterrupted)
    {
        isStopped = true;
        tasks.forEach(task -> task.stop(isCompactionInterrupted));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* Copyright 2013 Near Infinity Corporation.
*/
/**
* This file is licensed to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.nearinfinity.honeycomb;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.NoSuchElementException;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.RowLock;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.Filter.ReturnCode;
import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.util.Bytes;
/**
* Mock implementation of HTableInterface. Holds any supplied data in a
* multi-dimensional NavigableMap which acts as a in-memory database. Useful for
* testing classes that operate on data using an HTableInterface.
* <p>
 * Instances should be obtained using <code>MockHTable.create()</code>. So while a
* DAO with a saving operation like
*
* <pre>
* public class MyDAO {
* private HTableInterface table;
*
* public MyDAO(HTableInterface table) {
* this.table = table;
* }
*
* public void saveData(byte[] id, byte[] data) throws IOException{
* Put put = new Put(id)
* put.add(family, qualifier, data);
* table.put(put);
* }
* }
* </pre>
* <p>
* is used in production like
*
* <pre>
* MyDAO(new HTable(conf, tableName)).saveData(id, data);
* </pre>
* <p>
* can be tested like
*
* <pre>
* @Test
* public void testSave() {
* MockHTable table = MockHTable.create();
* MyDAO(table).saveData(id, data);
* Get get = new Get(id);
* Result result = table.get(get);
* assertArrayEquals(data, result.getValue(family, qualifier));
* }
* </pre>
* <p>
* MockHTable instances can also be initialized with pre-loaded data using one
* of the String[][] or Map<String, Map<String, String>> data formats. While
 * the String[][] parameter allows loading data directly from source code, a Map can be
 * generated from a YAML document, using a parser.
*
* <pre>
* // String[][]
* MockHTable table = MockHTable.with(new String[][] {
* { "<rowid>", "<column>", "<value>" },
* { "id", "family:qualifier1", "data1" },
* { "id", "family:qualifier2", "data2" }
* });
* // YAML
* String database = "id:\n family:qualifier1: data1\n family:qualifier2: data2\n";
 * MockHTable table = MockHTable.with((Map<String, Map<String, String>>) new Yaml().load(database));
* </pre>
* <p>
* If value is not supposed to be a String, but an int, double or anything,
* <code>MockHTable.toEString()</code> can be used to turn it into a String.
*
* <p>
* In order to simplify assertions for tests that should put anything into
* database, MockHTable.read() works with two parameters (id and column) and
* returns anything written to that row/column. So, previous test can be reduced to
*
* <pre>
* @Test
* public void testSave() {
* MockHTable table = MockHTable.create();
* MyDAO(table).saveData(id, data);
* assertArrayEquals(data, table.read(id, "family:qualifier"));
* }
* </pre>
* <p>
*
* @author erdem
*
*/
public class MockHTable implements HTableInterface {
/**
* This is all the data for a MockHTable instance
*/
private final NavigableMap<byte[], NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>> data = new TreeMap<byte[], NavigableMap<byte[],NavigableMap<byte[],NavigableMap<Long,byte[]>>>>(Bytes.BYTES_COMPARATOR);
/**
 * Converts one row's in-memory data into a list of KeyValue's without any
 * timestamp restriction.
 *
 * @param row
 *            row value of the KeyValue's
 * @param rowdata
 *            family -&gt; qualifier -&gt; timestamp -&gt; value map to decode
 * @param maxVersions
 *            number of versions to return
 * @return List of KeyValue's
 */
private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, int maxVersions){
    // Delegate to the timestamp-constrained variant with the widest possible window.
    final long earliestTs = 0L;
    final long latestTs = Long.MAX_VALUE;
    return toKeyValue(row, rowdata, earliestTs, latestTs, maxVersions);
}
/**
* Helper method to convert some data into a list of KeyValue's with timestamp
* constraint
*
* @param row
* row value of the KeyValue's
* @param rowdata
* data to decode
* @param timestampStart
* start of the timestamp constraint
* @param timestampEnd
* end of the timestamp constraint
* @param maxVersions
* number of versions to return
* @return List of KeyValue's
*/
private static List<KeyValue> toKeyValue(byte[] row, NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowdata, long timestampStart, long timestampEnd, int maxVersions){
List<KeyValue> ret = new ArrayList<KeyValue>();
for (byte[] family : rowdata.keySet())
for (byte[] qualifier : rowdata.get(family).keySet()) {
int versionsAdded = 0;
for (Entry<Long, byte[]> tsToVal : rowdata.get(family).get(qualifier).descendingMap().entrySet()){
if (versionsAdded++ == maxVersions)
break;
Long timestamp = tsToVal.getKey();
if (timestamp < timestampStart)
continue;
if (timestamp > timestampEnd)
continue;
byte[] value = tsToVal.getValue();
ret.add(new KeyValue(row, family, qualifier, timestamp, value));
}
}
return ret;
}
/**
* Clients should not rely on table names so this returns null.
* @return null
*/
@Override
public byte[] getTableName() { return null; }
/**
* No configuration needed to work so this returns null.
* @return null
*/
@Override
public Configuration getConfiguration() { return null; }
/**
* No table descriptor needed so this returns null.
* @return null
*/
@Override
public HTableDescriptor getTableDescriptor() { return null; }
@Override
public boolean exists(Get get) throws IOException {
if(get.getFamilyMap() == null || get.getFamilyMap().size() == 0) {
return data.containsKey(get.getRow());
} else {
byte[] row = get.getRow();
if(!data.containsKey(row)) {
return false;
}
for(byte[] family : get.getFamilyMap().keySet()) {
if(!data.get(row).containsKey(family)) {
return false;
} else {
for(byte[] qualifier : get.getFamilyMap().get(family)) {
if(!data.get(row).get(family).containsKey(qualifier)) {
return false;
}
}
}
}
return true;
}
}
@Override
public Result get(Get get) throws IOException {
if (!data.containsKey(get.getRow()))
return new Result();
byte[] row = get.getRow();
List<KeyValue> kvs = new ArrayList<KeyValue>();
if (!get.hasFamilies()) {
kvs = toKeyValue(row, data.get(row), get.getMaxVersions());
} else {
for (byte[] family : get.getFamilyMap().keySet()){
if (data.get(row).get(family) == null)
continue;
NavigableSet<byte[]> qualifiers = get.getFamilyMap().get(family);
if (qualifiers == null || qualifiers.isEmpty())
qualifiers = data.get(row).get(family).navigableKeySet();
for (byte[] qualifier : qualifiers){
if (qualifier == null)
qualifier = "".getBytes();
if (!data.get(row).containsKey(family) ||
!data.get(row).get(family).containsKey(qualifier) ||
data.get(row).get(family).get(qualifier).isEmpty())
continue;
Entry<Long, byte[]> timestampAndValue = data.get(row).get(family).get(qualifier).lastEntry();
kvs.add(new KeyValue(row,family, qualifier, timestampAndValue.getKey(), timestampAndValue.getValue()));
}
}
}
Filter filter = get.getFilter();
if (filter != null) {
filter.reset();
List<KeyValue> nkvs = new ArrayList<KeyValue>(kvs.size());
for (KeyValue kv : kvs) {
if (filter.filterAllRemaining()) {
break;
}
if (filter.filterRowKey(kv.getBuffer(), kv.getRowOffset(), kv.getRowLength())) {
continue;
}
if (filter.filterKeyValue(kv) == ReturnCode.INCLUDE) {
nkvs.add(kv);
}
// ignoring next key hint which is a optimization to reduce file system IO
}
if (filter.hasFilterRow()) {
filter.filterRow(nkvs);
}
kvs = nkvs;
}
return new Result(kvs);
}
@Override
public Result getRowOrBefore(byte[] row, byte[] family) throws IOException {
// FIXME: implement
return null;
}
@Override
public ResultScanner getScanner(Scan scan) throws IOException {
final List<Result> ret = new ArrayList<Result>();
byte[] st = scan.getStartRow();
byte[] sp = scan.getStopRow();
Filter filter = scan.getFilter();
for (byte[] row : data.keySet()){
// if row is equal to startRow emit it. When startRow (inclusive) and
// stopRow (exclusive) is the same, it should not be excluded which would
// happen w/o this control.
if (st != null && st.length > 0 &&
Bytes.BYTES_COMPARATOR.compare(st, row) != 0) {
// if row is before startRow do not emit, pass to next row
if (st != null && st.length > 0 &&
Bytes.BYTES_COMPARATOR.compare(st, row) > 0)
continue;
// if row is equal to stopRow or after it do not emit, stop iteration
if (sp != null && sp.length > 0 &&
Bytes.BYTES_COMPARATOR.compare(sp, row) <= 0)
break;
}
List<KeyValue> kvs = null;
if (!scan.hasFamilies()) {
kvs = toKeyValue(row, data.get(row), scan.getTimeRange().getMin(), scan.getTimeRange().getMax(), scan.getMaxVersions());
} else {
kvs = new ArrayList<KeyValue>();
for (byte[] family : scan.getFamilyMap().keySet()){
if (data.get(row).get(family) == null)
continue;
NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(family);
if (qualifiers == null || qualifiers.isEmpty())
qualifiers = data.get(row).get(family).navigableKeySet();
for (byte[] qualifier : qualifiers){
if (data.get(row).get(family).get(qualifier) == null)
continue;
for (Long timestamp : data.get(row).get(family).get(qualifier).descendingKeySet()){
if (timestamp < scan.getTimeRange().getMin())
continue;
if (timestamp > scan.getTimeRange().getMax())
continue;
byte[] value = data.get(row).get(family).get(qualifier).get(timestamp);
kvs.add(new KeyValue(row, family, qualifier, timestamp, value));
if(kvs.size() == scan.getMaxVersions()) {
break;
}
}
}
}
}
if (filter != null) {
filter.reset();
List<KeyValue> nkvs = new ArrayList<KeyValue>(kvs.size());
for (KeyValue kv : kvs) {
if (filter.filterAllRemaining()) {
break;
}
if (filter.filterRowKey(kv.getBuffer(), kv.getRowOffset(), kv.getRowLength())) {
continue;
}
ReturnCode filterResult = filter.filterKeyValue(kv);
if (filterResult == ReturnCode.INCLUDE) {
nkvs.add(kv);
} else if (filterResult == ReturnCode.NEXT_ROW) {
break;
}
// ignoring next key hint which is a optimization to reduce file system IO
}
if (filter.hasFilterRow()) {
filter.filterRow(nkvs);
}
kvs = nkvs;
}
if (!kvs.isEmpty()) {
ret.add(new Result(kvs));
}
}
return new ResultScanner() {
private final Iterator<Result> iterator = ret.iterator();
@Override
public Iterator<Result> iterator() {
return iterator;
}
@Override
public Result[] next(int nbRows) throws IOException {
ArrayList<Result> resultSets = new ArrayList<Result>(nbRows);
for(int i = 0; i < nbRows; i++) {
Result next = next();
if (next != null) {
resultSets.add(next);
} else {
break;
}
}
return resultSets.toArray(new Result[resultSets.size()]);
}
@Override
public Result next() throws IOException {
try {
return iterator().next();
} catch (NoSuchElementException e) {
return null;
}
}
@Override
public void close() {}
};
}
@Override
public ResultScanner getScanner(byte[] family) throws IOException {
Scan scan = new Scan();
scan.addFamily(family);
return getScanner(scan);
}
@Override
public ResultScanner getScanner(byte[] family, byte[] qualifier)
throws IOException {
Scan scan = new Scan();
scan.addColumn(family, qualifier);
return getScanner(scan);
}
@Override
public void put(Put put) throws IOException {
byte[] row = put.getRow();
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> rowData = forceFind(data, row, new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR));
for (byte[] family : put.getFamilyMap().keySet()){
NavigableMap<byte[], NavigableMap<Long, byte[]>> familyData = forceFind(rowData, family, new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
for (KeyValue kv : put.getFamilyMap().get(family)){
kv.updateLatestStamp(Bytes.toBytes(System.currentTimeMillis()));
byte[] qualifier = kv.getQualifier();
NavigableMap<Long, byte[]> qualifierData = forceFind(familyData, qualifier, new TreeMap<Long, byte[]>());
qualifierData.put(kv.getTimestamp(), kv.getValue());
}
}
}
/**
* Helper method to find a key in a map. If key is not found, newObject is
* added to map and returned
*
* @param map
* map to extract value from
* @param key
* key to look for
* @param newObject
* set key to this if not found
* @return found value or newObject if not found
*/
private <K, V> V forceFind(NavigableMap<K, V> map, K key, V newObject){
V data = map.get(key);
if (data == null){
data = newObject;
map.put(key, data);
}
return data;
}
@Override
public void put(List<Put> puts) throws IOException {
for (Put put : puts)
put(put);
}
/**
* Checks if the value with given details exists in database, or is
* non-existent in the case of value being null
*
* @param row
* row
* @param family
* family
* @param qualifier
* qualifier
* @param value
* value
* @return true if value is not null and exists in db, or value is null and
* not exists in db, false otherwise
*/
private boolean check(byte[] row, byte[] family, byte[] qualifier, byte[] value){
if (value == null || value.length == 0)
return ! data.containsKey(row) ||
! data.get(row).containsKey(family) ||
! data.get(row).get(family).containsKey(qualifier);
return data.containsKey(row) &&
data.get(row).containsKey(family) &&
data.get(row).get(family).containsKey(qualifier) &&
! data.get(row).get(family).get(qualifier).isEmpty() &&
Arrays.equals(data.get(row).get(family).get(qualifier).lastEntry().getValue(), value);
}
@Override
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Put put) throws IOException {
if (check(row, family, qualifier, value)){
put(put);
return true;
}
return false;
}
@Override
public void delete(Delete delete) throws IOException {
byte[] row = delete.getRow();
if (data.get(row) == null)
return;
if (delete.getFamilyMap().size() == 0){
data.remove(row);
return;
}
for (byte[] family : delete.getFamilyMap().keySet()){
if (data.get(row).get(family) == null)
continue;
if (delete.getFamilyMap().get(family).isEmpty()){
data.get(row).remove(family);
continue;
}
for (KeyValue kv : delete.getFamilyMap().get(family)){
data.get(row).get(kv.getFamily()).remove(kv.getQualifier());
}
if(data.get(row).get(family).isEmpty()) {
data.get(row).remove(family);
}
}
if(data.get(row).isEmpty()) {
data.remove(row);
}
}
@Override
public void delete(List<Delete> deletes) throws IOException {
for (Delete delete : deletes)
delete(delete);
}
@Override
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Delete delete) throws IOException {
if(check(row, family, qualifier, value)){
delete(delete);
return true;
}
return false;
}
@Override
public long incrementColumnValue(byte[] row, byte[] family,
byte[] qualifier, long amount) throws IOException {
return incrementColumnValue(row, family, qualifier, amount, true);
}
@Override
public long incrementColumnValue(byte[] row, byte[] family,
byte[] qualifier, long amount, boolean writeToWAL) throws IOException {
if (check(row, family, qualifier, null)){
Put put = new Put(row);
put.add(family, qualifier, Bytes.toBytes(amount));
put(put);
return amount;
}
long newValue = Bytes.toLong(data.get(row).get(family).get(qualifier).lastEntry().getValue())+amount;
data.get(row).get(family).get(qualifier).put(System.currentTimeMillis(),
Bytes.toBytes(newValue));
return newValue;
}
@Override
public boolean isAutoFlush() {
return true;
}
@Override
public void flushCommits() throws IOException {
}
@Override
public void close() throws IOException {
}
@Override
public RowLock lockRow(byte[] row) throws IOException {
return null;
}
@Override
public void unlockRow(RowLock rl) throws IOException {
}
@Override
public Object[] batch(List<? extends Row> actions)
throws IOException,
InterruptedException {
List<Result> results = new ArrayList<Result>();
for (Row r : actions) {
if (r instanceof Delete) {
delete((Delete) r);
continue;
}
if (r instanceof Put) {
put((Put) r);
continue;
}
if (r instanceof Get) {
results.add(get((Get) r));
}
}
return results.toArray();
}
@Override
public void batch(List<? extends Row> actions, Object[] results)
throws IOException,
InterruptedException {
results = batch(actions);
}
@Override
public Result[] get(List<Get> gets) throws IOException {
List<Result> results = new ArrayList<Result>();
for (Get g : gets) {
results.add(get(g));
}
return results.toArray(new Result[results.size()]);
}
@Override
public Result increment(Increment increment) throws IOException {
List<KeyValue> kvs = new ArrayList<KeyValue>();
Map<byte[], NavigableMap<byte[], Long>> famToVal = increment.getFamilyMap();
for (Entry<byte[], NavigableMap<byte[], Long>> ef : famToVal.entrySet()) {
byte[] family = ef.getKey();
NavigableMap<byte[], Long> qToVal = ef.getValue();
for (Entry<byte[], Long> eq : qToVal.entrySet()) {
incrementColumnValue(increment.getRow(), family, eq.getKey(), eq.getValue());
kvs.add(new KeyValue(increment.getRow(), family, eq.getKey(), Bytes.toBytes(eq.getValue())));
}
}
return new Result(kvs);
}
@Override
public <T extends CoprocessorProtocol, R> void coprocessorExec(Class<T> protocol, byte[] startKey, byte[] endKey, Batch.Call<T, R> callable, Batch.Callback<R> callback) throws IOException, Throwable {
}
@Override
public void setAutoFlush(boolean b) {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void setAutoFlush(boolean b, boolean b2) {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public long getWriteBufferSize() {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public void setWriteBufferSize(long l) throws IOException {
//To change body of implemented methods use File | Settings | File Templates.
}
@Override
public <T extends CoprocessorProtocol, R> Map<byte[], R> coprocessorExec(Class<T> protocol, byte[] startKey, byte[] endKey, Batch.Call<T, R> callable) throws IOException, Throwable {
return null;
}
@Override
public <T extends CoprocessorProtocol> T coprocessorProxy(Class<T> protocol, byte[] row) {
return null;
}
@Override
public Result append(Append append) throws IOException {
return null;
}
@Override
public void mutateRow(RowMutations rm) throws IOException {
}
private MockHTable(){}
/**
* Default way of constructing a MockHTable
* @return a new MockHTable
*/
public static MockHTable create(){
return new MockHTable();
}
/**
* Create a MockHTable with some pre-loaded data. Parameter should be a map of
* column-to-data mappings of rows. It can be created with a YAML like
*
* <pre>
* rowid:
* family1:qualifier1: value1
* family2:qualifier2: value2
* </pre>
*
* @param dump
* pre-loaded data
* @return a new MockHTable loaded with given data
*/
public static MockHTable with(Map<String, Map<String, String>> dump){
MockHTable ret = new MockHTable();
for (String row : dump.keySet()){
for (String column : dump.get(row).keySet()){
String val = dump.get(row).get(column);
put(ret, row, column, val);
}
}
return ret;
}
/**
* Helper method of pre-loaders, adds parameters to data.
*
* @param ret
* data to load into
* @param row
* rowid
* @param column
* family:qualifier encoded value
* @param val
* value
*/
private static void put(MockHTable ret, String row, String column,
String val) {
String[] fq = split(column);
byte[] family = Bytes.toBytesBinary(fq[0]);
byte[] qualifier = Bytes.toBytesBinary(fq[1]);
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> families = ret.forceFind(ret.data, Bytes.toBytesBinary(row), new TreeMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>>(Bytes.BYTES_COMPARATOR));
NavigableMap<byte[], NavigableMap<Long, byte[]>> qualifiers = ret.forceFind(families, family, new TreeMap<byte[], NavigableMap<Long, byte[]>>(Bytes.BYTES_COMPARATOR));
NavigableMap<Long, byte[]> values = ret.forceFind(qualifiers, qualifier, new TreeMap<Long, byte[]>());
values.put(System.currentTimeMillis(), Bytes.toBytesBinary(val));
}
/**
* Create a MockHTable with some pre-loaded data. Parameter should be an array
* of string arrays which define every column value individually.
*
* <pre>
* new String[][] {
* { "<rowid>", "<column>", "<value>" },
* { "id", "family:qualifier1", "data1" },
* { "id", "family:qualifier2", "data2" }
* });
* </pre>
*
* @param dump
* @return Mock table pre-loaded with provided data
*/
public static MockHTable with(String[][] dump){
MockHTable ret = new MockHTable();
for(String[] row : dump){
put(ret, row[0], row[1], row[2]);
}
return ret;
}
/**
* Column identification helper
*
* @param column
* column name in the format family:qualifier
* @return <code>{"family", "qualifier"}</code>
*/
private static String[] split(String column){
return new String[]{
column.substring(0, column.indexOf(':')),
column.substring(column.indexOf(':')+1)};
}
/**
* Read a value saved in the object. Useful for making assertions in tests.
*
* @param rowid
* rowid of the data to read
* @param column
* family:qualifier of the data to read
* @return value or null if row or column of the row does not exist
*/
public byte[] read(String rowid, String column){
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> row = data.get(Bytes.toBytesBinary(rowid));
if (row == null)
return null;
String[] fq = split(column);
byte[] family = Bytes.toBytesBinary(fq[0]);
byte[] qualifier = Bytes.toBytesBinary(fq[1]);
if (!row.containsKey(family))
return null;
if (!row.get(family).containsKey(qualifier))
return null;
return row.get(family).get(qualifier).lastEntry().getValue();
}
public static String toEString(boolean val){
return Bytes.toStringBinary(Bytes.toBytes(val));
}
public static String toEString(double val){
return Bytes.toStringBinary(Bytes.toBytes(val));
}
public static String toEString(float val){
return Bytes.toStringBinary(Bytes.toBytes(val));
}
public static String toEString(int val){
return Bytes.toStringBinary(Bytes.toBytes(val));
}
public static String toEString(long val){
return Bytes.toStringBinary(Bytes.toBytes(val));
}
public static String toEString(short val){
return Bytes.toStringBinary(Bytes.toBytes(val));
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.