gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.ui.trans.dialog;

import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.swt.widgets.Shell;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.KettleLogStore;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.debug.BreakPointListener;
import org.pentaho.di.trans.debug.StepDebugMeta;
import org.pentaho.di.trans.debug.TransDebugMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;

import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;

/**
 * Takes care of displaying a dialog that will handle the wait while previewing a transformation...
 *
 * @author Matt
 * @since 13-jan-2006
 */
public class TransPreviewProgressDialog {
  private static Class<?> PKG = TransDialog.class; // for i18n purposes, needed by Translator2!!

  private Shell shell;
  private TransMeta transMeta;
  private String[] previewStepNames;
  private int[] previewSize;
  private Trans trans;
  private boolean cancelled;
  private String loggingText;
  private TransDebugMeta transDebugMeta;

  /**
   * Creates a new dialog that will handle the wait while previewing a transformation...
   *
   * @param shell the parent SWT shell
   * @param transMeta the transformation to preview
   * @param previewStepNames the names of the steps to preview
   * @param previewSize for each previewed step, the number of rows to buffer (parallel to previewStepNames)
   */
  public TransPreviewProgressDialog( Shell shell, TransMeta transMeta, String[] previewStepNames, int[] previewSize ) {
    this.shell = shell;
    this.transMeta = transMeta;
    this.previewStepNames = previewStepNames;
    this.previewSize = previewSize;

    cancelled = false;
  }

  /**
   * Opens the progress dialog with error dialogs enabled.
   *
   * @return the previewed {@link TransMeta}, or null on failure
   */
  public TransMeta open() {
    return open( true );
  }

  /**
   * Opens the progress dialog
   *
   * @param showErrorDialogs dictates whether error dialogs should be shown when errors occur - can be set to false
   *                         to let the caller control error dialogs instead.
   * @return a {@link TransMeta}, or null when the preview failed or could not be started
   */
  public TransMeta open( final boolean showErrorDialogs ) {
    IRunnableWithProgress op = new IRunnableWithProgress() {
      public void run( IProgressMonitor monitor ) throws InvocationTargetException, InterruptedException {
        doPreview( monitor, showErrorDialogs );
      }
    };

    try {
      final ProgressMonitorDialog pmd = new ProgressMonitorDialog( shell );

      // Run something in the background to cancel active database queries, forcibly if needed!
      Runnable run = new Runnable() {
        public void run() {
          IProgressMonitor monitor = pmd.getProgressMonitor();
          // Poll until the dialog is disposed or the user pressed cancel.
          while ( pmd.getShell() == null || ( !pmd.getShell().isDisposed() && !monitor.isCanceled() ) ) {
            try {
              Thread.sleep( 100 );
            } catch ( InterruptedException e ) {
              // Ignore: we simply poll again
            }
          }

          if ( monitor.isCanceled() ) { // Disconnect and see what happens!
            try {
              trans.stopAll();
            } catch ( Exception e ) {
              /* Ignore: best-effort stop, trans may not be running yet */
            }
          }
        }
      };

      // Start the cancel tracker in the background!
      new Thread( run ).start();

      pmd.run( true, true, op );
    } catch ( InvocationTargetException | InterruptedException e ) {
      // Both failure modes get the same treatment: report (when allowed) and return null.
      if ( showErrorDialogs ) {
        new ErrorDialog( shell,
          BaseMessages.getString( PKG, "TransPreviewProgressDialog.ErrorLoadingTransformation.DialogTitle" ),
          BaseMessages.getString( PKG, "TransPreviewProgressDialog.ErrorLoadingTransformation.DialogMessage" ), e );
      }
      transMeta = null;
    }

    return transMeta;
  }

  /**
   * Runs the preview: prepares the transformation, attaches the preview/debug metadata,
   * starts the step threads and polls until every previewed step has buffered the
   * requested number of rows, the transformation finished, or the user canceled.
   *
   * @param progressMonitor monitor used to report progress and detect cancellation
   * @param showErrorDialogs whether errors are reported in a dialog (async on the UI thread)
   */
  private void doPreview( final IProgressMonitor progressMonitor, final boolean showErrorDialogs ) {
    progressMonitor.beginTask(
      BaseMessages.getString( PKG, "TransPreviewProgressDialog.Monitor.BeginTask.Title" ), 100 );

    // This transformation is ready to run in preview!
    trans = new Trans( transMeta );
    trans.setPreview( true );

    // Prepare the execution...
    //
    try {
      trans.prepareExecution( null );
    } catch ( final KettleException e ) {
      if ( showErrorDialogs ) {
        shell.getDisplay().asyncExec( new Runnable() {
          public void run() {
            new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.Error.Title" ),
              BaseMessages.getString( PKG, "TransPreviewProgressDialog.Exception.ErrorPreparingTransformation" ), e );
          }
        } );
      }

      // It makes no sense to continue, so just stop running...
      //
      return;
    }

    // Add the preview / debugging information...
    //
    transDebugMeta = new TransDebugMeta( transMeta );
    for ( int i = 0; i < previewStepNames.length; i++ ) {
      StepMeta stepMeta = transMeta.findStep( previewStepNames[i] );
      StepDebugMeta stepDebugMeta = new StepDebugMeta( stepMeta );
      stepDebugMeta.setReadingFirstRows( true );
      stepDebugMeta.setRowCount( previewSize[i] );
      transDebugMeta.getStepDebugMetaMap().put( stepMeta, stepDebugMeta );
    }

    // Set the appropriate listeners on the transformation...
    //
    transDebugMeta.addRowListenersToTransformation( trans );

    final List<String> previewComplete = new ArrayList<String>();

    // We add a break-point that is called every time we have a step with a full preview row buffer.
    // That makes it easy and fast to see if we have all the rows we need.
    // NOTE: register this listener exactly ONCE, before the polling loop below; it used to be
    // re-registered on every loop iteration, so each full buffer was reported multiple times.
    //
    transDebugMeta.addBreakPointListers( new BreakPointListener() {
      public void breakPointHit( TransDebugMeta transDebugMeta, StepDebugMeta stepDebugMeta,
        RowMetaInterface rowBufferMeta, List<Object[]> rowBuffer ) {
        String stepName = stepDebugMeta.getStepMeta().getName();
        previewComplete.add( stepName );
        progressMonitor.subTask(
          BaseMessages.getString( PKG, "TransPreviewProgressDialog.SubTask.StepPreviewFinished", stepName ) );
      }
    } );

    // Fire off the step threads... start running!
    //
    try {
      trans.startThreads();
    } catch ( final KettleException e ) {
      // Respect the showErrorDialogs flag here too, consistent with prepareExecution() above.
      if ( showErrorDialogs ) {
        shell.getDisplay().asyncExec( new Runnable() {
          public void run() {
            new ErrorDialog( shell, BaseMessages.getString( PKG, "System.Dialog.Error.Title" ),
              BaseMessages.getString( PKG, "TransPreviewProgressDialog.Exception.ErrorPreparingTransformation" ), e );
          }
        } );
      }

      // It makes no sense to continue, so just stop running...
      //
      return;
    }

    int previousPct = 0;
    while ( previewComplete.size() < previewStepNames.length
      && !trans.isFinished() && !progressMonitor.isCanceled() ) {

      // How many rows are done?
      int nrDone = 0;
      int nrTotal = 0;
      for ( StepDebugMeta stepDebugMeta : transDebugMeta.getStepDebugMetaMap().values() ) {
        nrDone += stepDebugMeta.getRowBuffer().size();
        nrTotal += stepDebugMeta.getRowCount();
      }

      // Change the percentage... (guard against division by zero when no rows were requested)
      int pct = nrTotal == 0 ? 0 : 100 * nrDone / nrTotal;

      int worked = pct - previousPct;

      if ( worked > 0 ) {
        progressMonitor.worked( worked );
      }
      previousPct = pct;

      try {
        Thread.sleep( 500 );
      } catch ( InterruptedException e ) {
        // Ignore errors
      }

      if ( progressMonitor.isCanceled() ) {
        cancelled = true;
        trans.stopAll();
      }
    }

    trans.stopAll();

    // Capture preview activity to a String:
    loggingText =
      KettleLogStore.getAppender().getBuffer( trans.getLogChannel().getLogChannelId(), true ).toString();

    progressMonitor.done();
  }

  /**
   * @param stepname
   *          the name of the step to get the preview rows for
   * @return A list of rows as the result of the preview run, or null if the step was not previewed.
   */
  public List<Object[]> getPreviewRows( String stepname ) {
    if ( transDebugMeta == null ) {
      return null;
    }

    for ( StepMeta stepMeta : transDebugMeta.getStepDebugMetaMap().keySet() ) {
      if ( stepMeta.getName().equals( stepname ) ) {
        StepDebugMeta stepDebugMeta = transDebugMeta.getStepDebugMetaMap().get( stepMeta );
        return stepDebugMeta.getRowBuffer();
      }
    }

    return null;
  }

  /**
   * @param stepname
   *          the name of the step to get the preview rows for
   * @return A description of the row (metadata), or null if the step was not previewed.
   */
  public RowMetaInterface getPreviewRowsMeta( String stepname ) {
    if ( transDebugMeta == null ) {
      return null;
    }

    for ( StepMeta stepMeta : transDebugMeta.getStepDebugMetaMap().keySet() ) {
      if ( stepMeta.getName().equals( stepname ) ) {
        StepDebugMeta stepDebugMeta = transDebugMeta.getStepDebugMetaMap().get( stepMeta );
        return stepDebugMeta.getRowBufferMeta();
      }
    }

    return null;
  }

  /**
   * @return true is the preview was canceled by the user
   */
  public boolean isCancelled() {
    return cancelled;
  }

  /**
   * @return The logging text from the latest preview run
   */
  public String getLoggingText() {
    return loggingText;
  }

  /**
   * @return The transformation object that executed the preview TransMeta
   */
  public Trans getTrans() {
    return trans;
  }

  /**
   * @return the transDebugMeta
   */
  public TransDebugMeta getTransDebugMeta() {
    return transDebugMeta;
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.cluster.settings;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.master.TransportMasterNodeOperationAction;
import org.elasticsearch.cluster.AckedClusterStateUpdateTask;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.settings.ClusterDynamicSettings;
import org.elasticsearch.cluster.settings.DynamicSettings;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.util.Map;

import static org.elasticsearch.cluster.ClusterState.builder;

/**
 * Master-node action that applies dynamic cluster-setting updates (transient and
 * persistent), then — when something actually changed — submits a follow-up reroute
 * task so allocation deciders pick up the new settings.
 */
public class TransportClusterUpdateSettingsAction extends TransportMasterNodeOperationAction<ClusterUpdateSettingsRequest, ClusterUpdateSettingsResponse> {

    private final AllocationService allocationService;

    private final DynamicSettings dynamicSettings;

    @Inject
    public TransportClusterUpdateSettingsAction(Settings settings, TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
                                                AllocationService allocationService, @ClusterDynamicSettings DynamicSettings dynamicSettings,
                                                ActionFilters actionFilters) {
        super(settings, ClusterUpdateSettingsAction.NAME, transportService, clusterService, threadPool, actionFilters);
        this.allocationService = allocationService;
        this.dynamicSettings = dynamicSettings;
    }

    @Override
    protected String executor() {
        // The state-update task is submitted to the cluster service; no dedicated thread pool needed here.
        return ThreadPool.Names.SAME;
    }

    @Override
    protected ClusterBlockException checkBlock(ClusterUpdateSettingsRequest request, ClusterState state) {
        // allow for dedicated changes to the metadata blocks, so we don't block those to allow to "re-enable" it
        if ((request.transientSettings().getAsMap().isEmpty() && request.persistentSettings().getAsMap().size() == 1 && request.persistentSettings().get(MetaData.SETTING_READ_ONLY) != null) ||
                request.persistentSettings().getAsMap().isEmpty() && request.transientSettings().getAsMap().size() == 1 && request.transientSettings().get(MetaData.SETTING_READ_ONLY) != null) {
            return null;
        }
        return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE);
    }

    @Override
    protected ClusterUpdateSettingsRequest newRequest() {
        return new ClusterUpdateSettingsRequest();
    }

    @Override
    protected ClusterUpdateSettingsResponse newResponse() {
        return new ClusterUpdateSettingsResponse();
    }

    @Override
    protected void masterOperation(final ClusterUpdateSettingsRequest request, final ClusterState state, final ActionListener<ClusterUpdateSettingsResponse> listener) throws ElasticsearchException {
        // Builders that accumulate only the settings that were actually accepted; they
        // are reported back in every response variant below.
        final ImmutableSettings.Builder transientUpdates = ImmutableSettings.settingsBuilder();
        final ImmutableSettings.Builder persistentUpdates = ImmutableSettings.settingsBuilder();

        clusterService.submitStateUpdateTask("cluster_update_settings", Priority.IMMEDIATE, new AckedClusterStateUpdateTask<ClusterUpdateSettingsResponse>(request, listener) {

            // Set by execute() when at least one valid dynamic setting was applied.
            private volatile boolean changed = false;

            @Override
            protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) {
                return new ClusterUpdateSettingsResponse(acknowledged, transientUpdates.build(), persistentUpdates.build());
            }

            @Override
            public void onAllNodesAcked(@Nullable Throwable t) {
                if (changed) {
                    reroute(true);
                } else {
                    super.onAllNodesAcked(t);
                }
            }

            @Override
            public void onAckTimeout() {
                if (changed) {
                    reroute(false);
                } else {
                    super.onAckTimeout();
                }
            }

            private void reroute(final boolean updateSettingsAcked) {
                // We're about to send a second update task, so we need to check if we're still the elected master
                // For example the minimum_master_node could have been breached and we're no longer elected master,
                // so we should *not* execute the reroute.
                if (!clusterService.state().nodes().localNodeMaster()) {
                    logger.debug("Skipping reroute after cluster update settings, because node is no longer master");
                    listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
                    return;
                }

                // The reason the reroute needs to be send as separate update task, is that all the *cluster* settings are encapsulate
                // in the components (e.g. FilterAllocationDecider), so the changes made by the first call aren't visible
                // to the components until the ClusterStateListener instances have been invoked, but are visible after
                // the first update task has been completed.
                clusterService.submitStateUpdateTask("reroute_after_cluster_update_settings", Priority.URGENT, new AckedClusterStateUpdateTask<ClusterUpdateSettingsResponse>(request, listener) {

                    @Override
                    public boolean mustAck(DiscoveryNode discoveryNode) {
                        //we wait for the reroute ack only if the update settings was acknowledged
                        return updateSettingsAcked;
                    }

                    @Override
                    //we return when the cluster reroute is acked or it times out but the acknowledged flag depends on whether the update settings was acknowledged
                    protected ClusterUpdateSettingsResponse newResponse(boolean acknowledged) {
                        return new ClusterUpdateSettingsResponse(updateSettingsAcked && acknowledged, transientUpdates.build(), persistentUpdates.build());
                    }

                    @Override
                    public void onNoLongerMaster(String source) {
                        logger.debug("failed to perform reroute after cluster settings were updated - current node is no longer a master");
                        listener.onResponse(new ClusterUpdateSettingsResponse(updateSettingsAcked, transientUpdates.build(), persistentUpdates.build()));
                    }

                    @Override
                    public void onFailure(String source, Throwable t) {
                        //if the reroute fails we only log
                        logger.debug("failed to perform [{}]", t, source);
                        listener.onFailure(new ElasticsearchException("reroute after update settings failed", t));
                    }

                    @Override
                    public ClusterState execute(final ClusterState currentState) {
                        // now, reroute in case things that require it changed (e.g. number of replicas)
                        RoutingAllocation.Result routingResult = allocationService.reroute(currentState);
                        if (!routingResult.changed()) {
                            return currentState;
                        }
                        return ClusterState.builder(currentState).routingResult(routingResult).build();
                    }
                });
            }

            @Override
            public void onFailure(String source, Throwable t) {
                logger.debug("failed to perform [{}]", t, source);
                super.onFailure(source, t);
            }

            @Override
            public ClusterState execute(final ClusterState currentState) {
                // Validate and apply the transient settings; invalid / non-dynamic ones are
                // logged and skipped rather than failing the whole request.
                ImmutableSettings.Builder transientSettings = ImmutableSettings.settingsBuilder();
                transientSettings.put(currentState.metaData().transientSettings());
                for (Map.Entry<String, String> entry : request.transientSettings().getAsMap().entrySet()) {
                    if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
                        String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue());
                        if (error == null) {
                            transientSettings.put(entry.getKey(), entry.getValue());
                            transientUpdates.put(entry.getKey(), entry.getValue());
                            changed = true;
                        } else {
                            logger.warn("ignoring transient setting [{}], [{}]", entry.getKey(), error);
                        }
                    } else {
                        logger.warn("ignoring transient setting [{}], not dynamically updateable", entry.getKey());
                    }
                }

                // Same treatment for the persistent settings.
                ImmutableSettings.Builder persistentSettings = ImmutableSettings.settingsBuilder();
                persistentSettings.put(currentState.metaData().persistentSettings());
                for (Map.Entry<String, String> entry : request.persistentSettings().getAsMap().entrySet()) {
                    if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
                        String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue());
                        if (error == null) {
                            persistentSettings.put(entry.getKey(), entry.getValue());
                            persistentUpdates.put(entry.getKey(), entry.getValue());
                            changed = true;
                        } else {
                            logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error);
                        }
                    } else {
                        logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey());
                    }
                }

                if (!changed) {
                    return currentState;
                }

                MetaData.Builder metaData = MetaData.builder(currentState.metaData())
                        .persistentSettings(persistentSettings.build())
                        .transientSettings(transientSettings.build());

                // Keep the global read-only block in sync with the (possibly updated) setting.
                ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
                boolean updatedReadOnly = metaData.persistentSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false) || metaData.transientSettings().getAsBoolean(MetaData.SETTING_READ_ONLY, false);
                if (updatedReadOnly) {
                    blocks.addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
                } else {
                    blocks.removeGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK);
                }

                return builder(currentState).metaData(metaData).blocks(blocks).build();
            }
        });
    }
}
package com.iwillow.app.android.ui.view;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ValueAnimator;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.os.Build;
import android.support.annotation.ColorInt;
import android.support.annotation.Nullable;
import android.support.annotation.RequiresApi;
import android.text.TextPaint;
import android.util.AttributeSet;
import android.view.View;

import com.iwillow.app.android.R;

import java.util.ArrayList;
import java.util.List;

import static com.iwillow.app.android.util.DimenUtil.dp2px;
import static com.iwillow.app.android.util.DimenUtil.sp2px;

/**
 * Created by https://github.com/iwillow/ on 2017/4/17.
 *
 * A horizontal step indicator: finished steps are drawn as a bitmap, the current
 * step as a highlighted circle with a pulsing "spring" animation, and upcoming
 * steps as plain circles, with a label under each step.
 */
public class StepView extends View {

    private float mMaxPadding;
    private List<String> mItems = new ArrayList<>();
    private int mCurrentStep;
    private Paint mPaint;
    private TextPaint mTextPaint;
    private float mLineWidth;
    private float mCircleRadius;
    private Bitmap mBitmap;
    private Paint mBitmapPaint;
    private float mDistance;
    private ValueAnimator mStepAnimator;
    private float mStepRadius;
    private float mMaxStepRadius;
    private float mMinStepRadius;
    private boolean mRunning;
    private int mTextColor;
    private int mLineColor;
    private int mCurrentStepColor;
    private int mNextStepColor;
    private int mAnimatorColor;

    public StepView(Context context) {
        super(context);
        init();
    }

    public StepView(Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public StepView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public StepView(Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);
        init();
    }

    /**
     * One-time initialization of paints, colors, dimensions and the "finished" bitmap.
     */
    private void init() {
        mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mLineColor = getResources().getColor(R.color.colorGray);
        mCurrentStepColor = getResources().getColor(R.color.colorOk);
        mNextStepColor = getResources().getColor(R.color.colorGray);
        mAnimatorColor = getResources().getColor(R.color.colorOk1);
        mPaint.setColor(getResources().getColor(R.color.colorGray));
        mPaint.setAntiAlias(true);
        mLineWidth = dp2px(getResources(), 1f);
        mCircleRadius = dp2px(getResources(), 5f);
        mMaxStepRadius = mCircleRadius + dp2px(getResources(), 6f);
        mMinStepRadius = mCircleRadius + dp2px(getResources(), 3f);
        mDistance = dp2px(getResources(), 10f);
        mCurrentStep = 0;
        mTextPaint = new TextPaint();
        mTextPaint.setAntiAlias(true);
        mTextPaint.setStyle(Paint.Style.FILL);
        mTextColor = getResources().getColor(R.color.colorGray);
        mTextPaint.setColor(mTextColor);
        mTextPaint.setTextAlign(Paint.Align.CENTER);
        mTextPaint.setTextSize(sp2px(getResources(), 12f));
        mBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.ic_finished);
        mBitmapPaint = new Paint();
        mBitmapPaint.setAntiAlias(true);
        mBitmapPaint.setDither(true);
        mBitmapPaint.setStyle(Paint.Style.STROKE);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int width;
        int height;
        int widthSize = MeasureSpec.getSize(widthMeasureSpec);
        int widthMode = MeasureSpec.getMode(widthMeasureSpec);
        int heightSize = MeasureSpec.getSize(heightMeasureSpec);
        int heightMode = MeasureSpec.getMode(heightMeasureSpec);
        // Fall back to a fixed default size (250dp x 50dp) for wrap_content/unspecified.
        if (widthMode == MeasureSpec.AT_MOST || widthMode == MeasureSpec.UNSPECIFIED) {
            width = (int) dp2px(getResources(), 250);
        } else {
            width = widthSize;
        }
        if (heightMode == MeasureSpec.AT_MOST || heightMode == MeasureSpec.UNSPECIFIED) {
            height = (int) dp2px(getResources(), 50);
        } else {
            height = heightSize;
        }
        setMeasuredDimension(width, height);
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        // Effective horizontal inset: 30dp plus the largest padding on any side.
        int p = getPaddingLeft();
        p = Math.max(p, getPaddingRight());
        p = Math.max(p, getPaddingBottom());
        mMaxPadding = dp2px(getResources(), 30) + Math.max(p, getPaddingTop());
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        drawLine(canvas);
        drawStepItems(canvas);
        drawSpringAnimation(canvas);
    }

    public void setTextColor(@ColorInt int textColor) {
        if (mTextColor != textColor) {
            mTextColor = textColor;
            invalidate();
        }
    }

    public void setLineColor(@ColorInt int lineColor) {
        if (mLineColor != lineColor) {
            mLineColor = lineColor;
            invalidate();
        }
    }

    public void setLineWidth(float lineWidth) {
        if (mLineWidth != lineWidth && lineWidth > 0) {
            mLineWidth = lineWidth;
            invalidate();
        }
    }

    public void setCurrentStepColor(@ColorInt int currentStepColor) {
        if (mCurrentStepColor != currentStepColor) {
            mCurrentStepColor = currentStepColor;
            invalidate();
        }
    }

    public void setNextStepColor(@ColorInt int nextStepColor) {
        if (mNextStepColor != nextStepColor) {
            mNextStepColor = nextStepColor;
            invalidate();
        }
    }

    /**
     * Draws the horizontal connector line behind the step circles.
     */
    private void drawLine(Canvas canvas) {
        canvas.save();
        // Draw in a coordinate system centered on the view.
        canvas.translate(getWidth() / 2, getHeight() / 2);
        float startX = mMaxPadding - 0.5f * getWidth();
        float endX = 0.5f * getWidth() - mMaxPadding;
        mPaint.setStyle(Paint.Style.FILL);
        mPaint.setColor(mLineColor);
        mPaint.setStrokeWidth(mLineWidth);
        canvas.drawLine(startX, -mDistance, endX, -mDistance, mPaint);
        canvas.restore();
    }

    /**
     * Draws each step marker (finished bitmap / current circle / next circle) and its label.
     */
    private void drawStepItems(Canvas canvas) {
        if (mItems == null || mItems.size() < 2) {
            return;
        }
        canvas.save();
        canvas.translate(getWidth() / 2, getHeight() / 2);
        float startX = mMaxPadding - 0.5f * getWidth();
        float segment = (getWidth() - 2f * mMaxPadding) / (mItems.size() - 1);
        mPaint.setStyle(Paint.Style.FILL);
        mTextPaint.setColor(mTextColor);
        Paint.FontMetrics fontMetrics = mTextPaint.getFontMetrics();
        float height = fontMetrics.descent - fontMetrics.ascent + mDistance;
        for (int i = 0; i < mItems.size(); i++) {
            String item = mItems.get(i);
            float cx = startX + i * segment;
            if (i < mCurrentStep) {
                // Finished steps get the check-mark bitmap, centered on the step position.
                canvas.drawBitmap(mBitmap, cx - 0.5f * mBitmap.getWidth(), -0.5f * mBitmap.getHeight() - mDistance, mBitmapPaint);
            } else if (i == mCurrentStep) {
                mPaint.setColor(mCurrentStepColor);
                canvas.drawCircle(cx, -mDistance, mCircleRadius, mPaint);
            } else {
                mPaint.setColor(mNextStepColor);
                canvas.drawCircle(cx, -mDistance, mCircleRadius, mPaint);
            }
            canvas.drawText(item, cx, height, mTextPaint);
        }
        canvas.restore();
    }

    /**
     * Replaces the step labels. Requires at least two items.
     *
     * @param items the step labels, in order
     * @throws NullPointerException     if items is null
     * @throws IllegalArgumentException if fewer than two items are supplied
     */
    public void setItems(List<String> items) {
        if (items == null) {
            throw new NullPointerException("StepItems cannot be null");
        } else if (items.size() < 2) {
            throw new IllegalArgumentException("The amount of items cannot be less than 2");
        } else {
            mItems.clear();
            mItems.addAll(items);
            resetAnim();
            invalidate();
        }
    }

    /**
     * Draws the pulsing highlight circle around the current step while the animator runs.
     */
    private void drawSpringAnimation(Canvas canvas) {
        if (!mRunning) {
            return;
        }
        canvas.save();
        canvas.translate(getWidth() / 2, getHeight() / 2);
        float startX = mMaxPadding - 0.5f * getWidth();
        float segment = (getWidth() - 2f * mMaxPadding) / (mItems.size() - 1);
        float cx = startX + mCurrentStep * segment;
        mPaint.setColor(mAnimatorColor);
        canvas.drawCircle(cx, -mDistance, mStepRadius, mPaint);
        canvas.restore();
    }

    /**
     * Moves the indicator to the given zero-based step and restarts the pulse animation.
     *
     * @param step the new current step index
     * @throws IllegalArgumentException if step is out of [0, items - 1]
     */
    public void setCurrentStep(int step) {
        if (step < 0 || step > mItems.size() - 1) {
            throw new IllegalArgumentException("invalid step index:" + step);
        } else if (step != mCurrentStep) {
            mCurrentStep = step;
            resetAnim();
            invalidate();
        }
    }

    /**
     * (Re)starts the infinite pulse animation on the current step.
     */
    private void resetAnim() {
        if (mStepAnimator != null) {
            mStepAnimator.cancel();
            // Clear stale listeners: previously each call stacked another update
            // listener + cancel listener on the same animator, so every animation
            // frame triggered N redundant callbacks after N step changes.
            mStepAnimator.removeAllUpdateListeners();
            mStepAnimator.removeAllListeners();
        } else {
            mStepAnimator = ValueAnimator.ofFloat(mMinStepRadius, mMaxStepRadius);
        }
        mStepAnimator.setDuration(1000);
        mStepAnimator.setRepeatMode(ValueAnimator.REVERSE);
        mStepAnimator.setRepeatCount(ValueAnimator.INFINITE);
        mStepAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                mStepRadius = (float) animation.getAnimatedValue();
                invalidate();
            }
        });
        mStepAnimator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationCancel(Animator animation) {
                super.onAnimationCancel(animation);
                // Hide the pulse circle once the animation is canceled.
                mRunning = false;
                invalidate();
            }
        });
        mRunning = true;
        mStepAnimator.start();
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        // Stop and drop the animator to avoid leaking the view after detach.
        if (mStepAnimator != null) {
            if (mStepAnimator.isRunning()) {
                mStepAnimator.cancel();
            }
            mStepAnimator = null;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.clients.producer.internals; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.errors.RecordBatchTooLargeException; import org.apache.kafka.common.errors.TimeoutException; import org.apache.kafka.common.header.Header; import org.apache.kafka.common.record.AbstractRecords; import org.apache.kafka.common.record.CompressionRatioEstimator; import org.apache.kafka.common.record.CompressionType; import org.apache.kafka.common.record.MemoryRecords; import org.apache.kafka.common.record.MemoryRecordsBuilder; import org.apache.kafka.common.record.MutableRecordBatch; import org.apache.kafka.common.record.Record; import org.apache.kafka.common.record.RecordBatch; import org.apache.kafka.common.record.TimestampType; import org.apache.kafka.common.requests.ProduceResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Deque; import java.util.Iterator; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.concurrent.atomic.AtomicReference;

import static org.apache.kafka.common.record.RecordBatch.MAGIC_VALUE_V2;
import static org.apache.kafka.common.record.RecordBatch.NO_TIMESTAMP;

/**
 * A batch of records that is or will be sent.
 *
 * This class is not thread safe and external synchronization must be used when modifying it.
 */
public final class ProducerBatch {

    private static final Logger log = LoggerFactory.getLogger(ProducerBatch.class);

    // Terminal state of the batch; set exactly once via CAS on `finalState`.
    private enum FinalState { ABORTED, FAILED, SUCCEEDED }

    final long createdMs;                       // wall-clock time the batch was created
    final TopicPartition topicPartition;        // destination partition for every record in the batch
    final ProduceRequestResult produceFuture;   // completed once for the whole batch in completeFutureAndFireCallbacks()

    // One Thunk per appended record; kept so a split batch can re-chain per-record futures.
    private final List<Thunk> thunks = new ArrayList<>();
    private final MemoryRecordsBuilder recordsBuilder;
    private final AtomicInteger attempts = new AtomicInteger(0);
    private final boolean isSplitBatch;
    // null until the batch reaches a terminal state; guards against double completion.
    private final AtomicReference<FinalState> finalState = new AtomicReference<>(null);

    int recordCount;
    int maxRecordSize;          // upper-bound estimate of the largest single record appended so far
    private long lastAttemptMs;
    private long lastAppendTime;
    private long drainedMs;
    // Non-null once maybeExpire() decides the batch timed out; used by timeoutException().
    private String expiryErrorMessage;
    private boolean retry;

    public ProducerBatch(TopicPartition tp, MemoryRecordsBuilder recordsBuilder, long now) {
        this(tp, recordsBuilder, now, false);
    }

    public ProducerBatch(TopicPartition tp, MemoryRecordsBuilder recordsBuilder, long now, boolean isSplitBatch) {
        this.createdMs = now;
        this.lastAttemptMs = now;
        this.recordsBuilder = recordsBuilder;
        this.topicPartition = tp;
        this.lastAppendTime = createdMs;
        this.produceFuture = new ProduceRequestResult(topicPartition);
        this.retry = false;
        this.isSplitBatch = isSplitBatch;
        // Seed the builder with the historical compression ratio for this topic so size
        // estimates are realistic before any data is written.
        float compressionRatioEstimation = CompressionRatioEstimator.estimation(topicPartition.topic(),
                                                                                recordsBuilder.compressionType());
        recordsBuilder.setEstimatedCompressionRatio(compressionRatioEstimation);
    }

    /**
     * Append the record to the current record set and return the relative offset within that record set
     *
     * @return The RecordSend corresponding to this record or null if there isn't sufficient room.
     */
    public FutureRecordMetadata tryAppend(long timestamp, byte[] key, byte[] value, Header[] headers, Callback callback, long now) {
        if (!recordsBuilder.hasRoomFor(timestamp, key, value, headers)) {
            return null;
        } else {
            Long checksum = this.recordsBuilder.append(timestamp, key, value, headers);
            this.maxRecordSize = Math.max(this.maxRecordSize, AbstractRecords.estimateSizeInBytesUpperBound(magic(),
                    recordsBuilder.compressionType(), key, value, headers));
            this.lastAppendTime = now;
            FutureRecordMetadata future = new FutureRecordMetadata(this.produceFuture, this.recordCount,
                                                                   timestamp, checksum,
                                                                   key == null ? -1 : key.length,
                                                                   value == null ? -1 : value.length);
            // we have to keep every future returned to the users in case the batch needs to be
            // split to several new batches and resent.
            thunks.add(new Thunk(callback, future));
            this.recordCount++;
            return future;
        }
    }

    /**
     * This method is only used by {@link #split(int)} when splitting a large batch to smaller ones.
     * @return true if the record has been successfully appended, false otherwise.
     */
    private boolean tryAppendForSplit(long timestamp, ByteBuffer key, ByteBuffer value, Header[] headers, Thunk thunk) {
        if (!recordsBuilder.hasRoomFor(timestamp, key, value, headers)) {
            return false;
        } else {
            // No need to get the CRC.
            this.recordsBuilder.append(timestamp, key, value, headers);
            this.maxRecordSize = Math.max(this.maxRecordSize, AbstractRecords.estimateSizeInBytesUpperBound(magic(),
                    recordsBuilder.compressionType(), key, value, headers));
            FutureRecordMetadata future = new FutureRecordMetadata(this.produceFuture, this.recordCount,
                                                                   timestamp, thunk.future.checksumOrNull(),
                                                                   key == null ? -1 : key.remaining(),
                                                                   value == null ? -1 : value.remaining());
            // Chain the future to the original thunk.
            thunk.future.chain(future);
            this.thunks.add(thunk);
            this.recordCount++;
            return true;
        }
    }

    /**
     * Abort the batch and complete the future and callbacks.
     *
     * @param exception The exception to use to complete the future and awaiting callbacks.
     */
    public void abort(RuntimeException exception) {
        // CAS guarantees the batch is completed at most once; a second completion is a programming error.
        if (!finalState.compareAndSet(null, FinalState.ABORTED))
            throw new IllegalStateException("Batch has already been completed in final state " + finalState.get());
        log.trace("Aborting batch for partition {}", topicPartition, exception);
        completeFutureAndFireCallbacks(ProduceResponse.INVALID_OFFSET, RecordBatch.NO_TIMESTAMP, exception);
    }

    /**
     * Complete the request. If the batch was previously aborted, this is a no-op.
     *
     * @param baseOffset The base offset of the messages assigned by the server
     * @param logAppendTime The log append time or -1 if CreateTime is being used
     * @param exception The exception that occurred (or null if the request was successful)
     */
    public void done(long baseOffset, long logAppendTime, RuntimeException exception) {
        final FinalState finalState;
        if (exception == null) {
            log.trace("Successfully produced messages to {} with base offset {}.", topicPartition, baseOffset);
            finalState = FinalState.SUCCEEDED;
        } else {
            log.trace("Failed to produce messages to {}.", topicPartition, exception);
            finalState = FinalState.FAILED;
        }

        if (!this.finalState.compareAndSet(null, finalState)) {
            if (this.finalState.get() == FinalState.ABORTED) {
                // A response may legitimately arrive after the batch was locally aborted; ignore it.
                log.debug("ProduceResponse returned for {} after batch had already been aborted.", topicPartition);
                return;
            } else {
                throw new IllegalStateException("Batch has already been completed in final state " + this.finalState.get());
            }
        }

        completeFutureAndFireCallbacks(baseOffset, logAppendTime, exception);
    }

    // Completes the batch-level future, then fires each record's callback, then marks the
    // future done so waiters are released.
    private void completeFutureAndFireCallbacks(long baseOffset, long logAppendTime, RuntimeException exception) {
        // Set the future before invoking the callbacks as we rely on its state for the `onCompletion` call
        produceFuture.set(baseOffset, logAppendTime, exception);

        // execute callbacks
        for (Thunk thunk : thunks) {
            try {
                if (exception == null) {
                    RecordMetadata metadata = thunk.future.value();
                    if (thunk.callback != null)
                        thunk.callback.onCompletion(metadata, null);
                } else {
                    if (thunk.callback != null)
                        thunk.callback.onCompletion(null, exception);
                }
            } catch (Exception e) {
                // A user callback must never break completion of the remaining callbacks.
                log.error("Error executing user-provided callback on message for topic-partition '{}'", topicPartition, e);
            }
        }

        produceFuture.done();
    }

    /**
     * Splits this (too large) batch into smaller batches of at most {@code splitBatchSize} bytes,
     * re-appending every record and re-chaining its original future/callback. The original
     * batch-level future is failed with {@link RecordBatchTooLargeException}.
     */
    public Deque<ProducerBatch> split(int splitBatchSize) {
        Deque<ProducerBatch> batches = new ArrayDeque<>();
        MemoryRecords memoryRecords = recordsBuilder.build();

        Iterator<MutableRecordBatch> recordBatchIter = memoryRecords.batches().iterator();
        if (!recordBatchIter.hasNext())
            throw new IllegalStateException("Cannot split an empty producer batch.");

        RecordBatch recordBatch = recordBatchIter.next();
        if (recordBatch.magic() < MAGIC_VALUE_V2 && !recordBatch.isCompressed())
            throw new IllegalArgumentException("Batch splitting cannot be used with non-compressed messages " +
                    "with version v0 and v1");

        if (recordBatchIter.hasNext())
            throw new IllegalArgumentException("A producer batch should only have one record batch.");

        Iterator<Thunk> thunkIter = thunks.iterator();
        // We always allocate batch size because we are already splitting a big batch.
        // And we also Retain the create time of the original batch.
        ProducerBatch batch = null;

        for (Record record : recordBatch) {
            assert thunkIter.hasNext();
            Thunk thunk = thunkIter.next();
            if (batch == null)
                batch = createBatchOffAccumulatorForRecord(record, splitBatchSize);

            // A newly created batch can always host the first message.
            if (!batch.tryAppendForSplit(record.timestamp(), record.key(), record.value(), record.headers(), thunk)) {
                batches.add(batch);
                batch = createBatchOffAccumulatorForRecord(record, splitBatchSize);
                batch.tryAppendForSplit(record.timestamp(), record.key(), record.value(), record.headers(), thunk);
            }
        }

        // Close the last batch and add it to the batch list after split.
        if (batch != null)
            batches.add(batch);

        produceFuture.set(ProduceResponse.INVALID_OFFSET, NO_TIMESTAMP, new RecordBatchTooLargeException());
        produceFuture.done();
        return batches;
    }

    // Allocates a new sibling batch sized to hold at least `record` (or `batchSize`, whichever
    // is larger), preserving this batch's creation time.
    private ProducerBatch createBatchOffAccumulatorForRecord(Record record, int batchSize) {
        int initialSize = Math.max(AbstractRecords.estimateSizeInBytesUpperBound(magic(),
                recordsBuilder.compressionType(), record.key(), record.value(), record.headers()), batchSize);
        ByteBuffer buffer = ByteBuffer.allocate(initialSize);

        // Note that we intentionally do not set producer state (producerId, epoch, sequence, and isTransactional)
        // for the newly created batch. This will be set when the batch is dequeued for sending (which is consistent
        // with how normal batches are handled).
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, magic(), recordsBuilder.compressionType(),
                TimestampType.CREATE_TIME, 0L);
        return new ProducerBatch(topicPartition, builder, this.createdMs, true);
    }

    public boolean isCompressed() {
        return recordsBuilder.compressionType() != CompressionType.NONE;
    }

    /**
     * A callback and the associated FutureRecordMetadata argument to pass to it.
     */
    final private static class Thunk {
        final Callback callback;
        final FutureRecordMetadata future;

        Thunk(Callback callback, FutureRecordMetadata future) {
            this.callback = callback;
            this.future = future;
        }
    }

    @Override
    public String toString() {
        return "ProducerBatch [createdMs=" + createdMs + ", topicPartition=" + topicPartition
                + ", produceFuture=" + produceFuture + ", thunks=" + thunks + ", recordsBuilder="
                + recordsBuilder + ", attempts=" + attempts + ", isSplitBatch=" + isSplitBatch
                + ", finalState=" + finalState + ", recordCount=" + recordCount + ", maxRecordSize="
                + maxRecordSize + ", lastAttemptMs=" + lastAttemptMs + ", lastAppendTime="
                + lastAppendTime + ", drainedMs=" + drainedMs + ", expiryErrorMessage="
                + expiryErrorMessage + ", retry=" + retry + "]";
    }

    /**
     * A batch whose metadata is not available should be expired if one of the following is true:
     * <ol>
     * <li> the batch is not in retry AND request timeout has elapsed after it is ready (full or linger.ms has reached).
     * <li> the batch is in retry AND request timeout has elapsed after the backoff period ended.
     * </ol>
     * This methods closes this batch and sets {@code expiryErrorMessage} if the batch has timed out.
     */
    boolean maybeExpire(int requestTimeoutMs, long retryBackoffMs, long now, long lingerMs, boolean isFull) {
        if (!this.inRetry() && isFull && requestTimeoutMs < (now - this.lastAppendTime))
            expiryErrorMessage = (now - this.lastAppendTime) + " ms has passed since last append";
        else if (!this.inRetry() && requestTimeoutMs < (createdTimeMs(now) - lingerMs))
            expiryErrorMessage = (createdTimeMs(now) - lingerMs) + " ms has passed since batch creation plus linger time";
        else if (this.inRetry() && requestTimeoutMs < (waitedTimeMs(now) - retryBackoffMs))
            expiryErrorMessage = (waitedTimeMs(now) - retryBackoffMs) + " ms has passed since last attempt plus backoff time";

        boolean expired = expiryErrorMessage != null;
        if (expired)
            abortRecordAppends();
        return expired;
    }

    /**
     * If {@link #maybeExpire(int, long, long, long, boolean)} returned true, the sender will fail the batch with
     * the exception returned by this method.
     * @return An exception indicating the batch expired.
     */
    TimeoutException timeoutException() {
        if (expiryErrorMessage == null)
            throw new IllegalStateException("Batch has not expired");
        return new TimeoutException("Expiring " + recordCount + " record(s) for " + topicPartition + ": " + expiryErrorMessage);
    }

    int attempts() {
        return attempts.get();
    }

    // Called when the batch is put back on the accumulator queue for a retry.
    void reenqueued(long now) {
        attempts.getAndIncrement();
        lastAttemptMs = Math.max(lastAppendTime, now);
        lastAppendTime = Math.max(lastAppendTime, now);
        retry = true;
    }

    // Time spent queued before being drained for sending.
    long queueTimeMs() {
        return drainedMs - createdMs;
    }

    long createdTimeMs(long nowMs) {
        return Math.max(0, nowMs - createdMs);
    }

    long waitedTimeMs(long nowMs) {
        return Math.max(0, nowMs - lastAttemptMs);
    }

    void drained(long nowMs) {
        this.drainedMs = Math.max(drainedMs, nowMs);
    }

    boolean isSplitBatch() {
        return isSplitBatch;
    }

    /**
     * Returns if the batch is been retried for sending to kafka
     */
    public boolean inRetry() {
        return this.retry;
    }

    public MemoryRecords records() {
        return recordsBuilder.build();
    }

    public int estimatedSizeInBytes() {
        return recordsBuilder.estimatedSizeInBytes();
    }

    public double compressionRatio() {
        return recordsBuilder.compressionRatio();
    }

    public boolean isFull() {
        return recordsBuilder.isFull();
    }

    public void setProducerState(ProducerIdAndEpoch producerIdAndEpoch, int baseSequence, boolean isTransactional) {
        recordsBuilder.setProducerState(producerIdAndEpoch.producerId, producerIdAndEpoch.epoch, baseSequence, isTransactional);
    }

    /**
     * Release resources required for record appends (e.g. compression buffers). Once this method is called, it's only
     * possible to update the RecordBatch header.
     */
    public void closeForRecordAppends() {
        recordsBuilder.closeForRecordAppends();
    }

    public void close() {
        recordsBuilder.close();
        // Feed the observed compression ratio back into the per-topic estimator (control batches excluded).
        if (!recordsBuilder.isControlBatch()) {
            CompressionRatioEstimator.updateEstimation(topicPartition.topic(),
                                                       recordsBuilder.compressionType(),
                                                       (float) recordsBuilder.compressionRatio());
        }
    }

    /**
     * Abort the record builder and reset the state of the underlying buffer. This is used prior to aborting
     * the batch with {@link #abort(RuntimeException)} and ensures that no record previously appended can be
     * read. This is used in scenarios where we want to ensure a batch ultimately gets aborted, but in which
     * it is not safe to invoke the completion callbacks (e.g. because we are holding a lock,
     * {@link RecordAccumulator#abortBatches()}).
     */
    public void abortRecordAppends() {
        recordsBuilder.abort();
    }

    public boolean isClosed() {
        return recordsBuilder.isClosed();
    }

    public ByteBuffer buffer() {
        return recordsBuilder.buffer();
    }

    public int initialCapacity() {
        return recordsBuilder.initialCapacity();
    }

    public boolean isWritable() {
        return !recordsBuilder.isClosed();
    }

    public byte magic() {
        return recordsBuilder.magic();
    }

    public long producerId() {
        return recordsBuilder.producerId();
    }

    public short producerEpoch() {
        return recordsBuilder.producerEpoch();
    }
}
package org.motechproject.mHealthDataInterface.utility; import com.google.gson.Gson; import org.motechproject.mHealthDataInterface.bean.*; import org.motechproject.mHealthDataInterface.bean.Observation.ObservationValue; import org.motechproject.mHealthDataInterface.bean.Observation.ObservationValueDeserializer; import org.motechproject.mHealthDataInterface.bean.Person.PreferredAddress; import org.motechproject.mHealthDataInterface.config.bean.ApplicationSettings; import org.motechproject.mHealthDataInterface.util.Constants; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.datasource.DriverManagerDataSource; import java.lang.reflect.Type; import java.sql.Connection; import java.sql.SQLException; import java.util.*; /** * Custom class behaving as DAO */ public class Utility { private static final Logger LOGGER = LoggerFactory.getLogger(Utility.class); JdbcTemplate jdbcTemplate; private ApplicationSettings applicationSettings; public Utility(ApplicationSettings applicationSettings) { this.applicationSettings = applicationSettings; /* * get spring data source connection */ jdbcTemplate = getDatabaseConnection(); } /** * get patient details */ public Patient getPatientDetail(String patientId) throws mHealthException { String path = "/patient/" + patientId + "?v=full"; //patient/ad97fe7a-17a6-4e20-a57f-28026de24bb6?v=full Patient patient = null; String json = GenericUtility.getJsonObject(path, applicationSettings); Gson gson = GenericUtility.gsonDateFormat(); if (gson != null && json != null) { patient = gson.fromJson(json, Patient.class); } return patient; } /** * get patients detail by name */ public List<Patient> getPatientsDetailByName(String patientName) throws mHealthException { String path = "/patient?q=" + patientName + "&v=full"; // http://localhost:8080/openmrs/ws/rest/v1/patient?q=shivi&v=full List<Patient> patient = null; String json = GenericUtility.getJsonObject(path, 
applicationSettings); //Gson gson = GenericUtility.gsonDateFormat(); Map<Type, Object> adapters = new HashMap<Type, Object>(); PatientListResult result = (PatientListResult) JsonUtils.readJsonWithAdapters(json, PatientListResult.class, adapters); return result.getResults(); } /** * get patient village */ public PreferredAddress getPatientVillage(String patientId) throws mHealthException { String path = "/person/" + patientId + "?v=full"; // http://localhost:8080/openmrs/ws/rest/v1//person/453887ce-c7a7-45dd-9308-1f74a1c8724f?v=full PreferredAddress address = null; Person person = null; // String locationUid = null; String json = GenericUtility.getJsonObject(path, applicationSettings); Gson gson = GenericUtility.gsonDateFormat(); if (gson != null && json != null) { person = gson.fromJson(json, Person.class); } if (person != null) { if (person.getPreferredAddress() != null) { address = person.getPreferredAddress(); } } return address; } /** * get all visits by patient */ public List<Encounter> getVisitListByPatientId(String patientId) throws mHealthException { String path = "/encounter?patient=" + patientId + "&v=full"; String json = GenericUtility.getJsonObject(path, applicationSettings); Map<Type, Object> adapters = new HashMap<Type, Object>(); adapters.put(ObservationValue.class, new ObservationValueDeserializer()); EncounterListResult result = (EncounterListResult) JsonUtils.readJsonWithAdapters(json, EncounterListResult.class, adapters); return result.getResults(); } /** * verify health worker */ public boolean verifyHealthWorker(String healthWorkerId) throws mHealthException { String path = "/provider/" + healthWorkerId; Provider healthWorker = null; String json = GenericUtility.getJsonObject(path, applicationSettings); Gson gson = GenericUtility.gsonDateFormat(); if (gson != null && json != null) { healthWorker = gson.fromJson(json, Provider.class); } if (healthWorker != null) { return true; } return false; } /** * get healthWorker details */ public Provider 
getHealthWorkerDetail(String healthWorkerId) throws mHealthException { String path = "/provider/" + healthWorkerId; Provider healthWorker = null; String json = GenericUtility.getJsonObject(path, applicationSettings); Gson gson = GenericUtility.gsonDateFormat(); if (gson != null && json != null) { healthWorker = gson.fromJson(json, Provider.class); } return healthWorker; } /** * get health Workers detail by name */ public List<Provider> getHealthWorkersDetailByName(String healthWorkerName) throws mHealthException { String path = "/provider?q=" + healthWorkerName + "&v=full"; List<Provider> healthWorker = null; String json = GenericUtility.getJsonObject(path, applicationSettings); Gson gson = GenericUtility.gsonDateFormat(); Map<Type, Object> adapters = new HashMap<Type, Object>(); ProviderListResult result = (ProviderListResult) JsonUtils.readJsonWithAdapters(json, ProviderListResult.class, adapters); return result.getResults(); } /** * get patient details in a particular village */ public List<PatientLocation> getPatientsByVillage(String villageName) throws mHealthException { List<PatientLocation> patientLocationsList = new ArrayList<PatientLocation>(); String query = Constants.query_village + villageName + "%'"; return getPatientLocations(patientLocationsList, query); } /** * get patient details in a particular postal code */ public List<PatientLocation> getPatientsByPostalCode(String postalCode) throws mHealthException { List<PatientLocation> patientLocationsList = new ArrayList<PatientLocation>(); String query = Constants.query_postalCode + postalCode + "'"; return getPatientLocations(patientLocationsList, query); } private List<PatientLocation> getPatientLocations(List<PatientLocation> patientLocationsList, String query) { List<Map<String, Object>> patientList = jdbcTemplate.queryForList(query); if (patientList != null && !patientList.isEmpty()) { for (Map<String, Object> patientMap : patientList) { PatientLocation patientLocation = new PatientLocation(); for 
(Iterator<Map.Entry<String, Object>> it = patientMap.entrySet().iterator(); it.hasNext(); ) { Map.Entry<String, Object> entry = it.next(); String key = entry.getKey(); Object value = entry.getValue(); if (key.equalsIgnoreCase("uuid")) { if (value != null) { patientLocation.setuUid(value.toString()); } } if (key.equalsIgnoreCase("name")) { if (value != null) { patientLocation.setName(value.toString()); } } if (key.equalsIgnoreCase("village")) { if (value != null) { patientLocation.setVillage(value.toString()); } } if (key.equalsIgnoreCase("address1")) { if (value != null) { patientLocation.setAddress1(value.toString()); } } if (key.equalsIgnoreCase("address2")) { if (value != null) { patientLocation.setAddress2(value.toString()); } } if (key.equalsIgnoreCase("state_province")) { if (value != null) { patientLocation.setState(value.toString()); } } if (key.equalsIgnoreCase("postal_code")) { if (value != null) { patientLocation.setPostalCode(value.toString()); } } if (key.equalsIgnoreCase("country")) { if (value != null) { patientLocation.setCountry(value.toString()); } } if (key.equalsIgnoreCase("phone")) { if (value != null) { patientLocation.setPhone(value.toString()); } } } patientLocationsList.add(patientLocation); } } return patientLocationsList; } /** * Database connection for OpenMRS */ public JdbcTemplate getDatabaseConnection() { DriverManagerDataSource managerDataSource = new DriverManagerDataSource(); managerDataSource.setDriverClassName(applicationSettings.getDbDriver()); managerDataSource.setUrl(applicationSettings.getDbUrl()); managerDataSource.setUsername(applicationSettings.getDbUsername()); managerDataSource.setPassword(applicationSettings.getDbPassword()); JdbcTemplate jdbcTemplate = new JdbcTemplate(managerDataSource); return jdbcTemplate; } /** * * @return */ public boolean testDatabaseConnection() { DriverManagerDataSource testDriverManagerDataSource = new DriverManagerDataSource(); 
testDriverManagerDataSource.setDriverClassName(applicationSettings.getDbDriver()); testDriverManagerDataSource.setUrl(applicationSettings.getDbUrl()); testDriverManagerDataSource.setUsername(applicationSettings.getDbUsername()); testDriverManagerDataSource.setPassword(applicationSettings.getDbPassword()); try { Connection connection = testDriverManagerDataSource.getConnection(); return true; } catch (SQLException e) { LOGGER.debug("Error while creating the database connection with given connection properties.", e); } return false; } }
package org.cagrid.tutorials.photosharing; import gov.nih.nci.cagrid.common.Runner; import gov.nih.nci.cagrid.common.Utils; import gov.nih.nci.cagrid.common.security.ProxyUtil; import java.awt.BorderLayout; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.util.List; import javax.swing.JButton; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTextField; import org.cagrid.demos.photoservicereg.client.PhotoSharingRegistrationClient; import org.cagrid.gaards.dorian.client.GridUserClient; import org.cagrid.gaards.dorian.federation.HostCertificateRecord; import org.cagrid.gaards.dorian.federation.HostCertificateStatus; import org.cagrid.gaards.pki.CertUtil; import org.cagrid.gaards.ui.common.ProgressPanel; import org.cagrid.gaards.ui.common.TitlePanel; import org.cagrid.gaards.ui.dorian.DorianHandle; import org.cagrid.gaards.ui.dorian.ServicesManager; import org.cagrid.grape.ApplicationComponent; import org.cagrid.grape.GridApplication; import org.cagrid.grape.utils.ErrorDialog; /** * @author <A HREF="MAILTO:langella@bmi.osu.edu">Stephen Langella</A> * @author <A HREF="MAILTO:oster@bmi.osu.edu">Scott Oster</A> * @author <A HREF="MAILTO:hastings@bmi.osu.edu">Shannon Hastings</A> * @author <A HREF="MAILTO:ervin@bmi.osu.edu">David W. 
Ervin</A> * @version $Id: GridGrouperBaseTreeNode.java,v 1.1 2006/08/04 03:49:26 langella * Exp $ */ public class RegistrationWindow extends ApplicationComponent { private static final long serialVersionUID = 1L; private JPanel jContentPane = null; private JPanel mainPanel = null; private JPanel titlePanel = null; private ProgressPanel progress = null; private JPanel infoPanel = null; private JScrollPane jScrollPane = null; private HostCertificatesTable hostCertificates = null; private JLabel jLabel = null; private JTextField gridIdentity = null; private JPanel buttonPanel = null; private JButton register = null; /** * This is the default constructor */ public RegistrationWindow() { super(); initialize(); Runner runner = new Runner() { public void execute() { try { lookupHostCertificates(); } catch (Exception e) { ErrorDialog.showError(e); } } }; try { GridApplication.getContext().executeInBackground(runner); } catch (Exception t) { t.getMessage(); } } private void lookupHostCertificates() { try { getRegister().setEnabled(false); getProgress().showProgress("Finding hosts...."); Thread.sleep(200); List<DorianHandle> services = ServicesManager.getInstance().getDorianServices(); for (int j = 0; j < services.size(); j++) { DorianHandle handle = services.get(j); GridUserClient client = handle.getUserClient(ProxyUtil.getDefaultProxy()); List<HostCertificateRecord> records = client.getOwnedHostCertificates(); for (int i = 0; i < records.size(); i++) { if (records.get(i).getStatus().equals(HostCertificateStatus.Active)) { getHostCertificates().addHostCertificate(records.get(i)); } } } getProgress().stopProgress(); getRegister().setEnabled(true); } catch (Exception e) { getProgress().stopProgress(); ErrorDialog.showError(Utils.getExceptionMessage(e), e); dispose(); } } /** * This method initializes this */ private void initialize() { this.setSize(500, 300); this.setContentPane(getJContentPane()); this.setTitle("Add Photo"); 
this.setFrameIcon(GalleryLookAndFeel.getGallery22x22()); } /** * This method initializes jContentPane * * @return javax.swing.JPanel */ private JPanel getJContentPane() { if (jContentPane == null) { jContentPane = new JPanel(); jContentPane.setLayout(new BorderLayout()); jContentPane.add(getMainPanel(), BorderLayout.CENTER); } return jContentPane; } /** * This method initializes mainPanel * * @return javax.swing.JPanel */ private JPanel getMainPanel() { if (mainPanel == null) { GridBagConstraints gridBagConstraints13 = new GridBagConstraints(); gridBagConstraints13.gridx = 0; gridBagConstraints13.insets = new Insets(2, 2, 2, 2); gridBagConstraints13.gridy = 3; GridBagConstraints gridBagConstraints1 = new GridBagConstraints(); gridBagConstraints1.fill = GridBagConstraints.BOTH; gridBagConstraints1.weighty = 1.0; gridBagConstraints1.gridx = 0; gridBagConstraints1.gridy = 2; gridBagConstraints1.insets = new Insets(2, 2, 2, 2); gridBagConstraints1.weightx = 1.0; GridBagConstraints gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 0; gridBagConstraints.insets = new Insets(2, 2, 2, 2); gridBagConstraints.anchor = GridBagConstraints.CENTER; gridBagConstraints.weightx = 1.0D; gridBagConstraints.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints.gridy = 1; GridBagConstraints gridBagConstraints12 = new GridBagConstraints(); gridBagConstraints12.gridx = 0; gridBagConstraints12.insets = new Insets(0, 0, 0, 0); gridBagConstraints12.weightx = 1.0D; gridBagConstraints12.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints12.anchor = GridBagConstraints.EAST; gridBagConstraints12.gridy = 4; GridBagConstraints gridBagConstraints11 = new GridBagConstraints(); gridBagConstraints11.gridx = 0; gridBagConstraints11.insets = new Insets(2, 2, 2, 2); gridBagConstraints11.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints11.weightx = 1.0D; gridBagConstraints11.gridy = 0; mainPanel = new JPanel(); mainPanel.setLayout(new GridBagLayout()); 
mainPanel.add(getTitlePanel(), gridBagConstraints11); mainPanel.add(getProgress(), gridBagConstraints12); mainPanel.add(getInfoPanel(), gridBagConstraints); mainPanel.add(getJScrollPane(), gridBagConstraints1); mainPanel.add(getButtonPanel(), gridBagConstraints13); } return mainPanel; } /** * This method initializes titlePanel * * @return javax.swing.JPanel */ private JPanel getTitlePanel() { if (titlePanel == null) { titlePanel = new TitlePanel("Photo Sharing Tutorial Registration", "Register for the photo sharing tutorial."); } return titlePanel; } /** * This method initializes progress * * @return javax.swing.JPanel */ private ProgressPanel getProgress() { if (progress == null) { progress = new ProgressPanel(); } return progress; } /** * This method initializes infoPanel * * @return javax.swing.JPanel */ private JPanel getInfoPanel() { if (infoPanel == null) { GridBagConstraints gridBagConstraints3 = new GridBagConstraints(); gridBagConstraints3.anchor = GridBagConstraints.WEST; gridBagConstraints3.gridy = 0; gridBagConstraints3.gridx = 0; gridBagConstraints3.insets = new Insets(2, 2, 2, 2); GridBagConstraints gridBagConstraints2 = new GridBagConstraints(); gridBagConstraints2.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints2.gridx = 1; gridBagConstraints2.gridy = 0; gridBagConstraints2.insets = new Insets(2, 2, 2, 2); gridBagConstraints2.anchor = GridBagConstraints.WEST; gridBagConstraints2.weightx = 1.0; jLabel = new JLabel(); jLabel.setText("Grid Identity"); infoPanel = new JPanel(); infoPanel.setLayout(new GridBagLayout()); infoPanel.add(jLabel, gridBagConstraints3); infoPanel.add(getGridIdentity(), gridBagConstraints2); } return infoPanel; } /** * This method initializes jScrollPane * * @return javax.swing.JScrollPane */ private JScrollPane getJScrollPane() { if (jScrollPane == null) { jScrollPane = new JScrollPane(); jScrollPane.setViewportView(getHostCertificates()); jScrollPane.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Select 
the host you will perform the tutorial on", javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION, javax.swing.border.TitledBorder.DEFAULT_POSITION, null, org.cagrid.grape.LookAndFeel .getPanelLabelColor())); } return jScrollPane; } /** * This method initializes hostCertificates * * @return javax.swing.JTable */ private HostCertificatesTable getHostCertificates() { if (hostCertificates == null) { hostCertificates = new HostCertificatesTable(); } return hostCertificates; } /** * This method initializes gridIdentity * * @return javax.swing.JTextField */ private JTextField getGridIdentity() { if (gridIdentity == null) { gridIdentity = new JTextField(); gridIdentity.setEditable(false); try { gridIdentity.setText(ProxyUtil.getDefaultProxy().getIdentity()); } catch (Exception e) { gridIdentity.setText(""); } } return gridIdentity; } /** * This method initializes buttonPanel * * @return javax.swing.JPanel */ private JPanel getButtonPanel() { if (buttonPanel == null) { GridBagConstraints gridBagConstraints4 = new GridBagConstraints(); gridBagConstraints4.gridx = 0; gridBagConstraints4.insets = new Insets(2, 2, 2, 2); gridBagConstraints4.gridy = 0; buttonPanel = new JPanel(); buttonPanel.setLayout(new GridBagLayout()); buttonPanel.add(getRegister(), gridBagConstraints4); } return buttonPanel; } /** * This method initializes register * * @return javax.swing.JButton */ private JButton getRegister() { if (register == null) { register = new JButton(); register.setText("Register"); register.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { Runner runner = new Runner() { public void execute() { try { register(); } catch (Exception e) { ErrorDialog.showError(e); } } }; try { GridApplication.getContext().executeInBackground(runner); } catch (Exception t) { t.getMessage(); } } }); getRootPane().setDefaultButton(register); } return register; } private void register() { try { getRegister().setEnabled(false); 
getProgress().showProgress("Registering for tutorial...."); PhotoSharingRegistrationClient client = new PhotoSharingRegistrationClient( org.cagrid.tutorials.photosharing.Utils.getRegistrationService(), ProxyUtil.getDefaultProxy()); client.registerPhotoSharingService(CertUtil.subjectToIdentity(getHostCertificates() .getSelectedHostCertificate().getSubject())); getProgress().stopProgress(); getRegister().setEnabled(true); dispose(); GridApplication.getContext().showMessage( "Congratulations you have successfully registered for the photo sharing tutorial."); } catch (Exception e) { getProgress().stopProgress(); ErrorDialog.showError(Utils.getExceptionMessage(e), e); getRegister().setEnabled(true); } } }
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.debugger.impl;

import com.intellij.debugger.engine.DebuggerManagerThreadImpl;
import com.intellij.debugger.engine.DebuggerUtils;
import com.intellij.debugger.engine.SuspendContextImpl;
import com.intellij.debugger.engine.events.DebuggerCommandImpl;
import com.intellij.debugger.engine.events.SuspendContextCommandImpl;
import com.intellij.diagnostic.ThreadDumper;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.psi.CommonClassNames;
import com.intellij.util.ThrowableRunnable;
import com.jetbrains.jdi.*;
import com.sun.jdi.*;
import com.sun.jdi.event.EventSet;
import com.sun.jdi.request.EventRequest;
import com.sun.jdi.request.EventRequestManager;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.function.Function;
import java.util.stream.Stream;

import static java.util.concurrent.CompletableFuture.allOf;
import static java.util.concurrent.CompletableFuture.completedFuture;

/**
 * Asynchronous counterparts of common JDI accessors.
 * <p>
 * Each helper follows the same pattern: when the value comes from the JetBrains JDI
 * implementation ({@code com.jetbrains.jdi.*}) and the {@code debugger.async.jdi} registry flag
 * is on, the non-blocking {@code *Async()} variant is used; otherwise the call falls back to the
 * regular blocking JDI API wrapped in an already-completed future.
 * <p>
 * Helpers in the "Debugger manager thread" section additionally {@link #reschedule} the result so
 * that continuations run back on the debugger manager thread with the priority/suspend context of
 * the command that initiated the call. Helpers in the "Reader thread" section complete on the JDI
 * reader thread and must only be used where that is safe.
 */
public final class DebuggerUtilsAsync {
  private static final Logger LOG = Logger.getInstance(DebuggerUtilsAsync.class);

  /** Whether async JDI access is enabled (registry flag {@code debugger.async.jdi}). */
  public static boolean isAsyncEnabled() {
    return Registry.is("debugger.async.jdi");
  }

  // Debugger manager thread

  /** String contents of {@code value}; async when possible. */
  public static CompletableFuture<String> getStringValue(StringReference value) {
    if (value instanceof StringReferenceImpl && isAsyncEnabled()) {
      return reschedule(((StringReferenceImpl)value).valueAsync());
    }
    return completedFuture(value.value());
  }

  /** All methods of {@code type}, including inherited ones; async when possible. */
  public static CompletableFuture<List<Method>> allMethods(ReferenceType type) {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      return reschedule(((ReferenceTypeImpl)type).allMethodsAsync());
    }
    return completedFuture(type.allMethods());
  }

  /** All fields of {@code type}, including inherited ones; async when possible. */
  public static CompletableFuture<List<Field>> allFields(ReferenceType type) {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      return reschedule(((ReferenceTypeImpl)type).allFieldsAsync());
    }
    return completedFuture(type.allFields());
  }

  /** Declared fields of {@code type}; async when possible. */
  public static CompletableFuture<List<Field>> fields(ReferenceType type) {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      return reschedule(((ReferenceTypeImpl)type).fieldsAsync());
    }
    return completedFuture(type.fields());
  }

  /** Runtime type of {@code value}; completes with {@code null} for a null value. */
  public static CompletableFuture<? extends Type> type(@Nullable Value value) {
    if (value == null) {
      return completedFuture(null);
    }
    if (value instanceof ObjectReferenceImpl && isAsyncEnabled()) {
      return reschedule(((ObjectReferenceImpl)value).typeAsync());
    }
    return completedFuture(value.type());
  }

  /** Value of a single field of {@code ref}; async when possible. */
  public static CompletableFuture<Value> getValue(ObjectReference ref, Field field) {
    if (ref instanceof ObjectReferenceImpl && isAsyncEnabled()) {
      return reschedule(((ObjectReferenceImpl)ref).getValueAsync(field));
    }
    return completedFuture(ref.getValue(field));
  }

  /** Values of several instance fields of {@code ref}; async when possible. */
  public static CompletableFuture<Map<Field, Value>> getValues(ObjectReference ref, List<Field> fields) {
    if (ref instanceof ObjectReferenceImpl && isAsyncEnabled()) {
      return reschedule(((ObjectReferenceImpl)ref).getValuesAsync(fields));
    }
    return completedFuture(ref.getValues(fields));
  }

  /** Values of several static fields of {@code type}; async when possible. */
  public static CompletableFuture<Map<Field, Value>> getValues(ReferenceType type, List<Field> fields) {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      return reschedule(((ReferenceTypeImpl)type).getValuesAsync(fields));
    }
    return completedFuture(type.getValues(fields));
  }

  /** A slice of array elements {@code [index, index + length)}; async when possible. */
  public static CompletableFuture<List<Value>> getValues(ArrayReference ref, int index, int length) {
    if (ref instanceof ArrayReferenceImpl && isAsyncEnabled()) {
      return reschedule(((ArrayReferenceImpl)ref).getValuesAsync(index, length));
    }
    return completedFuture(ref.getValues(index, length));
  }

  /** All threads of the target VM; async when possible. */
  public static CompletableFuture<List<ThreadReference>> allThreads(VirtualMachine vm) {
    if (vm instanceof VirtualMachineImpl && isAsyncEnabled()) {
      return reschedule(((VirtualMachineImpl)vm).allThreadsAsync());
    }
    return completedFuture(vm.allThreads());
  }

  /** Array length; async when possible. */
  public static CompletableFuture<Integer> length(ArrayReference ref) {
    if (ref instanceof ArrayReferenceImpl && isAsyncEnabled()) {
      return reschedule(((ArrayReferenceImpl)ref).lengthAsync());
    }
    return completedFuture(ref.length());
  }

  /** Source file name of {@code type}; may complete exceptionally with AbsentInformationException. */
  public static CompletableFuture<String> sourceName(ReferenceType type) {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      return reschedule(((ReferenceTypeImpl)type).sourceNameAsync());
    }
    return toCompletableFuture(() -> type.sourceName());
  }

  /** Available strata of {@code type}; async when possible. */
  public static CompletableFuture<List<String>> availableStrata(ReferenceType type) {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      return reschedule(((ReferenceTypeImpl)type).availableStrataAsync());
    }
    return toCompletableFuture(() -> type.availableStrata());
  }

  /** Locations of {@code lineNumber} using the VM's default stratum. */
  public static CompletableFuture<List<Location>> locationsOfLine(@NotNull ReferenceType type, int lineNumber) {
    return locationsOfLine(type, type.virtualMachine().getDefaultStratum(), null, lineNumber);
  }

  /**
   * Drop-in replacement for the standard jdi version, but "parallel" inside, so a lot faster when type has lots of methods
   */
  public static List<Location> locationsOfLineSync(@NotNull ReferenceType type, int lineNumber) throws AbsentInformationException {
    return locationsOfLineSync(type, type.virtualMachine().getDefaultStratum(), null, lineNumber);
  }

  /** Locations of {@code lineNumber} in the given stratum/source; async when possible. */
  public static CompletableFuture<List<Location>> locationsOfLine(ReferenceType type, String stratum, String sourceName, int lineNumber) {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      return reschedule(((ReferenceTypeImpl)type).locationsOfLineAsync(stratum, sourceName, lineNumber));
    }
    return toCompletableFuture(() -> type.locationsOfLine(stratum, sourceName, lineNumber));
  }

  /**
   * Drop-in replacement for the standard jdi version, but "parallel" inside, so a lot faster when type has lots of methods
   */
  public static List<Location> locationsOfLineSync(ReferenceType type, String stratum, String sourceName, int lineNumber)
    throws AbsentInformationException {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      try {
        return ((ReferenceTypeImpl)type).locationsOfLineAsync(stratum, sourceName, lineNumber).get();
      }
      catch (InterruptedException e) {
        // FIX: restore the thread's interrupt status instead of silently swallowing it
        // inside the broad Exception handler below, then fall back to the sync path.
        Thread.currentThread().interrupt();
        LOG.warn(e);
      }
      catch (Exception e) {
        // ExecutionException wrapping AbsentInformationException must surface to the caller
        // to match the blocking JDI contract; anything else falls through to the sync path.
        if (e.getCause() instanceof AbsentInformationException) {
          throw (AbsentInformationException)e.getCause();
        }
        LOG.warn(e);
      }
    }
    return type.locationsOfLine(stratum, sourceName, lineNumber);
  }

  /** All line locations of {@code method}; async when possible. */
  public static CompletableFuture<List<Location>> allLineLocationsAsync(Method method) {
    if (method instanceof MethodImpl && isAsyncEnabled()) {
      return reschedule(((MethodImpl)method).allLineLocationsAsync());
    }
    return toCompletableFuture(() -> method.allLineLocations());
  }

  /** Async equivalent of {@link DebuggerUtils#instanceOf(Type, String)}. */
  public static CompletableFuture<Boolean> instanceOf(@Nullable Type subType, @NotNull String superType) {
    return instanceOf(subType, superType, true);
  }

  /**
   * Implementation of {@link #instanceOf(Type, String)}; {@code reschedule=false} is used by the
   * array-component recursion, which must stay on the completing thread.
   */
  private static CompletableFuture<Boolean> instanceOf(@Nullable Type subType, @NotNull String superType, boolean reschedule) {
    if (!isAsyncEnabled()) {
      return completedFuture(DebuggerUtils.instanceOf(subType, superType));
    }
    if (subType == null || subType instanceof VoidType) {
      return completedFuture(false);
    }
    if (subType instanceof PrimitiveType) {
      return completedFuture(superType.equals(subType.name()));
    }
    if (CommonClassNames.JAVA_LANG_OBJECT.equals(superType)) {
      return completedFuture(true);
    }
    // res is completed with true by the first matching supertype; if the whole
    // hierarchy walk finishes without a match, it is completed with false.
    CompletableFuture<Boolean> res = new CompletableFuture<>();
    instanceOfObject(subType, superType, res).thenRun(() -> res.complete(false));
    if (!reschedule) {
      return res;
    }
    return reschedule(res);
  }

  /** Walks superclasses and interfaces of {@code subType}, completing {@code res} with true on a name match. */
  private static CompletableFuture<Void> instanceOfObject(@Nullable Type subType, @NotNull String superType, CompletableFuture<Boolean> res) {
    if (subType == null || res.isDone()) {
      return completedFuture(null);
    }
    if (typeEquals(subType, superType)) {
      res.complete(true);
      return completedFuture(null); // early return
    }
    if (subType instanceof ClassType) {
      return allOf(
        superclass((ClassType)subType).thenCompose(s -> instanceOfObject(s, superType, res)),
        interfaces((ClassType)subType).thenCompose(
          interfaces -> allOf(interfaces.stream().map(i -> instanceOfObject(i, superType, res)).toArray(CompletableFuture[]::new))));
    }
    if (subType instanceof InterfaceType) {
      return allOf(
        superinterfaces((InterfaceType)subType).thenCompose(
          interfaces -> allOf(interfaces.stream().map(i -> instanceOfObject(i, superType, res)).toArray(CompletableFuture[]::new))));
    }
    if (subType instanceof ArrayType && superType.endsWith("[]")) {
      try {
        String superTypeItem = superType.substring(0, superType.length() - 2);
        Type subTypeItem = ((ArrayType)subType).componentType();
        return instanceOf(subTypeItem, superTypeItem, false).thenAccept(r -> {
          if (r) res.complete(true);
        });
      }
      catch (ClassNotLoadedException e) {
        //LOG.info(e);
      }
    }
    return completedFuture(null);
  }

  /** Deletes an event request; async when possible. The sync path guards against a known JDI AIOOBE. */
  public static CompletableFuture<Void> deleteEventRequest(EventRequestManager eventRequestManager, EventRequest request) {
    if (isAsyncEnabled() && eventRequestManager instanceof EventRequestManagerImpl) {
      return ((EventRequestManagerImpl)eventRequestManager).deleteEventRequestAsync(request);
    }
    else {
      try {
        eventRequestManager.deleteEventRequest(request);
      }
      catch (ArrayIndexOutOfBoundsException e) {
        LOG.error("Exception in EventRequestManager.deleteEventRequest", e, ThreadDumper.dumpThreadsToString());
      }
    }
    return completedFuture(null);
  }

  // Copied from DebuggerUtils
  /** Name comparison that ignores generics and treats '$' and '.' separators as equivalent. */
  private static boolean typeEquals(@NotNull Type type, @NotNull String typeName) {
    int genericPos = typeName.indexOf('<');
    if (genericPos > -1) {
      typeName = typeName.substring(0, genericPos);
    }
    return type.name().replace('$', '.').equals(typeName.replace('$', '.'));
  }

  /**
   * Finds any base type (including {@code subType} itself) accepted by {@code checker};
   * completes with {@code null} when no base type matches.
   */
  public static CompletableFuture<Type> findAnyBaseType(@NotNull Type subType,
                                                       Function<? super Type, ? extends CompletableFuture<Boolean>> checker) {
    CompletableFuture<Type> res = new CompletableFuture<>();
    findAnyBaseType(subType, checker, res).thenRun(() -> res.complete(null));
    return reschedule(res);
  }

  /** Recursive hierarchy walk backing {@link #findAnyBaseType(Type, Function)}. */
  private static CompletableFuture<Void> findAnyBaseType(@Nullable Type type,
                                                         Function<? super Type, ? extends CompletableFuture<Boolean>> checker,
                                                         CompletableFuture<Type> res) {
    if (type == null || res.isDone()) {
      return completedFuture(null);
    }
    // check self
    CompletableFuture<Void> self = checker.apply(type).thenAccept(r -> {
      if (r) {
        res.complete(type);
      }
    });
    // check base types
    if (type instanceof ClassType) {
      return allOf(
        self,
        superclass((ClassType)type).thenCompose(s -> findAnyBaseType(s, checker, res)),
        interfaces((ClassType)type).thenCompose(
          interfaces -> allOf(interfaces.stream().map(i -> findAnyBaseType(i, checker, res)).toArray(CompletableFuture[]::new))));
    }
    if (type instanceof InterfaceType) {
      return allOf(
        self,
        superinterfaces((InterfaceType)type).thenCompose(
          interfaces -> allOf(interfaces.stream().map(i -> findAnyBaseType(i, checker, res)).toArray(CompletableFuture[]::new))));
    }
    return self;
  }

  // Reader thread

  /** Declared methods of {@code type}; completes on the JDI reader thread when async. */
  public static CompletableFuture<List<Method>> methods(ReferenceType type) {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      return ((ReferenceTypeImpl)type).methodsAsync();
    }
    return completedFuture(type.methods());
  }

  /** Direct superinterfaces of {@code iface}; completes on the JDI reader thread when async. */
  public static CompletableFuture<List<InterfaceType>> superinterfaces(InterfaceType iface) {
    if (iface instanceof InterfaceTypeImpl && isAsyncEnabled()) {
      return ((InterfaceTypeImpl)iface).superinterfacesAsync();
    }
    return completedFuture(iface.superinterfaces());
  }

  /** Superclass of {@code cls}; completes on the JDI reader thread when async. */
  public static CompletableFuture<ClassType> superclass(ClassType cls) {
    if (cls instanceof ClassTypeImpl && isAsyncEnabled()) {
      return ((ClassTypeImpl)cls).superclassAsync();
    }
    return completedFuture(cls.superclass());
  }

  /** Direct interfaces of {@code cls}; completes on the JDI reader thread when async. */
  public static CompletableFuture<List<InterfaceType>> interfaces(ClassType cls) {
    if (cls instanceof ClassTypeImpl && isAsyncEnabled()) {
      return ((ClassTypeImpl)cls).interfacesAsync();
    }
    return completedFuture(cls.interfaces());
  }

  /** Direct supertypes (superclass plus interfaces) of {@code type} as a stream. */
  public static CompletableFuture<Stream<? extends ReferenceType>> supertypes(ReferenceType type) {
    if (!isAsyncEnabled()) {
      return toCompletableFuture(() -> DebuggerUtilsImpl.supertypes(type));
    }
    if (type instanceof InterfaceType) {
      return superinterfaces(((InterfaceType)type)).thenApply(Collection::stream);
    }
    else if (type instanceof ClassType) {
      return superclass((ClassType)type).thenCombine(interfaces((ClassType)type),
                                                     (superclass, interfaces) -> StreamEx.<ReferenceType>ofNullable(superclass)
                                                       .prepend(interfaces));
    }
    return completedFuture(StreamEx.empty());
  }

  /** Bytecodes of {@code method}; completes on the JDI reader thread when async. */
  public static CompletableFuture<byte[]> bytecodes(Method method) {
    if (method instanceof MethodImpl && isAsyncEnabled()) {
      return ((MethodImpl)method).bytecodesAsync();
    }
    return toCompletableFuture(() -> method.bytecodes());
  }

  /** Constant pool of {@code type}; completes on the JDI reader thread when async. */
  public static CompletableFuture<byte[]> constantPool(ReferenceType type) {
    if (type instanceof ReferenceTypeImpl && isAsyncEnabled()) {
      return ((ReferenceTypeImpl)type).constantPoolAsync();
    }
    return toCompletableFuture(() -> type.constantPool());
  }

  /** Enables/disables an event request; async when possible. */
  public static CompletableFuture<Void> setEnabled(EventRequest request, boolean value) {
    EventRequestManager eventRequestManager = request.virtualMachine().eventRequestManager();
    if (eventRequestManager instanceof EventRequestManagerImpl && isAsyncEnabled()) {
      return ((EventRequestManagerImpl)eventRequestManager).setEnabledAsync(request, value);
    }
    return toCompletableFuture(() -> request.setEnabled(value));
  }

  /** Resumes the whole VM; async when possible. */
  public static CompletableFuture<Void> resume(VirtualMachine vm) {
    if (vm instanceof VirtualMachineImpl && isAsyncEnabled()) {
      return ((VirtualMachineImpl)vm).resumeAsync();
    }
    return toCompletableFuture(() -> vm.resume());
  }

  /** Resumes a single thread; async when possible. */
  public static CompletableFuture<Void> resume(ThreadReference thread) {
    if (thread instanceof ThreadReferenceImpl && isAsyncEnabled()) {
      return ((ThreadReferenceImpl)thread).resumeAsync();
    }
    return toCompletableFuture(() -> thread.resume());
  }

  /** Resumes threads suspended by an event set; async when possible. */
  public static CompletableFuture<Void> resume(EventSet eventSet) {
    if (eventSet instanceof EventSetImpl && isAsyncEnabled()) {
      return ((EventSetImpl)eventSet).resumeAsync();
    }
    return toCompletableFuture(() -> eventSet.resume());
  }

  /** All loaded classes of the VM; async when possible. (Name kept as-is: it is public API.) */
  public static CompletableFuture<List<ReferenceType>> allCLasses(VirtualMachine virtualMachine) {
    if (virtualMachine instanceof VirtualMachineImpl && isAsyncEnabled()) {
      return ((VirtualMachineImpl)virtualMachine).allClassesAsync();
    }
    return toCompletableFuture(() -> virtualMachine.allClasses());
  }

  /**
   * Schedule future completion in a separate command with the same priority and suspend context (if available)
   * as in the command being processed at the moment
   */
  public static <T> CompletableFuture<T> reschedule(CompletableFuture<T> future) {
    DebuggerManagerThreadImpl.assertIsManagerThread();
    DebuggerManagerThreadImpl thread = (DebuggerManagerThreadImpl)InvokeThread.currentThread();
    LOG.assertTrue(thread != null);
    // Capture the currently-executing command so the continuation inherits its priority
    // and (for suspend-context commands) its suspend context.
    DebuggerCommandImpl event = DebuggerManagerThreadImpl.getCurrentCommand();
    LOG.assertTrue(event != null);
    PrioritizedTask.Priority priority = event.getPriority();
    SuspendContextImpl suspendContext = event instanceof SuspendContextCommandImpl
                                        ? ((SuspendContextCommandImpl)event).getSuspendContext() : null;
    CompletableFuture<T> res = new CompletableFuture<>();
    future.whenComplete((r, ex) -> {
      if (DebuggerManagerThreadImpl.isManagerThread()) {
        // Already on the manager thread - complete inline, no extra command needed.
        completeFuture(r, ex, res);
      }
      else if (suspendContext != null) {
        thread.schedule(new SuspendContextCommandImpl(suspendContext) {
          @Override
          public Priority getPriority() {
            return priority;
          }

          @Override
          public void contextAction(@NotNull SuspendContextImpl suspendContext) {
            completeFuture(r, ex, res);
          }

          @Override
          protected void commandCancelled() {
            res.cancel(false);
          }
        });
      }
      else {
        thread.schedule(new DebuggerCommandImpl(priority) {
          @Override
          protected void action() {
            completeFuture(r, ex, res);
          }

          @Override
          protected void commandCancelled() {
            res.cancel(false);
          }
        });
      }
    });
    return res;
  }

  /** Unwraps a {@link CompletionException} to its cause; returns the throwable itself otherwise. */
  public static Throwable unwrap(@Nullable Throwable throwable) {
    return throwable instanceof CompletionException ? throwable.getCause() : throwable;
  }

  /** Logs the (unwrapped) error and returns {@code null}, for use in {@code exceptionally} chains. */
  public static <T> T logError(@Nullable Throwable throwable) {
    DebuggerUtilsImpl.logError(unwrap(throwable));
    return null;
  }

  /** Runs {@code provider} immediately, packaging its result or exception into a future. */
  public static <T, E extends Exception> CompletableFuture<T> toCompletableFuture(ThrowableComputable<? extends T, E> provider) {
    try {
      return completedFuture(provider.compute());
    }
    catch (Exception e) {
      return CompletableFuture.failedFuture(e);
    }
  }

  /** Runs {@code provider} immediately, packaging success or exception into a {@code Void} future. */
  public static <E extends Exception> CompletableFuture<Void> toCompletableFuture(ThrowableRunnable<E> provider) {
    try {
      provider.run();
      return completedFuture(null);
    }
    catch (Exception e) {
      return CompletableFuture.failedFuture(e);
    }
  }

  /** Transfers a (result, exception) pair onto {@code future}. */
  private static <T> void completeFuture(T res, Throwable ex, CompletableFuture<T> future) {
    if (ex != null) {
      future.completeExceptionally(ex);
    }
    else {
      future.complete(res);
    }
  }
}
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.client.changes;

import static com.google.gerrit.client.FormatUtil.relativeFormat;
import static com.google.gerrit.client.FormatUtil.shortFormat;

import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.info.AccountInfo;
import com.google.gerrit.client.info.ChangeInfo;
import com.google.gerrit.client.info.ChangeInfo.LabelInfo;
import com.google.gerrit.client.ui.AccountLinkPanel;
import com.google.gerrit.client.ui.BranchLink;
import com.google.gerrit.client.ui.ChangeLink;
import com.google.gerrit.client.ui.NavigationTable;
import com.google.gerrit.client.ui.NeedsSignInKeyCommand;
import com.google.gerrit.client.ui.ProjectLink;
import com.google.gerrit.common.PageLinks;
import com.google.gerrit.extensions.client.GeneralPreferencesInfo.ReviewCategoryStrategy;
import com.google.gerrit.extensions.client.ListChangesOption;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.KeyPressEvent;
import com.google.gwt.user.client.ui.FlexTable.FlexCellFormatter;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HTMLTable.Cell;
import com.google.gwt.user.client.ui.HTMLTable.CellFormatter;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.InlineLabel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.UIObject;
import com.google.gwt.user.client.ui.Widget;

import java.util.ArrayList;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;

/**
 * Keyboard-navigable table of changes, organized into titled {@link Section}s.
 * Row 0 is the header; fixed columns (star, id, subject, ...) come first, followed
 * by one dynamically-added column per review label (see {@link #updateColumnsForLabels}).
 */
public class ChangeTable extends NavigationTable<ChangeInfo> {
  // If changing default options, also update in
  // ChangeIT#defaultSearchDoesNotTouchDatabase().
  static final Set<ListChangesOption> OPTIONS =
      Collections.unmodifiableSet(EnumSet.of(
          ListChangesOption.LABELS, ListChangesOption.DETAILED_ACCOUNTS));

  // Fixed column indices; column 0 is reserved by NavigationTable (pointer column).
  private static final int C_STAR = 1;
  private static final int C_ID = 2;
  private static final int C_SUBJECT = 3;
  private static final int C_STATUS = 4;
  private static final int C_OWNER = 5;
  private static final int C_PROJECT = 6;
  private static final int C_BRANCH = 7;
  private static final int C_LAST_UPDATE = 8;
  private static final int C_SIZE = 9;
  // Total fixed columns; label columns are appended starting at this index.
  private static final int BASE_COLUMNS = 10;

  private final List<Section> sections;
  // Current total column count: BASE_COLUMNS + one per label name.
  private int columns;
  private final boolean showLegacyId;
  // Sorted label names currently shown as extra columns (parallel to column order).
  private List<String> labelNames;

  /** Builds the header row and installs click/keyboard handlers. */
  public ChangeTable() {
    super(Util.C.changeItemHelp());
    columns = BASE_COLUMNS;
    labelNames = Collections.emptyList();
    showLegacyId = Gerrit.getUserPreferences().legacycidInChangeTable();

    if (Gerrit.isSignedIn()) {
      // 's' toggles the star on the selected change (sign-in required).
      keysAction.add(new StarKeyCommand(0, 's', Util.C.changeTableStar()));
    }

    sections = new ArrayList<>();
    table.setText(0, C_STAR, "");
    table.setText(0, C_ID, Util.C.changeTableColumnID());
    table.setText(0, C_SUBJECT, Util.C.changeTableColumnSubject());
    table.setText(0, C_STATUS, Util.C.changeTableColumnStatus());
    table.setText(0, C_OWNER, Util.C.changeTableColumnOwner());
    table.setText(0, C_PROJECT, Util.C.changeTableColumnProject());
    table.setText(0, C_BRANCH, Util.C.changeTableColumnBranch());
    table.setText(0, C_LAST_UPDATE, Util.C.changeTableColumnLastUpdate());
    table.setText(0, C_SIZE, Util.C.changeTableColumnSize());

    final FlexCellFormatter fmt = table.getFlexCellFormatter();
    fmt.addStyleName(0, C_STAR, Gerrit.RESOURCES.css().iconHeader());
    for (int i = C_ID; i < columns; i++) {
      fmt.addStyleName(0, i, Gerrit.RESOURCES.css().dataHeader());
    }
    if (!showLegacyId) {
      // Column is kept in the DOM but hidden via CSS so indices stay stable.
      fmt.addStyleName(0, C_ID, Gerrit.RESOURCES.css().dataHeaderHidden());
    }

    table.addClickHandler(new ClickHandler() {
      @Override
      public void onClick(final ClickEvent event) {
        final Cell cell = table.getCellForEvent(event);
        if (cell == null) {
          return;
        }
        if (cell.getCellIndex() == C_STAR) {
          // Don't do anything (handled by star itself).
        } else if (cell.getCellIndex() == C_STATUS) {
          // Don't do anything.
        } else if (cell.getCellIndex() == C_OWNER) {
          // Don't do anything.
        } else if (getRowItem(cell.getRowIndex()) != null) {
          movePointerTo(cell.getRowIndex());
        }
      }
    });
  }

  /** Rows are keyed by the change's legacy numeric id. */
  @Override
  protected Object getRowItemKey(final ChangeInfo item) {
    return item.legacyId();
  }

  /** Opening a row navigates to the change page. */
  @Override
  protected void onOpenRow(final int row) {
    final ChangeInfo c = getRowItem(row);
    final Change.Id id = c.legacyId();
    Gerrit.display(PageLinks.toChange(id));
  }

  /** Inserts a full-width "(none)" placeholder row for an empty section. */
  private void insertNoneRow(final int row) {
    insertRow(row);
    table.setText(row, 0, Util.C.changeTableNone());
    final FlexCellFormatter fmt = table.getFlexCellFormatter();
    fmt.setColSpan(row, 0, columns);
    fmt.setStyleName(row, 0, Gerrit.RESOURCES.css().emptySection());
  }

  /** Inserts an empty data row and applies the standard cell styles. */
  private void insertChangeRow(final int row) {
    insertRow(row);
    applyDataRowStyle(row);
  }

  /** Applies per-cell CSS classes to a data row (called once per inserted row). */
  @Override
  protected void applyDataRowStyle(final int row) {
    super.applyDataRowStyle(row);
    final CellFormatter fmt = table.getCellFormatter();
    fmt.addStyleName(row, C_STAR, Gerrit.RESOURCES.css().iconCell());
    for (int i = C_ID; i < columns; i++) {
      fmt.addStyleName(row, i, Gerrit.RESOURCES.css().dataCell());
    }
    if (!showLegacyId) {
      fmt.addStyleName(row, C_ID, Gerrit.RESOURCES.css().dataCellHidden());
    }
    fmt.addStyleName(row, C_SUBJECT, Gerrit.RESOURCES.css().cSUBJECT());
    fmt.addStyleName(row, C_STATUS, Gerrit.RESOURCES.css().cSTATUS());
    fmt.addStyleName(row, C_OWNER, Gerrit.RESOURCES.css().cOWNER());
    fmt.addStyleName(row, C_LAST_UPDATE, Gerrit.RESOURCES.css().cLastUpdate());
    fmt.addStyleName(row, C_SIZE, Gerrit.RESOURCES.css().cSIZE());
    // Everything after C_SIZE is a label/approval column.
    for (int i = C_SIZE + 1; i < columns; i++) {
      fmt.addStyleName(row, i, Gerrit.RESOURCES.css().cAPPROVAL());
    }
  }

  /**
   * Recomputes the set of label columns from the given change lists:
   * collects all distinct label names, sorts them, removes surplus columns,
   * and writes abbreviated label headers (full name in the cell title/tooltip).
   */
  public void updateColumnsForLabels(ChangeList... lists) {
    labelNames = new ArrayList<>();
    for (ChangeList list : lists) {
      for (int i = 0; i < list.length(); i++) {
        for (String name : list.get(i).labels()) {
          if (!labelNames.contains(name)) {
            labelNames.add(name);
          }
        }
      }
    }
    Collections.sort(labelNames);

    int baseColumns = BASE_COLUMNS;
    if (baseColumns + labelNames.size() < columns) {
      // Fewer labels than before: trim the extra cells from every row.
      int n = columns - (baseColumns + labelNames.size());
      for (int row = 0; row < table.getRowCount(); row++) {
        table.removeCells(row, columns, n);
      }
    }
    columns = baseColumns + labelNames.size();

    FlexCellFormatter fmt = table.getFlexCellFormatter();
    for (int i = 0; i < labelNames.size(); i++) {
      String name = labelNames.get(i);
      int col = baseColumns + i;
      String abbrev = getAbbreviation(name, "-");
      table.setText(0, col, abbrev);
      table.getCellFormatter().getElement(0, col).setTitle(name);
      fmt.addStyleName(0, col, Gerrit.RESOURCES.css().dataHeader());
    }

    // Re-stretch section title rows to span the new column count.
    for (Section s : sections) {
      if (s.titleRow >= 0) {
        fmt.setColSpan(s.titleRow, 0, columns);
      }
    }
  }

  /** Fills one data row with the given change's cells (star, links, status, labels, ...). */
  private void populateChangeRow(final int row, final ChangeInfo c,
      boolean highlightUnreviewed) {
    CellFormatter fmt = table.getCellFormatter();
    if (Gerrit.isSignedIn()) {
      table.setWidget(row, C_STAR, StarredChanges.createIcon(
          c.legacyId(),
          c.starred()));
    }
    table.setWidget(row, C_ID, new TableChangeLink(String.valueOf(c.legacyId()), c));

    String subject = Util.cropSubject(c.subject());
    table.setWidget(row, C_SUBJECT, new TableChangeLink(subject, c));

    Change.Status status = c.status();
    if (status != Change.Status.NEW) {
      table.setText(row, C_STATUS, Util.toLongString(status));
    } else if (!c.mergeable()) {
      table.setText(row, C_STATUS, Util.C.changeTableNotMergeable());
    }

    if (c.owner() != null) {
      table.setWidget(row, C_OWNER, new AccountLinkPanel(c.owner(), status));
    } else {
      table.setText(row, C_OWNER, "");
    }

    table.setWidget(row, C_PROJECT, new ProjectLink(c.projectNameKey()));
    table.setWidget(row, C_BRANCH, new BranchLink(c.projectNameKey(), c
        .status(), c.branch(), c.topic()));
    if (Gerrit.getUserPreferences().relativeDateInChangeTable()) {
      table.setText(row, C_LAST_UPDATE, relativeFormat(c.updated()));
    } else {
      table.setText(row, C_LAST_UPDATE, shortFormat(c.updated()));
    }

    int col = C_SIZE;
    if (!Gerrit.getUserPreferences().sizeBarInChangeTable()) {
      table.setText(row, col,
          Util.M.insertionsAndDeletions(c.insertions(), c.deletions()));
    } else {
      // Colored bar with the exact +/- counts in the tooltip.
      table.setWidget(row, col, getSizeWidget(c));
      fmt.getElement(row, col).setTitle(
          Util.M.insertionsAndDeletions(c.insertions(), c.deletions()));
    }
    col++;

    // One cell per label column: icon/score reflecting the highest-signal vote.
    for (int idx = 0; idx < labelNames.size(); idx++, col++) {
      String name = labelNames.get(idx);
      LabelInfo label = c.label(name);
      if (label == null) {
        fmt.getElement(row, col).setTitle(Gerrit.C.labelNotApplicable());
        fmt.addStyleName(row, col, Gerrit.RESOURCES.css().labelNotApplicable());
        continue;
      }

      String user;
      String info;
      ReviewCategoryStrategy reviewCategoryStrategy =
          Gerrit.getUserPreferences().reviewCategoryStrategy();
      if (label.rejected() != null) {
        // Veto: red icon, optionally annotated with the reviewer per preference.
        user = label.rejected().name();
        info = getReviewCategoryDisplayInfo(reviewCategoryStrategy,
            label.rejected());
        if (info != null) {
          FlowPanel panel = new FlowPanel();
          panel.add(new Image(Gerrit.RESOURCES.redNot()));
          panel.add(new InlineLabel(info));
          table.setWidget(row, col, panel);
        } else {
          table.setWidget(row, col, new Image(Gerrit.RESOURCES.redNot()));
        }
      } else if (label.approved() != null) {
        // Max approval: green check.
        user = label.approved().name();
        info = getReviewCategoryDisplayInfo(reviewCategoryStrategy,
            label.approved());
        if (info != null) {
          FlowPanel panel = new FlowPanel();
          panel.add(new Image(Gerrit.RESOURCES.greenCheck()));
          panel.add(new InlineLabel(info));
          table.setWidget(row, col, panel);
        } else {
          table.setWidget(row, col, new Image(Gerrit.RESOURCES.greenCheck()));
        }
      } else if (label.disliked() != null) {
        // Negative (non-veto) score, shown as text.
        user = label.disliked().name();
        info = getReviewCategoryDisplayInfo(reviewCategoryStrategy,
            label.disliked());
        String vstr = String.valueOf(label._value());
        if (info != null) {
          vstr = vstr + " " + info;
        }
        fmt.addStyleName(row, col, Gerrit.RESOURCES.css().negscore());
        table.setText(row, col, vstr);
      } else if (label.recommended() != null) {
        // Positive (non-max) score, shown as "+n".
        user = label.recommended().name();
        info = getReviewCategoryDisplayInfo(reviewCategoryStrategy,
            label.recommended());
        String vstr = "+" + label._value();
        if (info != null) {
          vstr = vstr + " " + info;
        }
        fmt.addStyleName(row, col, Gerrit.RESOURCES.css().posscore());
        table.setText(row, col, vstr);
      } else {
        table.clearCell(row, col);
        continue;
      }
      fmt.addStyleName(row, col, Gerrit.RESOURCES.css().singleLine());

      if (user != null) {
        // Some web browsers ignore the embedded newline; some like it;
        // so we include a space before the newline to accommodate both.
        fmt.getElement(row, col).setTitle(name + " \nby " + user);
      }
    }

    boolean needHighlight = false;
    if (highlightUnreviewed && !c.reviewed()) {
      needHighlight = true;
    }
    final Element tr = fmt.getElement(row, 0).getParentElement();
    UIObject.setStyleName(tr, Gerrit.RESOURCES.css().needsReview(),
        needHighlight);

    setRowItem(row, c);
  }

  /** Reviewer annotation text (name/email/username/initials) per user preference, or null. */
  private static String getReviewCategoryDisplayInfo(
      ReviewCategoryStrategy reviewCategoryStrategy, AccountInfo accountInfo) {
    switch (reviewCategoryStrategy) {
      case NAME:
        return accountInfo.name();
      case EMAIL:
        return accountInfo.email();
      case USERNAME:
        return accountInfo.username();
      case ABBREV:
        return getAbbreviation(accountInfo.name(), " ");
      case NONE:
      default:
        return null;
    }
  }

  /** First letter of each token of {@code name} split on {@code token}, upper-cased. */
  private static String getAbbreviation(String name, String token) {
    StringBuilder abbrev = new StringBuilder();
    if (name != null) {
      for (String t : name.split(token)) {
        abbrev.append(t.substring(0, 1).toUpperCase());
      }
    }
    return abbrev.toString();
  }

  /** Builds the size bar: width and red→green color scale with the change's line delta. */
  private static Widget getSizeWidget(ChangeInfo c) {
    int largeChangeSize = Gerrit.info().change().largeChange();
    int changedLines = c.insertions() + c.deletions();
    // p = percentage of "large change" size, capped at 100.
    int p = 100;
    if (changedLines < largeChangeSize) {
      p = changedLines * 100 / largeChangeSize;
    }
    int width = Math.max(2, 70 * p / 100);
    int red = p >= 50 ? 255 : (int) Math.round((p) * 5.12);
    int green = p <= 50 ? 255 : (int) Math.round(256 - (p - 50) * 5.12);
    String bg = "#" + toHex(red) + toHex(green) + "00";

    SimplePanel panel = new SimplePanel();
    panel.setStyleName(Gerrit.RESOURCES.css().changeSize());
    panel.setWidth(width + "px");
    panel.getElement().getStyle().setBackgroundColor(bg);
    return panel;
  }

  /** Two-digit lowercase hex for a 0-255 component. */
  private static String toHex(int i) {
    String hex = Integer.toHexString(i);
    return hex.length() == 1 ? "0" + hex : hex;
  }

  /** Appends a section (title row, if any, plus an initial "(none)" row) at the bottom. */
  public void addSection(final Section s) {
    assert s.parent == null;

    s.parent = this;
    s.titleRow = table.getRowCount();
    if (s.displayTitle()) {
      final FlexCellFormatter fmt = table.getFlexCellFormatter();
      fmt.setColSpan(s.titleRow, 0, columns);
      fmt.addStyleName(s.titleRow, 0, Gerrit.RESOURCES.css().sectionHeader());
    } else {
      s.titleRow = -1;
    }

    s.dataBegin = table.getRowCount();
    insertNoneRow(s.dataBegin);
    sections.add(s);
  }

  /** Inserts a table row, shifting every later section's bookkeeping indices down. */
  private int insertRow(final int beforeRow) {
    for (final Section s : sections) {
      if (beforeRow <= s.titleRow) {
        s.titleRow++;
      }
      if (beforeRow < s.dataBegin) {
        s.dataBegin++;
      }
    }
    return table.insertRow(beforeRow);
  }

  /** Removes a table row, shifting every later section's bookkeeping indices up. */
  private void removeRow(final int row) {
    for (final Section s : sections) {
      if (row < s.titleRow) {
        s.titleRow--;
      }
      if (row < s.dataBegin) {
        s.dataBegin--;
      }
    }
    table.removeRow(row);
  }

  /** Keyboard command: toggle the star on the currently selected row (requires sign-in). */
  public class StarKeyCommand extends NeedsSignInKeyCommand {
    public StarKeyCommand(int mask, char key, String help) {
      super(mask, key, help);
    }

    @Override
    public void onKeyPress(final KeyPressEvent event) {
      int row = getCurrentRow();
      ChangeInfo c = getRowItem(row);
      if (c != null && Gerrit.isSignedIn()) {
        ((StarredChanges.Icon) table.getWidget(row, C_STAR)).toggleStar();
      }
    }
  }

  /** Change link that also moves the keyboard pointer to its row before navigating. */
  private final class TableChangeLink extends ChangeLink {
    private TableChangeLink(final String text, final ChangeInfo c) {
      super(text, c.legacyId());
    }

    @Override
    public void go() {
      movePointerTo(cid);
      super.go();
    }
  }

  /**
   * One titled group of change rows inside a {@link ChangeTable}.
   * Tracks its own title row index and the index of its first data row;
   * the parent table adjusts these as rows are inserted/removed.
   */
  public static class Section {
    ChangeTable parent;
    String titleText;
    Widget titleWidget;
    // Row index of the title, or -1 if the section has no title row.
    int titleRow = -1;
    // Row index of the first data row of this section.
    int dataBegin;
    // Number of data rows currently shown (0 means the "(none)" placeholder is shown).
    int rows;
    private boolean highlightUnreviewed;

    public void setHighlightUnreviewed(boolean value) {
      this.highlightUnreviewed = value;
    }

    public void setTitleText(final String text) {
      titleText = text;
      titleWidget = null;
      if (titleRow >= 0) {
        parent.table.setText(titleRow, 0, titleText);
      }
    }

    public void setTitleWidget(final Widget title) {
      titleWidget = title;
      titleText = null;
      if (titleRow >= 0) {
        parent.table.setWidget(titleRow, 0, title);
      }
    }

    /** Renders the title (text or widget) if one is set; returns whether a title exists. */
    public boolean displayTitle() {
      if (titleText != null) {
        setTitleText(titleText);
        return true;
      } else if (titleWidget != null) {
        setTitleWidget(titleWidget);
        return true;
      }
      return false;
    }

    /** Re-renders this section's rows to match {@code changeList}, adding/removing rows as needed. */
    public void display(ChangeList changeList) {
      final int sz = changeList != null ? changeList.length() : 0;
      final boolean hadData = rows > 0;

      if (hadData) {
        // Shrink to the new size.
        while (sz < rows) {
          parent.removeRow(dataBegin);
          rows--;
        }
      } else {
        // Remove the "(none)" placeholder row.
        parent.removeRow(dataBegin);
      }

      if (sz == 0) {
        parent.insertNoneRow(dataBegin);
        return;
      }

      while (rows < sz) {
        parent.insertChangeRow(dataBegin + rows);
        rows++;
      }
      for (int i = 0; i < sz; i++) {
        parent.populateChangeRow(dataBegin + i, changeList.get(i),
            highlightUnreviewed);
      }
    }
  }
}
/******************************************************************************* * Copyright (c) 2011, Daniel Murphy * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. ******************************************************************************/ /* * JBox2D - A Java Port of Erin Catto's Box2D * * JBox2D homepage: http://jbox2d.sourceforge.net/ * Box2D homepage: http://www.box2d.org * * This software is provided 'as-is', without any express or implied * warranty. In no event will the authors be held liable for any damages * arising from the use of this software. 
* * Permission is granted to anyone to use this software for any purpose, * including commercial applications, and to alter it and redistribute it * freely, subject to the following restrictions: * * 1. The origin of this software must not be misrepresented; you must not * claim that you wrote the original software. If you use this software * in a product, an acknowledgment in the product documentation would be * appreciated but is not required. * 2. Altered source versions must be plainly marked as such, and must not be * misrepresented as being the original software. * 3. This notice may not be removed or altered from any source distribution. */ package org.jbox2d.collision.shapes; import org.jbox2d.collision.AABB; import org.jbox2d.collision.RayCastInput; import org.jbox2d.collision.RayCastOutput; import org.jbox2d.common.Mat22; import org.jbox2d.common.Settings; import org.jbox2d.common.Transform; import org.jbox2d.common.Vec2; //Updated to rev 100 /** * A convex polygon shape. Create using Body.createShape(ShapeDef), not the * ructor here. */ public class PolygonShape extends Shape { /** Dump lots of debug information. */ private static boolean m_debug = false; /** * Local position of the shape centroid in parent body frame. */ public final Vec2 m_centroid = new Vec2(); /** * The vertices of the shape. Note: use getVertexCount(), not * m_vertices.length, to get number of active vertices. */ public final Vec2 m_vertices[]; /** * The normals of the shape. Note: use getVertexCount(), not * m_normals.length, to get number of active normals. */ public final Vec2 m_normals[]; /** * Number of active vertices in the shape. 
*/ public int m_vertexCount; // pooling private final Vec2 pool1 = new Vec2(); private final Vec2 pool2 = new Vec2(); private final Vec2 pool3 = new Vec2(); private final Vec2 pool4 = new Vec2(); private final Vec2 pool5 = new Vec2(); private final Vec2 pool6 = new Vec2(); private Transform poolt1 = new Transform(); public PolygonShape() { m_type = ShapeType.POLYGON; m_vertexCount = 0; m_vertices = new Vec2[Settings.maxPolygonVertices]; for (int i = 0; i < m_vertices.length; i++) { m_vertices[i] = new Vec2(); } m_normals = new Vec2[Settings.maxPolygonVertices]; for (int i = 0; i < m_normals.length; i++) { m_normals[i] = new Vec2(); } m_radius = Settings.polygonRadius; m_centroid.setZero(); } public final Shape clone() { PolygonShape shape = new PolygonShape(); shape.m_centroid.set(this.m_centroid); for (int i = 0; i < shape.m_normals.length; i++) { shape.m_normals[i].set(m_normals[i]); shape.m_vertices[i].set(m_vertices[i]); } shape.m_radius = this.m_radius; shape.m_vertexCount = this.m_vertexCount; return shape; } /** * Get the supporting vertex index in the given direction. * * @param d * @return */ public final int getSupport(final Vec2 d) { int bestIndex = 0; float bestValue = Vec2.dot(m_vertices[0], d); for (int i = 1; i < m_vertexCount; i++) { float value = Vec2.dot(m_vertices[i], d); if (value > bestValue) { bestIndex = i; bestValue = value; } } return bestIndex; } /** * Get the supporting vertex in the given direction. * * @param d * @return */ public final Vec2 getSupportVertex(final Vec2 d) { int bestIndex = 0; float bestValue = Vec2.dot(m_vertices[0], d); for (int i = 1; i < m_vertexCount; i++) { float value = Vec2.dot(m_vertices[i], d); if (value > bestValue) { bestIndex = i; bestValue = value; } } return m_vertices[bestIndex]; } /** * Copy vertices. This assumes the vertices define a convex polygon. It is * assumed that the exterior is the the right of each edge. 
*/ public final void set(final Vec2[] vertices, final int count) { assert (2 <= count && count <= Settings.maxPolygonVertices); m_vertexCount = count; // Copy vertices. for (int i = 0; i < m_vertexCount; ++i) { if (m_vertices[i] == null) { m_vertices[i] = new Vec2(); } m_vertices[i].set(vertices[i]); } final Vec2 edge = pool1; // Compute normals. Ensure the edges have non-zero length. for (int i = 0; i < m_vertexCount; ++i) { final int i1 = i; final int i2 = i + 1 < m_vertexCount ? i + 1 : 0; edge.set(m_vertices[i2]).subLocal(m_vertices[i1]); assert (edge.lengthSquared() > Settings.EPSILON * Settings.EPSILON); Vec2.crossToOut(edge, 1f, m_normals[i]); m_normals[i].normalize(); } if (m_debug) { final Vec2 r = pool2; // Ensure the polygon is convex and the interior // is to the left of each edge. for (int i = 0; i < m_vertexCount; ++i) { final int i1 = i; final int i2 = i + 1 < m_vertexCount ? i + 1 : 0; edge.set(m_vertices[i2]).subLocal(m_vertices[i1]); for (int j = 0; j < m_vertexCount; ++j) { // Don't check vertices on the current edge. if (j == i1 || j == i2) { continue; } r.set(m_vertices[j]).subLocal(m_vertices[i1]); // Your polygon is non-convex (it has an indentation) or // has colinear edges. final float s = Vec2.cross(edge, r); assert (s > 0.0f); } } } // Compute the polygon centroid. computeCentroidToOut(m_vertices, m_vertexCount, m_centroid); } /** * Build vertices to represent an axis-aligned box. * * @param hx * the half-width. * @param hy * the half-height. */ public final void setAsBox(final float hx, final float hy) { m_vertexCount = 4; m_vertices[0].set(-hx, -hy); m_vertices[1].set(hx, -hy); m_vertices[2].set(hx, hy); m_vertices[3].set(-hx, hy); m_normals[0].set(0.0f, -1.0f); m_normals[1].set(1.0f, 0.0f); m_normals[2].set(0.0f, 1.0f); m_normals[3].set(-1.0f, 0.0f); m_centroid.setZero(); } /** * Build vertices to represent an oriented box. * * @param hx * the half-width. * @param hy * the half-height. 
* @param center * the center of the box in local coordinates. * @param angle * the rotation of the box in local coordinates. */ public final void setAsBox(final float hx, final float hy, final Vec2 center, final float angle) { m_vertexCount = 4; m_vertices[0].set(-hx, -hy); m_vertices[1].set(hx, -hy); m_vertices[2].set(hx, hy); m_vertices[3].set(-hx, hy); m_normals[0].set(0.0f, -1.0f); m_normals[1].set(1.0f, 0.0f); m_normals[2].set(0.0f, 1.0f); m_normals[3].set(-1.0f, 0.0f); m_centroid.set(center); final Transform xf = poolt1; xf.position.set(center); xf.R.set(angle); // Transform vertices and normals. for (int i = 0; i < m_vertexCount; ++i) { Transform.mulToOut(xf, m_vertices[i], m_vertices[i]); Mat22.mulToOut(xf.R, m_normals[i], m_normals[i]); } } /** * Set this as a single edge. * * @param v1 * @param v2 */ public final void setAsEdge(final Vec2 v1, final Vec2 v2) { m_vertexCount = 2; m_vertices[0].set(v1); m_vertices[1].set(v2); m_centroid.set(v1).addLocal(v2).mulLocal(0.5f); // = 0.5f * (v1 + v2); m_normals[0].set(v2).subLocal(v1); Vec2.crossToOut(m_normals[0], 1f, m_normals[0]); // m_normals[0] = Cross(v2 - v1, 1.0f); m_normals[0].normalize(); m_normals[1].set(m_normals[0]).negateLocal(); } /** * @see Shape#testPoint(Transform, Vec2) */ @Override public final boolean testPoint(final Transform xf, final Vec2 p) { final Vec2 pLocal = pool1; pLocal.set(p).subLocal(xf.position); Mat22.mulTransToOut(xf.R, pLocal, pLocal); if (m_debug) { System.out.println("--testPoint debug--"); System.out.println("Vertices: "); for (int i = 0; i < m_vertexCount; ++i) { System.out.println(m_vertices[i]); } System.out.println("pLocal: " + pLocal); } final Vec2 temp = pool2; for (int i = 0; i < m_vertexCount; ++i) { temp.set(pLocal).subLocal(m_vertices[i]); final float dot = Vec2.dot(m_normals[i], temp); if (dot > 0.0f) { return false; } } return true; } /** * @see Shape#computeAABB(AABB, Transform, int) */ @Override public final void computeAABB(final AABB argAabb, final Transform 
argXf) { final Vec2 lower = pool1; final Vec2 upper = pool2; final Vec2 v = pool3; Transform.mulToOut(argXf, m_vertices[0], lower); upper.set(lower); for (int i = 1; i < m_vertexCount; ++i) { Transform.mulToOut(argXf, m_vertices[i], v); // Vec2 v = Mul(xf, m_vertices[i]); Vec2.minToOut(lower, v, lower); Vec2.maxToOut(upper, v, upper); } // Vec2 r(m_radius, m_radius); // aabb->lowerBound = lower - r; // aabb->upperBound = upper + r; argAabb.lowerBound.x = lower.x - m_radius; argAabb.lowerBound.y = lower.y - m_radius; argAabb.upperBound.x = upper.x + m_radius; argAabb.upperBound.y = upper.y + m_radius; } // djm pooling, and from above /* * private static final TLVec2 tlNormalL = new TLVec2(); private static * final TLMassData tlMd = new TLMassData(); private static final FloatArray * tldepths = new FloatArray(); private static final TLVec2 tlIntoVec = new * TLVec2(); private static final TLVec2 tlOutoVec = new TLVec2(); private * static final TLVec2 tlP2b = new TLVec2(); private static final TLVec2 * tlP3 = new TLVec2(); private static final TLVec2 tlcenter = new TLVec2(); * /* * * @see Shape#computeSubmergedArea(Vec2, float, XForm, Vec2) public float * computeSubmergedArea(final Vec2 normal, float offset, Transform xf, Vec2 * c) { final Vec2 normalL = tlNormalL.get(); final MassData md = * tlMd.get(); //Transform plane into shape co-ordinates * Mat22.mulTransToOut(xf.R,normal, normalL); float offsetL = offset - * Vec2.dot(normal,xf.position); final Float[] depths = * tldepths.get(Settings.maxPolygonVertices); int diveCount = 0; int * intoIndex = -1; int outoIndex = -1; boolean lastSubmerged = false; int i * = 0; for (i = 0; i < m_vertexCount; ++i){ depths[i] = * Vec2.dot(normalL,m_vertices[i]) - offsetL; boolean isSubmerged = * depths[i]<-Settings.EPSILON; if (i > 0){ if (isSubmerged){ if * (!lastSubmerged){ intoIndex = i-1; diveCount++; } } else{ if * (lastSubmerged){ outoIndex = i-1; diveCount++; } } } lastSubmerged = * isSubmerged; } switch(diveCount){ case 0: if 
(lastSubmerged){ * //Completely submerged computeMass(md, 1.0f); * Transform.mulToOut(xf,md.center, c); return md.mass; } else{ return 0; } * case 1: if(intoIndex==-1){ intoIndex = m_vertexCount-1; } else{ outoIndex * = m_vertexCount-1; } break; } final Vec2 intoVec = tlIntoVec.get(); final * Vec2 outoVec = tlOutoVec.get(); final Vec2 e1 = tle1.get(); final Vec2 e2 * = tle2.get(); int intoIndex2 = (intoIndex+1) % m_vertexCount; int * outoIndex2 = (outoIndex+1) % m_vertexCount; float intoLambda = (0 - * depths[intoIndex]) / (depths[intoIndex2] - depths[intoIndex]); float * outoLambda = (0 - depths[outoIndex]) / (depths[outoIndex2] - * depths[outoIndex]); * intoVec.set(m_vertices[intoIndex].x*(1-intoLambda)+m_vertices * [intoIndex2].x*intoLambda , * m_vertices[intoIndex].y*(1-intoLambda)+m_vertices * [intoIndex2].y*intoLambda); * outoVec.set(m_vertices[outoIndex].x*(1-outoLambda * )+m_vertices[outoIndex2].x*outoLambda , * m_vertices[outoIndex].y*(1-outoLambda * )+m_vertices[outoIndex2].y*outoLambda); // Initialize accumulator float * area = 0; final Vec2 center = tlcenter.get(); center.setZero(); final * Vec2 p2b = tlP2b.get().set(m_vertices[intoIndex2]); final Vec2 p3 = * tlP3.get(); p3.setZero(); float k_inv3 = 1.0f / 3.0f; // An awkward loop * from intoIndex2+1 to outIndex2 i = intoIndex2; while (i != outoIndex2){ i * = (i+1) % m_vertexCount; if (i == outoIndex2){ p3.set(outoVec); } else{ * p3.set(m_vertices[i]); } // Add the triangle formed by intoVec,p2,p3 { * e1.set(p2b).subLocal(intoVec); e2.set(p3).subLocal(intoVec); float D = * Vec2.cross(e1, e2); float triangleArea = 0.5f * D; area += triangleArea; * // Area weighted centroid center.x += triangleArea * k_inv3 * (intoVec.x * + p2b.x + p3.x); center.y += triangleArea * k_inv3 * (intoVec.y + p2b.y + * p3.y); } // p2b.set(p3); } // Normalize and transform centroid center.x * *= 1.0f / area; center.y *= 1.0f / area; Transform.mulToOut(xf, center, * c); return area; } */ /* * Get the supporting vertex index in 
the given direction. * * @param d * * @return public final int getSupport( final Vec2 d){ int bestIndex = 0; * float bestValue = Vec2.dot(m_vertices[0], d); for (int i = 1; i < * m_vertexCount; ++i){ final float value = Vec2.dot(m_vertices[i], d); if * (value > bestValue){ bestIndex = i; bestValue = value; } } return * bestIndex; } /** Get the supporting vertex in the given direction. * * @param d * * @return public final Vec2 getSupportVertex( final Vec2 d){ int bestIndex * = 0; float bestValue = Vec2.dot(m_vertices[0], d); for (int i = 1; i < * m_vertexCount; ++i){ final float value = Vec2.dot(m_vertices[i], d); if * (value > bestValue){ bestIndex = i; bestValue = value; } } return * m_vertices[bestIndex]; } */ /** * Get the vertex count. * * @return */ public final int getVertexCount() { return m_vertexCount; } /** * Get a vertex by index. * * @param index * @return */ public final Vec2 getVertex(final int index) { assert (0 <= index && index < m_vertexCount); return m_vertices[index]; } /** * @see org.jbox2d.collision.shapes.Shape#raycast(org.jbox2d.collision.RayCastOutput, * org.jbox2d.collision.RayCastInput, org.jbox2d.common.Transform, int) */ @Override public final boolean raycast(RayCastOutput argOutput, RayCastInput argInput, Transform argXf) { final Vec2 p1 = pool1; final Vec2 p2 = pool2; final Vec2 d = pool3; final Vec2 temp = pool4; p1.set(argInput.p1).subLocal(argXf.position); Mat22.mulTransToOut(argXf.R, p1, p1); p2.set(argInput.p2).subLocal(argXf.position); Mat22.mulTransToOut(argXf.R, p2, p2); d.set(p2).subLocal(p1); if (m_vertexCount == 2) { Vec2 v1 = m_vertices[0]; Vec2 v2 = m_vertices[1]; Vec2 normal = m_normals[0]; // q = p1 + t * d // dot(normal, q - v1) = 0 // dot(normal, p1 - v1) + t * dot(normal, d) = 0 temp.set(v1).subLocal(p1); float numerator = Vec2.dot(normal, temp); float denominator = Vec2.dot(normal, d); if (denominator == 0.0f) { return false; } float t = numerator / denominator; if (t < 0.0f || 1.0f < t) { return false; } final 
Vec2 q = pool5; final Vec2 r = pool6; // Vec2 q = p1 + t * d; temp.set(d).mulLocal(t); q.set(p1).addLocal(temp); // q = v1 + s * r // s = dot(q - v1, r) / dot(r, r) // Vec2 r = v2 - v1; r.set(v2).subLocal(v1); float rr = Vec2.dot(r, r); if (rr == 0.0f) { return false; } temp.set(q).subLocal(v1); float s = Vec2.dot(temp, r) / rr; if (s < 0.0f || 1.0f < s) { return false; } argOutput.fraction = t; if (numerator > 0.0f) { // argOutput.normal = -normal; argOutput.normal.set(normal).mulLocal(-1); } else { // output.normal = normal; argOutput.normal.set(normal); } return true; } else { float lower = 0, upper = argInput.maxFraction; int index = -1; for (int i = 0; i < m_vertexCount; ++i) { // p = p1 + a * d // dot(normal, p - v) = 0 // dot(normal, p1 - v) + a * dot(normal, d) = 0 temp.set(m_vertices[i]).subLocal(p1); final float numerator = Vec2.dot(m_normals[i], temp); final float denominator = Vec2.dot(m_normals[i], d); if (denominator == 0.0f) { if (numerator < 0.0f) { return false; } } else { // Note: we want this predicate without division: // lower < numerator / denominator, where denominator < 0 // Since denominator < 0, we have to flip the inequality: // lower < numerator / denominator <==> denominator * lower // > // numerator. if (denominator < 0.0f && numerator < lower * denominator) { // Increase lower. // The segment enters this half-space. lower = numerator / denominator; index = i; } else if (denominator > 0.0f && numerator < upper * denominator) { // Decrease upper. // The segment exits this half-space. 
upper = numerator / denominator; } } if (upper < lower) { return false; } } assert (0.0f <= lower && lower <= argInput.maxFraction); if (index >= 0) { argOutput.fraction = lower; Mat22.mulToOut(argXf.R, m_normals[index], argOutput.normal); // normal = Mul(xf.R, m_normals[index]); return true; } } return false; } public final void computeCentroidToOut(final Vec2[] vs, final int count, final Vec2 out) { assert (count >= 3); out.set(0.0f, 0.0f); float area = 0.0f; if (count == 2) { out.set(vs[0]).addLocal(vs[1]).mulLocal(.5f); return; } // pRef is the reference point for forming triangles. // It's location doesn't change the result (except for rounding error). final Vec2 pRef = pool1; pRef.setZero(); final Vec2 e1 = pool2; final Vec2 e2 = pool3; final float inv3 = 1.0f / 3.0f; for (int i = 0; i < count; ++i) { // Triangle vertices. final Vec2 p1 = pRef; final Vec2 p2 = vs[i]; final Vec2 p3 = i + 1 < count ? vs[i + 1] : vs[0]; e1.set(p2).subLocal(p1); e2.set(p3).subLocal(p1); final float D = Vec2.cross(e1, e2); final float triangleArea = 0.5f * D; area += triangleArea; // Area weighted centroid e1.set(p1).addLocal(p2).addLocal(p3).mulLocal(triangleArea * inv3); out.addLocal(e1); } // Centroid assert (area > Settings.EPSILON); out.mulLocal(1.0f / area); } /** * @see Shape#computeMass(MassData) */ public void computeMass(final MassData massData, float density) { // Polygon mass, centroid, and inertia. // Let rho be the polygon density in mass per unit area. // Then: // mass = rho * int(dA) // centroid.x = (1/mass) * rho * int(x * dA) // centroid.y = (1/mass) * rho * int(y * dA) // I = rho * int((x*x + y*y) * dA) // // We can compute these integrals by summing all the integrals // for each triangle of the polygon. To evaluate the integral // for a single triangle, we make a change of variables to // the (u,v) coordinates of the triangle: // x = x0 + e1x * u + e2x * v // y = y0 + e1y * u + e2y * v // where 0 <= u && 0 <= v && u + v <= 1. 
// // We integrate u from [0,1-v] and then v from [0,1]. // We also need to use the Jacobian of the transformation: // D = cross(e1, e2) // // Simplification: triangle centroid = (1/3) * (p1 + p2 + p3) // // The rest of the derivation is handled by computer algebra. assert (m_vertexCount >= 2); // A line segment has zero mass. if (m_vertexCount == 2) { // massData.center = 0.5f * (m_vertices[0] + m_vertices[1]); massData.center.set(m_vertices[0]).addLocal(m_vertices[1]) .mulLocal(0.5f); massData.mass = 0.0f; massData.I = 0.0f; return; } final Vec2 center = pool1; center.setZero(); float area = 0.0f; float I = 0.0f; // pRef is the reference point for forming triangles. // It's location doesn't change the result (except for rounding error). final Vec2 pRef = pool2; pRef.setZero(); final float k_inv3 = 1.0f / 3.0f; final Vec2 e1 = pool3; final Vec2 e2 = pool4; for (int i = 0; i < m_vertexCount; ++i) { // Triangle vertices. final Vec2 p1 = pRef; final Vec2 p2 = m_vertices[i]; final Vec2 p3 = i + 1 < m_vertexCount ? m_vertices[i + 1] : m_vertices[0]; e1.set(p2); e1.subLocal(p1); e2.set(p3); e2.subLocal(p1); final float D = Vec2.cross(e1, e2); final float triangleArea = 0.5f * D; area += triangleArea; // Area weighted centroid center.x += triangleArea * k_inv3 * (p1.x + p2.x + p3.x); center.y += triangleArea * k_inv3 * (p1.y + p2.y + p3.y); final float px = p1.x, py = p1.y; final float ex1 = e1.x, ey1 = e1.y; final float ex2 = e2.x, ey2 = e2.y; final float intx2 = k_inv3 * (0.25f * (ex1 * ex1 + ex2 * ex1 + ex2 * ex2) + (px * ex1 + px * ex2)) + 0.5f * px * px; final float inty2 = k_inv3 * (0.25f * (ey1 * ey1 + ey2 * ey1 + ey2 * ey2) + (py * ey1 + py * ey2)) + 0.5f * py * py; I += D * (intx2 + inty2); } // Total mass massData.mass = density * area; // Center of mass assert (area > Settings.EPSILON); center.mulLocal(1.0f / area); massData.center.set(center); // Inertia tensor relative to the local origin. 
massData.I = I * density; } /* * Get the local centroid relative to the parent body. / public Vec2 * getCentroid() { return m_centroid.clone(); } */ /** Get the vertices in local coordinates. */ public Vec2[] getVertices() { return m_vertices; } /** Get the edge normal vectors. There is one for each vertex. */ public Vec2[] getNormals() { return m_normals; } /** Get the centroid and apply the supplied transform. */ public Vec2 centroid(final Transform xf) { return Transform.mul(xf, m_centroid); } /** Get the centroid and apply the supplied transform. */ public Vec2 centroidToOut(final Transform xf, final Vec2 out) { Transform.mulToOut(xf, m_centroid, out); return out; } }
/*
    Copyright 1996-2008 Ariba, Inc.

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.

    $Id: //ariba/platform/util/core/ariba/util/io/Mapping.java#9 $
*/

package ariba.util.io;

import java.io.IOException;
import java.net.URL;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import ariba.util.core.ListUtil;
import ariba.util.core.MapUtil;

/**
    Read in mapping file (value, mappedValue) and provide mapping.

    The mapping file is a csv file, with the first 2 columns of each
    row representing the key and its corresponding value. Any rows
    with less than 2 columns are skipped, and extra columns in excess
    of 2 of each row are also skipped.

    <p>
    The file can optionally include in its first row the encoding
    scheme to use. For example, the line "8859_1,," specifies 8859_1
    encoding is to be used. For such cases, this class should be
    instantiated with the encoding parameter being null.</p>

    <p>
    Most files also contain in its first line (if no encoding is
    specified) or 2nd line (if encoding is specified) header
    information (such as the names of the columns). This header will
    not be read. However, some files do not contain the header. For
    these files, the class should be instantiated with the hasHeader
    parameter being false, otherwise, the first key-value pair will
    be skipped.</p>

    <p>
    Comments are indicated by a leading '#' character. <b>Note that
    the keys (that is the first column of each row) must not begin
    with '#'.</b> Currently even wrapping quotes around '#' will be
    treated as comments. Any comments if any, must appear after the
    encoding line (if any) and the header (if any).</p>

    <p>
    <b>Note that this class is not synchronized. </b> It is expected
    that the mapping be first generated when the mapping file is
    read. Only after that should the key-value pairs (or the entire
    map) be accessed. </p>

    @aribaapi documented
*/
public final class Mapping implements CSVConsumer
{
    /** Column index of the key in each CSV row. */
    private static final int ColumnFrom = 0;
    /** Column index of the value in each CSV row. */
    private static final int ColumnTo = 1;

    private final CSVReader csvReader = new CSVReader(this);
    private final CommentChecker commentChecker = new CommentChecker(null);
    /** Forward map: key -> mapped value. */
    private final Map map = MapUtil.map();

    /**
        The reverse map is used to lookup the key for the value.
        Since multiple keys could be mapped to a <code>value</code>,
        the keys are stored in a list.

        @aribaapi private
    */
    private final Map/*<String,List>*/ reverseMap = MapUtil.map();

    private final URL fileURL;
    private final String encoding;
    private final String defaultValue;
    private final boolean hasHeader;

    /**
        Instantiates an instance of this class.

        @param fileURL the URL of the mapping file. Must be a valid URL.
        @param defaultValue the value to be used as the value of any
        key that does not have any mapped value.
        @param hasHeader if true, specifies that the mapping file has
        a header (which is the first line of the file if no encoding
        is specified, or the second line in the file if encoding is
        specified). The header, if present, will not be read.
        @aribaapi documented
    */
    public Mapping (URL fileURL,
                    String defaultValue,
                    boolean hasHeader)
    {
        this(fileURL, null, defaultValue, hasHeader);
    }

    /**
        Instantiates an instance of this class.

        @param fileURL the URL of the mapping file. Must be a valid URL.
        @param encoding the encoding to be used. If null and if the
        encoding specified in the file is a supported encoding, it
        will be used. If it is not a supported encoding, the system
        default encoding will be used.
        @param defaultValue the value to be used as the value of any
        key that does not have any mapped value.
        @aribaapi documented
    */
    public Mapping (URL fileURL,
                    String encoding,
                    String defaultValue)
    {
        this(fileURL, encoding, defaultValue, true);
    }

    /**
        Instantiates an instance of this class.

        @param fileURL the URL of the mapping file. Must be a valid URL.
        @param encoding the encoding to be used. If null and if the
        encoding specified in the file is a supported encoding, it
        will be used. If it is not a supported encoding, the system
        default encoding will be used.
        @param defaultValue the value to be used as the value of any
        key that does not have any mapped value.
        @param hasHeader if true, specifies that the mapping file has
        a header (which is the first line of the file if no encoding
        is specified, or the second line in the file if encoding is
        specified). The header, if present, will not be read.
        @aribaapi documented
    */
    public Mapping (URL fileURL,
                    String encoding,
                    String defaultValue,
                    boolean hasHeader)
    {
        this.fileURL = fileURL;
        this.encoding = encoding;
        this.defaultValue = defaultValue;
        this.hasHeader = hasHeader;
    }

    /**
        Reads the contents of the mapping file.

        @exception IOException when I/O errors occurs.
        @see #map
        @aribaapi documented
    */
    public void read () throws IOException
    {
        if (encoding == null) {
            csvReader.readForSpecifiedEncoding(fileURL);
        }
        else {
            csvReader.read(fileURL, encoding);
        }
        makeImmutable();
    }

    /**
        Returns the value of the given key. Note that read should
        have been called before calling this method.

        @param key the key to map.
        @see #read
        @see #mapCopy
        @return the mapped value
        @aribaapi documented
    */
    public String map (String key)
    {
        if (key == null) {
            return defaultValue(null);
        }
        String mappedValue = (String)map.get(key);
        if (mappedValue == null) {
            return defaultValue(key);
        }
        return mappedValue;
    }

    /**
        Returns the list of keys for the given value. It could return
        null or empty list on a not matched case. The returned list
        is an immutable list. Note that read should have been called
        before calling this method.

        @param value the value to map.
        @see #read
        @see #mapCopy
        @return the mapped list of keys
        @aribaapi ariba
    */
    public List/*<String>*/ reverseMap (String value)
    {
        if (value == null) {
            return null;
        }
        return (List)reverseMap.get(value);
    }

    /**
        Returns a copy of the map.

        @see #map
        @return a copy of the map.
        @aribaapi documented
    */
    public Map mapCopy ()
    {
        return MapUtil.copyMap(map);
    }

    /**
        Returns the configured default when a key has no mapping:
        the defaultValue supplied at construction, or the key itself
        when no defaultValue was supplied.
    */
    private String defaultValue (String value)
    {
        return (defaultValue == null) ? value : defaultValue;
    }

    /**
        CSVConsumer callback: records one key/value pair per row,
        skipping the header row (when configured), comment rows, and
        rows with fewer than two columns.
    */
    public void consumeLineOfTokens (String filePath,
                                     int lineNumber,
                                     List line)
    {
        // ignore the first line if the file has a header
        if (lineNumber == 1 && hasHeader) {
            return;
        }
        if (commentChecker.isComment(line)) {
            return;
        }
        if (ColumnTo < line.size()) {
            Object key = line.get(ColumnFrom);
            Object value = line.get(ColumnTo);
            map.put(key, value);
            // Maintain the reverse index: value -> list of keys.
            List keyList = (List)reverseMap.get(value);
            if (keyList == null) {
                keyList = ListUtil.list();
                reverseMap.put(value, keyList);
            }
            ListUtil.addElementIfAbsent(keyList, key);
        }
    }

    /**
        Makes the internal data structures immutable: every key list
        in the reverse map is replaced by an immutable view. Called
        once at the end of read().
    */
    private void makeImmutable ()
    {
        Iterator iter = reverseMap.keySet().iterator();
        while (iter.hasNext()) {
            String key = (String)iter.next();
            List list = (List)reverseMap.get(key);
            reverseMap.put(key, ListUtil.immutableList(list));
        }
    }
}
package com.gsmarshall.teerecorder.client.view; import com.gsmarshall.teerecorder.client.news.NewsReceiver; import com.gsmarshall.teerecorder.client.news.NotFoundNewsException; import com.gsmarshall.teerecorder.client.recording.service.exception.NotSupportedFileException; import com.gsmarshall.teerecorder.client.recording.service.PlayService; import com.gsmarshall.teerecorder.client.recording.service.RecordService; import com.gsmarshall.teerecorder.client.recording.view.ChooseFileViewer; import com.gsmarshall.teerecorder.client.recording.view.NotificationViewer; import com.gsmarshall.teerecorder.client.recording.view.TimerViewer; import com.gsmarshall.teerecorder.server.news.entity.News; import javafx.application.Platform; import javafx.collections.FXCollections; import javafx.collections.ObservableList; import javafx.event.EventHandler; import javafx.fxml.FXML; import javafx.fxml.Initializable; import javafx.scene.Cursor; import javafx.scene.control.*; import javafx.scene.image.Image; import javafx.scene.image.ImageView; import javafx.scene.input.KeyCode; import javafx.scene.input.KeyCodeCombination; import javafx.scene.input.KeyCombination; import javafx.scene.input.KeyEvent; import javafx.scene.layout.VBox; import javafx.stage.Stage; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.net.URL; import java.util.List; import java.util.ResourceBundle; /** * Controller for client-application.fxml file. 
*/
public class ClientController implements Initializable {

    /* News tab */
    @FXML
    private Label labelNewsInfo;
    @FXML
    private ListView<News> listViewNews;

    /* Record tab */
    @FXML
    private Button buttonRecordPause;
    @FXML
    private TextArea textAreaRecord;

    /* Play tab */
    @FXML
    private TextArea textAreaPlay;
    @FXML
    private Button buttonPlayPause;

    @FXML
    private Label notificationLabel;
    @FXML
    private Label timerLabel;
    @FXML
    private VBox bottomVbox;

    // Collaborators created in initController(); stage is injected later via saveStage().
    private NewsReceiver newsReceiver;
    private RecordService recordService;
    private PlayService playService;
    private NotificationViewer notificationViewer;
    private TimerViewer timerViewer;
    private Stage stage;

    /**
     * FXML entry point: builds the service layer and kicks off the first
     * asynchronous news download.
     */
    @Override
    public void initialize(URL url, ResourceBundle resourceBundle) {
        try {
            initController();
        } catch (FileNotFoundException e) {
            // NOTE(review): failure to construct the services is only printed;
            // the controller then runs with null services and later clicks will
            // NPE. Consider surfacing this to the user instead.
            e.printStackTrace();
        }
        setLatestNews();
    }

    // Wires up all services and viewers; may throw if a resource file is missing.
    private void initController() throws FileNotFoundException {
        newsReceiver = new NewsReceiver();
        recordService = new RecordService();
        notificationViewer = new NotificationViewer(notificationLabel);
        timerViewer = new TimerViewer(timerLabel);
        playService = new PlayService(timerViewer, notificationViewer);
        notificationViewer.showInfo("Hello in Teerecorder!");

        // Image image = new Image("test.png");
        // ImageView iv = new ImageView(image);
        // recordTab.setGraphic(iv);
    }

    /**
     * Downloads the latest news on a background thread and publishes the
     * result (or an error label) back onto the JavaFX application thread.
     */
    private void setLatestNews() {
        labelNewsInfo.setVisible(false);
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    List<News> latestNews = null;
                    latestNews = newsReceiver.getLatestNews();
                    final ObservableList<News> items = FXCollections.observableArrayList(latestNews);
                    // UI mutation must happen on the FX thread.
                    Platform.runLater(new Runnable() {
                        @Override
                        public void run() {
                            listViewNews.setItems(items);
                        }
                    });
                } catch (NotFoundNewsException e) {
                    Platform.runLater(new Runnable() {
                        @Override
                        public void run() {
                            labelNewsInfo.setText("Cannot download news from server.");
                            labelNewsInfo.setVisible(true);
                        }
                    });
                }
            }
        }).start();
    }

    // Tab handlers: the bottom bar (timer + notifications) is only relevant
    // on the record/play tabs.
    @FXML
    private void selectNewsTab() {
        if (bottomVbox != null)
            bottomVbox.setVisible(false);
        setLatestNews();
    }

    @FXML
    private void selectAboutTab() {
        if (bottomVbox != null)
            bottomVbox.setVisible(false);
    }

    @FXML
    private void selectRecordTab() {
        if (bottomVbox != null)
            bottomVbox.setVisible(true);
    }

    @FXML
    private void selectPlayTab() {
        if (bottomVbox != null)
            bottomVbox.setVisible(true);
    }

    @FXML
    private void buttonRefreshNewsAction() {
        setLatestNews();
    }

    // Forwards every keystroke in the record area to the recorder.
    @FXML
    private void typedKeyAction(KeyEvent keyEvent) {
        recordService.typedKey(keyEvent);
    }

    /** Stops an active recording; un-pauses the button label first if needed. */
    @FXML
    private void recordStopAction() {
        if (!recordService.getRecordState()) {
            notificationViewer.showWarning("Nothing to stop");
        } else if (recordService.getRecordPauseState()) {
            buttonRecordPause.setText("Pause");
        }
        if (recordService.stop()) {
            timerViewer.stop();
            notificationViewer.showInfo("Stopped recording");
        }
    }

    /**
     * Starts recording into a user-chosen file (falls back to tempfile.bin
     * when the chooser is cancelled). Refuses to record while playback runs.
     */
    @FXML
    private void recordStartAction() {
        if (playService.getPlayState()) {
            notificationViewer.showError("You're playing text now");
            return;
        }
        File file = null;
        if (!recordService.getRecordState()) {
            file = ChooseFileViewer.getSaveFile(stage);
            if (file == null) {
                //notificationViewer.showError("No file selected to save");
                //return;
                file = new File("tempfile.bin");
            }
            timerViewer.start();
            notificationViewer.showInfo("Started recording");
        } else {
            notificationViewer.showWarning("You already started recording");
        }
        // NOTE(review): start() is reached even on the "already recording"
        // branch above, with file still null — presumably RecordService
        // tolerates that; confirm, or add an early return after the warning.
        recordService.start(textAreaRecord, textAreaRecord.getCaretPosition(), file);
    }

    /** Toggles pause/unpause of an active recording. */
    @FXML
    private void recordPauseAction() {
        if (recordService.getRecordState()) {
            if (!recordService.getRecordPauseState()) {
                buttonRecordPause.setText("Unpause");
                timerViewer.pause();
                notificationViewer.showInfo("Paused recording");
            } else {
                buttonRecordPause.setText("Pause");
                timerViewer.unPause();
                notificationViewer.showInfo("Unpaused recording");
            }
        } else {
            notificationViewer.showWarning("Nothing to pause");
        }
        // NOTE(review): getCursor() returns the mouse Cursor, not the caret
        // position — this looks like it was meant to be getCaretPosition();
        // verify against RecordService.pause's expected arguments.
        recordService.pause(textAreaRecord.getText(), textAreaRecord.getCursor());
    }

    /**
     * Starts playback of a user-chosen recording file (falls back to
     * tempfile.bin when the chooser is cancelled). Refuses while recording.
     */
    @FXML
    private void playStartAction() {
        if (recordService.getRecordState()) {
            notificationViewer.showError("You're recording text now");
            return;
        }
        File file = null;
        if (playService.getPlayState()) {
            notificationViewer.showWarning("You're playing already");
            return;
        } else {
            file = ChooseFileViewer.getOpenFile(stage);
            if (file == null) {
                //notificationViewer.showError("No file selected to play");
                //return;
                file = new File("tempfile.bin");
            }
        }
        try {
            playService.play(file, textAreaPlay);
            timerViewer.start();
        } catch (NotSupportedFileException e) {
            notificationViewer.showError("Selected bad file");
        }
    }

    /** Toggles pause/unpause of active playback. */
    @FXML
    private void playPauseAction() {
        if (playService.getPlayState()) {
            if (!playService.getPlayPauseState()) {
                timerViewer.pause();
                playService.pause();
                buttonPlayPause.setText("Unpause");
                notificationViewer.showInfo("Paused playing");
            } else {
                timerViewer.unPause();
                playService.unPause();
                buttonPlayPause.setText("Pause");
                notificationViewer.showInfo("Unpaused playing");
            }
        } else {
            notificationViewer.showWarning("You aren't playing now");
        }
    }

    /** Stops playback; resets the pause button label if it was paused. */
    @FXML
    private void playStopAction() {
        if (!playService.getPlayState()) {
            notificationViewer.showWarning("Nothing to stop");
            return;
        } else if (playService.getPlayPauseState()) {
            buttonPlayPause.setText("Pause");
        }
        timerViewer.stop();
        playService.stop();
        notificationViewer.showInfo("Stopped playing");
    }

    // setting up shortcuts
    // Ctrl+S/P/Space drive recording; Ctrl+Q/W/E drive playback.
    private void setStageEvents() {
        // for recording
        final KeyCombination startRecordingKeys = new KeyCodeCombination(KeyCode.S,
                KeyCombination.CONTROL_DOWN);
        final KeyCombination pauseRecordingKeys = new KeyCodeCombination(KeyCode.P,
                KeyCombination.CONTROL_DOWN);
        final KeyCombination stopRecordingKeys = new KeyCodeCombination(KeyCode.SPACE,
                KeyCombination.CONTROL_DOWN);
        // for playing
        final KeyCombination openPlayKeys = new KeyCodeCombination(KeyCode.Q,
                KeyCombination.CONTROL_DOWN);
        final KeyCombination pausePlayKeys = new KeyCodeCombination(KeyCode.W,
                KeyCombination.CONTROL_DOWN);
        final KeyCombination stopPlayKeys = new KeyCodeCombination(KeyCode.E,
                KeyCombination.CONTROL_DOWN);

        stage.addEventHandler(KeyEvent.KEY_RELEASED, new EventHandler<KeyEvent>() {
            @Override
            public void handle(KeyEvent event) {
                if (startRecordingKeys.match(event)) {
                    recordStartAction();
                } else if (pauseRecordingKeys.match(event)) {
                    recordPauseAction();
                } else if (stopRecordingKeys.match(event)) {
                    recordStopAction();
                } else if (openPlayKeys.match(event)) {
                    playStartAction();
                } else if (pausePlayKeys.match(event)) {
                    playPauseAction();
                } else if (stopPlayKeys.match(event)) {
                    playStopAction();
                }
            }
        });
    }

    /**
     * Called by the application after FXML loading to hand over the primary
     * stage; registers the keyboard shortcuts on it.
     */
    public void saveStage(Stage primaryStage) {
        this.stage = primaryStage;
        setStageEvents();
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.ingest.common;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.RandomDocumentPicks;
import org.elasticsearch.test.ESTestCase;

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Parameterized tests for {@code CsvProcessor}: each test runs once per
 * (quote, separator) combination produced by {@link #parameters()}.
 * An empty quote string means "no quoting".
 */
public class CsvProcessorTests extends ESTestCase {

    private static final Character[] SEPARATORS = new Character[]{',', ';', '|', '.', '\t'};
    private static final String[] QUOTES = new String[]{"'", "\"", ""};

    // Current parameter combination for this test instance.
    private final String quote;
    private final char separator;

    public CsvProcessorTests(@Name("quote") String quote, @Name("separator") char separator) {
        this.quote = quote;
        this.separator = separator;
    }

    // Cartesian product of all separators and quote styles.
    @ParametersFactory
    public static Iterable<Object[]> parameters() {
        LinkedList<Object[]> list = new LinkedList<>();
        for (Character separator : SEPARATORS) {
            for (String quote : QUOTES) {
                list.add(new Object[]{quote, separator});
            }
        }
        return list;
    }

    /** Same number of CSV fields as headers: every header gets its value. */
    public void testExactNumberOfFields() {
        int numItems = randomIntBetween(2, 10);
        Map<String, String> items = new LinkedHashMap<>();
        for (int i = 0; i < numItems; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
        }
        String[] headers = items.keySet().toArray(new String[numItems]);
        String csv = items.values().stream().map(v -> quote + v + quote).collect(Collectors.joining(separator + ""));

        IngestDocument ingestDocument = processDocument(headers, csv);

        items.forEach((key, value) -> assertEquals(value, ingestDocument.getFieldValue(key, String.class)));
    }

    /** An empty field value is skipped entirely (no field set) by default. */
    public void testEmptyValues() {
        int numItems = randomIntBetween(5, 10);
        Map<String, String> items = new LinkedHashMap<>();
        for (int i = 0; i < 3; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
        }
        String emptyKey = randomAlphaOfLengthBetween(5, 10);
        items.put(emptyKey, "");
        for (int i = 0; i < numItems - 4; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
        }
        String[] headers = items.keySet().toArray(new String[numItems]);
        String csv = items.values().stream().map(v -> quote + v + quote).collect(Collectors.joining(separator + ""));

        IngestDocument ingestDocument = processDocument(headers, csv);

        items.forEach((key, value) -> {
            if (emptyKey.equals(key)) {
                assertFalse(ingestDocument.hasField(key));
            } else {
                assertEquals(value, ingestDocument.getFieldValue(key, String.class));
            }
        });
    }

    /** With an emptyValue configured, empty fields are set to that value (any type). */
    public void testEmptyValuesReplace() {
        int numItems = randomIntBetween(5, 10);
        Map<String, String> items = new LinkedHashMap<>();
        for (int i = 0; i < 3; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
        }
        String emptyKey = randomAlphaOfLengthBetween(5, 10);
        items.put(emptyKey, "");
        for (int i = 0; i < numItems - 4; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
        }
        String[] headers = items.keySet().toArray(new String[numItems]);
        String csv = items.values().stream().map(v -> quote + v + quote).collect(Collectors.joining(separator + ""));

        // String replacement value.
        IngestDocument ingestDocument = processDocument(headers, csv, true, "");

        items.forEach((key, value) -> {
            if (emptyKey.equals(key)) {
                assertEquals("", ingestDocument.getFieldValue(key, String.class));
            } else {
                assertEquals(value, ingestDocument.getFieldValue(key, String.class));
            }
        });

        // Non-string (Integer) replacement value.
        IngestDocument ingestDocument2 = processDocument(headers, csv, true, 0);

        items.forEach((key, value) -> {
            if (emptyKey.equals(key)) {
                assertEquals(0, (int) ingestDocument2.getFieldValue(key, Integer.class));
            } else {
                assertEquals(value, ingestDocument2.getFieldValue(key, String.class));
            }
        });
    }

    /** Fewer CSV fields than headers: extra headers are simply not set. */
    public void testLessFieldsThanHeaders() {
        int numItems = randomIntBetween(4, 10);
        Map<String, String> items = new LinkedHashMap<>();
        for (int i = 0; i < numItems; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
        }
        String[] headers = items.keySet().toArray(new String[numItems]);
        String csv = items.values().stream().map(v -> quote + v + quote).limit(3).collect(Collectors.joining(separator + ""));

        IngestDocument ingestDocument = processDocument(headers, csv);

        items.keySet().stream().skip(3).forEach(key -> assertFalse(ingestDocument.hasField(key)));
        items.entrySet().stream().limit(3).forEach(e -> assertEquals(e.getValue(),
            ingestDocument.getFieldValue(e.getKey(), String.class)));
    }

    /** More CSV fields than headers: trailing fields are ignored. */
    public void testLessHeadersThanFields() {
        int numItems = randomIntBetween(5, 10);
        Map<String, String> items = new LinkedHashMap<>();
        for (int i = 0; i < numItems; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
        }
        String[] headers = items.keySet().stream().limit(3).toArray(String[]::new);
        String csv = items.values().stream().map(v -> quote + v + quote).collect(Collectors.joining(separator + ""));

        IngestDocument ingestDocument = processDocument(headers, csv);

        items.entrySet().stream().limit(3).forEach(e -> assertEquals(e.getValue(),
            ingestDocument.getFieldValue(e.getKey(), String.class)));
    }

    /** Degenerate case: a single header and a single value. */
    public void testSingleField() {
        String[] headers = new String[]{randomAlphaOfLengthBetween(5, 10)};
        String value = randomAlphaOfLengthBetween(5, 10);
        String csv = quote + value + quote;

        IngestDocument ingestDocument = processDocument(headers, csv);

        assertEquals(value, ingestDocument.getFieldValue(headers[0], String.class));
    }

    /** A doubled quote inside a quoted field is unescaped to a single quote. */
    public void testEscapedQuote() {
        int numItems = randomIntBetween(2, 10);
        Map<String, String> items = new LinkedHashMap<>();
        for (int i = 0; i < numItems; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10) + quote + quote + randomAlphaOfLengthBetween(5
                , 10) + quote + quote);
        }
        String[] headers = items.keySet().toArray(new String[numItems]);
        String csv = items.values().stream().map(v -> quote + v + quote).collect(Collectors.joining(separator + ""));

        IngestDocument ingestDocument = processDocument(headers, csv);

        items.forEach((key, value) -> assertEquals(value.replace(quote + quote, quote),
            ingestDocument.getFieldValue(key, String.class)));
    }

    /** Quoted fields may contain separators and newlines verbatim. */
    public void testQuotedStrings() {
        assumeFalse("quote needed", quote.isEmpty());
        int numItems = randomIntBetween(2, 10);
        Map<String, String> items = new LinkedHashMap<>();
        for (int i = 0; i < numItems; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10),
                separator + randomAlphaOfLengthBetween(5, 10) + separator + "\n\r" + randomAlphaOfLengthBetween(5, 10));
        }
        String[] headers = items.keySet().toArray(new String[numItems]);
        String csv = items.values().stream().map(v -> quote + v + quote).collect(Collectors.joining(separator + ""));

        IngestDocument ingestDocument = processDocument(headers, csv);

        items.forEach((key, value) -> assertEquals(value.replace(quote + quote, quote),
            ingestDocument.getFieldValue(key, String.class)));
    }

    /** Leading, interior and trailing empty fields are all skipped. */
    public void testEmptyFields() {
        int numItems = randomIntBetween(5, 10);
        Map<String, String> items = new LinkedHashMap<>();
        for (int i = 0; i < numItems; i++) {
            items.put(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10));
        }
        String[] headers = items.keySet().toArray(new String[numItems]);
        String csv = items.values().stream().map(v -> quote + v + quote).limit(numItems - 1).skip(3)
            .collect(Collectors.joining(separator + ""));

        IngestDocument ingestDocument = processDocument(headers,
            "" + separator + "" + separator + "" + separator + csv + separator + separator + "abc");

        items.keySet().stream().limit(3).forEach(key -> assertFalse(ingestDocument.hasField(key)));
        items.entrySet().stream().limit(numItems - 1).skip(3).forEach(e -> assertEquals(e.getValue(),
            ingestDocument.getFieldValue(e.getKey(), String.class)));
        items.keySet().stream().skip(numItems - 1).forEach(key -> assertFalse(ingestDocument.hasField(key)));
    }

    /** Malformed input (unbalanced quotes, bare newlines) must be rejected. */
    public void testWrongStrings() throws Exception {
        assumeTrue("single run only", quote.isEmpty());
        expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "abc\"abc"));
        expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "\"abc\"asd"));
        expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "\"abcasd"));
        expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "abc\nabc"));
        expectThrows(IllegalArgumentException.class, () -> processDocument(new String[]{"a"}, "abc\rabc"));
    }

    /** Trimming strips whitespace outside quotes but preserves it inside. */
    public void testQuotedWhitespaces() {
        assumeFalse("quote needed", quote.isEmpty());
        IngestDocument document = processDocument(new String[]{"a", "b", "c", "d"},
            "  abc   " + separator + " def" + separator + "ghi " + separator + " " + quote + "  ooo  " + quote);
        assertEquals("abc", document.getFieldValue("a", String.class));
        assertEquals("def", document.getFieldValue("b", String.class));
        assertEquals("ghi", document.getFieldValue("c", String.class));
        assertEquals("  ooo  ", document.getFieldValue("d", String.class));
    }

    /** With trim=false, surrounding whitespace of unquoted fields is kept. */
    public void testUntrimmed() {
        assumeFalse("quote needed", quote.isEmpty());
        IngestDocument document = processDocument(new String[]{"a", "b", "c", "d", "e", "f"},
            "  abc   " + separator + " def" + separator + "ghi " + separator + " "
                + quote + "ooo" + quote + "  " + separator + "  " + quote + "jjj" + quote + " ", false);
        assertEquals("  abc   ", document.getFieldValue("a", String.class));
        assertEquals(" def", document.getFieldValue("b", String.class));
        assertEquals("ghi ", document.getFieldValue("c", String.class));
        assertEquals("ooo", document.getFieldValue("d", String.class));
        assertEquals("jjj", document.getFieldValue("e", String.class));
        assertFalse(document.hasField("f"));
    }

    /** ignore_missing=true is a no-op on absent fields; false throws. */
    public void testIgnoreMissing() {
        assumeTrue("single run only", quote.isEmpty());
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = randomAlphaOfLength(5);
        if (ingestDocument.hasField(fieldName)) {
            ingestDocument.removeField(fieldName);
        }
        CsvProcessor processor = new CsvProcessor(randomAlphaOfLength(5), null, fieldName, new String[]{"a"}, false, ',', '"', true, null);
        processor.execute(ingestDocument);
        CsvProcessor processor2 = new CsvProcessor(randomAlphaOfLength(5), null, fieldName, new String[]{"a"}, false, ',', '"', false,
            null);
        expectThrows(IllegalArgumentException.class, () -> processor2.execute(ingestDocument));
    }

    /** With zero headers the processor leaves the document untouched. */
    public void testEmptyHeaders() throws Exception {
        assumeTrue("single run only", quote.isEmpty());
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        String fieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, "abc,abc");
        HashMap<String, Object> metadata = new HashMap<>(ingestDocument.getSourceAndMetadata());

        CsvProcessor processor = new CsvProcessor(randomAlphaOfLength(5), null, fieldName, new String[0], false, ',', '"', false, null);
        processor.execute(ingestDocument);

        assertEquals(metadata, ingestDocument.getSourceAndMetadata());
    }

    // Helper: run a CsvProcessor over a fresh random document (trim=true, no emptyValue).
    private IngestDocument processDocument(String[] headers, String csv) {
        return processDocument(headers, csv, true);
    }

    // Helper: run with explicit trim setting, no emptyValue.
    private IngestDocument processDocument(String[] headers, String csv, boolean trim) {
        return processDocument(headers, csv, trim, null);
    }

    // Core helper: stores `csv` under a random field on a random document,
    // clears any pre-existing header fields, then executes the processor
    // built from the current (quote, separator) parameters.
    private IngestDocument processDocument(String[] headers, String csv, boolean trim, Object emptyValue) {
        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
        Arrays.stream(headers).filter(ingestDocument::hasField).forEach(ingestDocument::removeField);

        String fieldName = randomAlphaOfLength(11);
        ingestDocument.setFieldValue(fieldName, csv);
        char quoteChar = quote.isEmpty() ? '"' : quote.charAt(0);
        CsvProcessor processor = new CsvProcessor(randomAlphaOfLength(5), null, fieldName, headers, trim, separator, quoteChar,
            false, emptyValue);
        processor.execute(ingestDocument);
        return ingestDocument;
    }
}
package net.simonvt.menudrawer; import android.app.Activity; import android.content.Context; import android.graphics.Canvas; import android.graphics.drawable.GradientDrawable; import android.util.AttributeSet; import android.view.MotionEvent; public class TopDrawer extends VerticalDrawer { private int mIndicatorLeft; TopDrawer(Activity activity, int dragMode) { super(activity, dragMode); } public TopDrawer(Context context) { super(context); } public TopDrawer(Context context, AttributeSet attrs) { super(context, attrs); } public TopDrawer(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); } @Override public void openMenu(boolean animate) { animateOffsetTo(mMenuSize, 0, animate); } @Override public void closeMenu(boolean animate) { animateOffsetTo(0, 0, animate); } @Override public void setDropShadowColor(int color) { final int endColor = color & 0x00FFFFFF; mDropShadowDrawable = new GradientDrawable(GradientDrawable.Orientation.BOTTOM_TOP, new int[] { color, endColor, }); invalidate(); } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { final int width = r - l; final int height = b - t; final int offsetPixels = (int) mOffsetPixels; mMenuContainer.layout(0, 0, width, mMenuSize); offsetMenu(offsetPixels); if (USE_TRANSLATIONS) { mContentContainer.layout(0, 0, width, height); } else { mContentContainer.layout(0, offsetPixels, width, height + offsetPixels); } } /** * Offsets the menu relative to its original position based on the position of the content. * * @param offsetPixels The number of pixels the content if offset. 
*/ private void offsetMenu(int offsetPixels) { if (mOffsetMenu && mMenuSize != 0) { final int menuSize = mMenuSize; final float openRatio = (menuSize - (float) offsetPixels) / menuSize; if (USE_TRANSLATIONS) { if (offsetPixels > 0) { final int offset = (int) (0.25f * (-openRatio * menuSize)); mMenuContainer.setTranslationY(offset); } else { mMenuContainer.setTranslationY(-menuSize); } } else { final int oldMenuTop = mMenuContainer.getTop(); final int offset = (int) (0.25f * (-openRatio * menuSize)) - oldMenuTop; mMenuContainer.offsetTopAndBottom(offset); mMenuContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE); } } } @Override protected void drawDropShadow(Canvas canvas, int offsetPixels) { final int width = getWidth(); mDropShadowDrawable.setBounds(0, offsetPixels - mDropShadowSize, width, offsetPixels); mDropShadowDrawable.draw(canvas); } @Override protected void drawMenuOverlay(Canvas canvas, int offsetPixels) { final int width = getWidth(); final float openRatio = ((float) offsetPixels) / mMenuSize; mMenuOverlay.setBounds(0, 0, width, offsetPixels); mMenuOverlay.setAlpha((int) (MAX_MENU_OVERLAY_ALPHA * (1.f - openRatio))); mMenuOverlay.draw(canvas); } @Override protected void drawIndicator(Canvas canvas, int offsetPixels) { if (mActiveView != null && mActiveView.getParent() != null) { Integer position = (Integer) mActiveView.getTag(R.id.mdActiveViewPosition); final int pos = position == null ? 
0 : position; if (pos == mActivePosition) { final int menuHeight = mMenuSize; final int indicatorHeight = mActiveIndicator.getHeight(); final float openRatio = ((float) offsetPixels) / menuHeight; mActiveView.getDrawingRect(mActiveRect); offsetDescendantRectToMyCoords(mActiveView, mActiveRect); final int indicatorWidth = mActiveIndicator.getWidth(); final float interpolatedRatio = 1.f - INDICATOR_INTERPOLATOR.getInterpolation((1.f - openRatio)); final int interpolatedHeight = (int) (indicatorHeight * interpolatedRatio); final int indicatorTop = offsetPixels - interpolatedHeight; if (mIndicatorAnimating) { final int finalLeft = mActiveRect.left + ((mActiveRect.width() - indicatorWidth) / 2); final int startLeft = mIndicatorStartPos; final int diff = finalLeft - startLeft; final int startOffset = (int) (diff * mIndicatorOffset); mIndicatorLeft = startLeft + startOffset; } else { mIndicatorLeft = mActiveRect.left + ((mActiveRect.width() - indicatorWidth) / 2); } canvas.save(); canvas.clipRect(mIndicatorLeft, indicatorTop, mIndicatorLeft + indicatorWidth, offsetPixels); canvas.drawBitmap(mActiveIndicator, mIndicatorLeft, indicatorTop, null); canvas.restore(); } } } @Override protected int getIndicatorStartPos() { return mIndicatorLeft; } @Override protected void initPeekScroller() { final int dx = mMenuSize / 3; mPeekScroller.startScroll(0, 0, dx, 0, PEEK_DURATION); } @Override protected void onOffsetPixelsChanged(int offsetPixels) { if (USE_TRANSLATIONS) { mContentContainer.setTranslationY(offsetPixels); offsetMenu(offsetPixels); invalidate(); } else { mContentContainer.offsetTopAndBottom(offsetPixels - mContentContainer.getTop()); offsetMenu(offsetPixels); invalidate(); } } ////////////////////////////////////////////////////////////////////// // Touch handling ////////////////////////////////////////////////////////////////////// @Override protected boolean isContentTouch(MotionEvent ev) { return ev.getY() > mOffsetPixels; } @Override protected boolean 
onDownAllowDrag(MotionEvent ev) { return (!mMenuVisible && mInitialMotionY <= mTouchSize) || (mMenuVisible && mInitialMotionY >= mOffsetPixels); } @Override protected boolean onMoveAllowDrag(MotionEvent ev, float diff) { return (!mMenuVisible && mInitialMotionY <= mTouchSize && (diff > 0)) || (mMenuVisible && mInitialMotionY >= mOffsetPixels); } @Override protected void onMoveEvent(float dx) { setOffsetPixels(Math.min(Math.max(mOffsetPixels + dx, 0), mMenuSize)); } @Override protected void onUpEvent(MotionEvent ev) { final int offsetPixels = (int) mOffsetPixels; if (mIsDragging) { mVelocityTracker.computeCurrentVelocity(1000, mMaxVelocity); final int initialVelocity = (int) mVelocityTracker.getXVelocity(); mLastMotionY = ev.getY(); animateOffsetTo(mVelocityTracker.getYVelocity() > 0 ? mMenuSize : 0, initialVelocity, true); // Close the menu when content is clicked while the menu is visible. } else if (mMenuVisible && ev.getY() > offsetPixels) { closeMenu(); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.ocm.query.impl;

import java.util.Map;

import javax.jcr.ValueFactory;

import org.apache.jackrabbit.ocm.manager.atomictypeconverter.AtomicTypeConverter;
import org.apache.jackrabbit.ocm.mapper.model.ClassDescriptor;
import org.apache.jackrabbit.ocm.mapper.model.FieldDescriptor;
import org.apache.jackrabbit.ocm.query.Filter;
import org.apache.jackrabbit.ocm.reflection.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * {@link org.apache.jackrabbit.ocm.query.Filter} implementation that
 * incrementally builds an XPath predicate string. Each add* method maps a
 * bean attribute name to its JCR property name (via the class descriptor)
 * and appends a new clause, and-ed with whatever was added before.
 *
 * @author <a href="mailto:christophe.lombart@sword-technologies.com">Christophe Lombart</a>
 * @author <a href="mailto:the_mindstorm[at]evolva[dot]ro">Alex Popescu</a>
 */
public class FilterImpl implements Filter {

    private final static Logger log = LoggerFactory.getLogger(FilterImpl.class);

    // The bean class this filter queries for.
    private Class clazz;

    private String scope = "";
    private String nodeName = "*";
    // The accumulated XPath predicate (without surrounding brackets).
    private String jcrExpression = "";

    private ClassDescriptor classDescriptor;
    // Maps java.lang.Class -> AtomicTypeConverter for value formatting.
    private Map atomicTypeConverters;
    private ValueFactory valueFactory;

    /**
     * Constructor.
     *
     * @param classDescriptor descriptor used to resolve attribute -> JCR property names
     * @param atomicTypeConverters converters used to render values into XPath literals
     * @param clazz the filtered bean class
     * @param valueFactory JCR value factory handed to the converters
     */
    public FilterImpl(ClassDescriptor classDescriptor, Map atomicTypeConverters, Class clazz,
                      ValueFactory valueFactory) {
        this.clazz = clazz;
        this.atomicTypeConverters = atomicTypeConverters;
        this.classDescriptor = classDescriptor;
        this.valueFactory = valueFactory;
    }

    // Shallow copy: shares descriptor/converter maps, resets no state because
    // the new instance starts with an empty expression only via its own fields.
    public Object clone() {
        return new FilterImpl(classDescriptor, atomicTypeConverters, clazz, valueFactory);
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#getFilterClass()
     */
    public Class getFilterClass() {
        return clazz;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#setScope(java.lang.String)
     */
    public void setScope(String scope) {
        this.scope = scope;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#getScope()
     */
    public String getScope() {
        return this.scope;
    }

    public String getNodeName() {
        return nodeName;
    }

    public void setNodeName(String nodeName) {
        this.nodeName = nodeName;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addContains(java.lang.String, java.lang.String)
     */
    public Filter addContains(String scope, String fullTextSearch) {
        // SECURITY NOTE(review): fullTextSearch is concatenated into the XPath
        // expression unescaped; a value containing a quote can break out of the
        // literal (XPath injection). Callers must not pass untrusted input.
        String jcrExpression = null;
        if (scope.equals(".")) {
            jcrExpression = "jcr:contains(., '" + fullTextSearch + "')";
        } else {
            jcrExpression = "jcr:contains(@" + this.getJcrFieldName(scope) + ", '" + fullTextSearch + "')";
        }
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addBetween(java.lang.String, java.lang.Object, java.lang.Object)
     */
    public Filter addBetween(String fieldAttributeName, Object value1, Object value2) {
        String jcrExpression = "( @" + this.getJcrFieldName(fieldAttributeName) + " >= "
                + this.getStringValue(fieldAttributeName, value1)
                + " and @" + this.getJcrFieldName(fieldAttributeName) + " <= "
                + this.getStringValue(fieldAttributeName, value2) + ")";
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addEqualTo(java.lang.String, java.lang.Object)
     */
    public Filter addEqualTo(String fieldAttributeName, Object value) {
        String jcrExpression = "@" + this.getJcrFieldName(fieldAttributeName) + " = "
                + this.getStringValue(fieldAttributeName, value);
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addGreaterOrEqualThan(java.lang.String, java.lang.Object)
     */
    public Filter addGreaterOrEqualThan(String fieldAttributeName, Object value) {
        String jcrExpression = "@" + this.getJcrFieldName(fieldAttributeName) + " >= "
                + this.getStringValue(fieldAttributeName, value);
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addGreaterThan(java.lang.String, java.lang.Object)
     */
    public Filter addGreaterThan(String fieldAttributeName, Object value) {
        String jcrExpression = "@" + this.getJcrFieldName(fieldAttributeName) + " > "
                + this.getStringValue(fieldAttributeName, value);
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addLessOrEqualThan(java.lang.String, java.lang.Object)
     */
    public Filter addLessOrEqualThan(String fieldAttributeName, Object value) {
        String jcrExpression = "@" + this.getJcrFieldName(fieldAttributeName) + " <= "
                + this.getStringValue(fieldAttributeName, value);
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addLessOrEqualThan(java.lang.String, java.lang.Object)
     */
    public Filter addLessThan(String fieldAttributeName, Object value) {
        String jcrExpression = "@" + this.getJcrFieldName(fieldAttributeName) + " < "
                + this.getStringValue(fieldAttributeName, value);
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addLike(java.lang.String, java.lang.Object)
     */
    public Filter addLike(String fieldAttributeName, Object value) {
        String jcrExpression = "jcr:like(" + "@" + this.getJcrFieldName(fieldAttributeName)
                + ", '" + value + "')";
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addNotEqualTo(java.lang.String, java.lang.Object)
     */
    public Filter addNotEqualTo(String fieldAttributeName, Object value) {
        String jcrExpression = "@" + this.getJcrFieldName(fieldAttributeName) + " != "
                + this.getStringValue(fieldAttributeName, value);
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addNotNull(java.lang.String)
     */
    public Filter addNotNull(String fieldAttributeName) {
        String jcrExpression = "@" + this.getJcrFieldName(fieldAttributeName);
        addExpression(jcrExpression);
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addIsNull(java.lang.String)
     */
    public Filter addIsNull(String fieldAttributeName) {
        String jcrExpression = "not(@" + this.getJcrFieldName(fieldAttributeName) + ")";
        addExpression(jcrExpression);
        return this;
    }

    /**
     * Adds one clause matching the attribute against ANY of the given values:
     * {@code (@prop = v1 or @prop = v2 or ...)}, and-ed with the existing
     * expression.
     *
     * BUG FIX: the previous implementation or-ed each equality into the whole
     * expression without grouping and then redundantly called
     * {@code addExpression} with the LAST equality once more, so the final
     * term was appended twice (the second time with " and "), producing an
     * incorrect query. An empty value list is now a no-op instead of leaving
     * a dangling " and ".
     *
     * @param fieldAttributeName the bean attribute to compare
     * @param valueList the accepted values; null or empty leaves the filter unchanged
     */
    public Filter addOrFilter(String fieldAttributeName, String[] valueList) {
        if (valueList == null || valueList.length == 0) {
            return this;
        }
        StringBuilder orClause = new StringBuilder();
        for (Object object : valueList) {
            if (orClause.length() > 0) {
                orClause.append(" or ");
            }
            orClause.append("@").append(this.getJcrFieldName(fieldAttributeName))
                    .append(" = ").append(this.getStringValue(fieldAttributeName, object));
        }
        // Parenthesize so the or-group binds correctly against prior "and" terms,
        // consistent with addOrFilter(Filter) below.
        addExpression("(" + orClause + ")");
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addOrFilter(org.apache.jackrabbit.ocm.query.Filter)
     */
    public Filter addOrFilter(Filter filter) {
        FilterImpl theFilter = (FilterImpl) filter;
        if (theFilter.getJcrExpression() != null && theFilter.getJcrExpression().length() > 0) {
            if (null == jcrExpression || "".equals(jcrExpression)) {
                jcrExpression = ((FilterImpl) filter).getJcrExpression();
            } else {
                jcrExpression = "(" + jcrExpression + ") or ( "
                        + ((FilterImpl) filter).getJcrExpression() + ")";
            }
        }
        return this;
    }

    /**
     * @see org.apache.jackrabbit.ocm.query.Filter#addAndFilter(Filter)
     */
    public Filter addAndFilter(Filter filter) {
        FilterImpl theFilter = (FilterImpl) filter;
        if (theFilter.getJcrExpression() != null && theFilter.getJcrExpression().length() > 0) {
            if (null == jcrExpression || "".equals(jcrExpression)) {
                jcrExpression = ((FilterImpl) filter).getJcrExpression();
            } else {
                jcrExpression = "(" + jcrExpression + ") and ( "
                        + ((FilterImpl) filter).getJcrExpression() + ")";
            }
        }
        return this;
    }

    public Filter addJCRExpression(String jcrExpression) {
        addExpression(jcrExpression);
        return this;
    }

    public Filter orJCRExpression(String jcrExpression) {
        orExpression(jcrExpression);
        return this;
    }

    // Resolves a bean attribute to its JCR property name; logs when unmapped
    // (the null return will then surface in the built expression).
    private String getJcrFieldName(String fieldAttribute) {
        String jcrFieldName = classDescriptor.getJcrName(fieldAttribute);
        if (jcrFieldName == null) {
            log.error("Impossible to find the jcrFieldName for the attribute :" + fieldAttribute);
        }
        return jcrFieldName;
    }

    // Renders a value into an XPath literal via the field's configured
    // converter, or a default converter keyed on the value's runtime class.
    private String getStringValue(String fieldName, Object value) {
        FieldDescriptor fieldDescriptor = classDescriptor.getFieldDescriptor(fieldName);
        AtomicTypeConverter atomicTypeConverter = null;
        // if the attribute is a simple field (primitive data type or wrapper class)
        if (fieldDescriptor != null) {
            String fieldConverterName = fieldDescriptor.getConverter();
            // if a field converter is set in the mapping, use this one
            if (fieldConverterName != null) {
                atomicTypeConverter = (AtomicTypeConverter) ReflectionUtils.newInstance(fieldConverterName);
            }
            // else use a default converter in function of the attribute type
            else {
                atomicTypeConverter = (AtomicTypeConverter) atomicTypeConverters.get(value.getClass());
            }
        }
        // else it could be a collection (for example, it is a multivalue property)
        else {
            atomicTypeConverter = (AtomicTypeConverter) atomicTypeConverters.get(value.getClass());
        }
        // NOTE(review): throws NPE if no converter is registered for the value's
        // class — unchanged behavior, but worth an explicit error message someday.
        return atomicTypeConverter.getXPathQueryValue(valueFactory, value);
    }

    public String getJcrExpression() {
        return this.jcrExpression;
    }

    // Appends a clause with " and " glue (no glue when expression is empty).
    private void addExpression(String jcrExpression) {
        if (this.jcrExpression.length() > 0) {
            this.jcrExpression += " and ";
        }
        this.jcrExpression += jcrExpression;
    }

    // Appends a clause with " or " glue (no glue when expression is empty).
    private void orExpression(String jcrExpression) {
        if (this.jcrExpression.length() > 0) {
            this.jcrExpression += " or ";
        }
        this.jcrExpression += jcrExpression;
    }

    public String toString() {
        return getJcrExpression();
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.operator.scalar;

import com.google.common.collect.ImmutableList;
import io.airlift.slice.Slice;
import io.prestosql.annotation.UsedByGeneratedCode;
import io.prestosql.metadata.FunctionArgumentDefinition;
import io.prestosql.metadata.FunctionBinding;
import io.prestosql.metadata.FunctionDependencies;
import io.prestosql.metadata.FunctionDependencyDeclaration;
import io.prestosql.metadata.FunctionMetadata;
import io.prestosql.metadata.Signature;
import io.prestosql.metadata.SqlScalarFunction;
import io.prestosql.spi.PageBuilder;
import io.prestosql.spi.PrestoException;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.connector.ConnectorSession;
import io.prestosql.spi.function.InvocationConvention;
import io.prestosql.spi.function.InvocationConvention.InvocationArgumentConvention;
import io.prestosql.spi.type.Type;
import io.prestosql.spi.type.TypeSignature;
import io.prestosql.type.UnknownType;

import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.util.Collections;
import java.util.List;
import java.util.Optional;

import static io.prestosql.metadata.FunctionKind.SCALAR;
import static io.prestosql.metadata.Signature.castableToTypeParameter;
import static io.prestosql.metadata.Signature.typeVariable;
import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR;
import static io.prestosql.spi.StandardErrorCode.INVALID_FUNCTION_ARGUMENT;
import static io.prestosql.spi.function.InvocationConvention.InvocationArgumentConvention.BLOCK_POSITION;
import static io.prestosql.spi.function.InvocationConvention.InvocationArgumentConvention.NEVER_NULL;
import static io.prestosql.spi.function.InvocationConvention.InvocationReturnConvention.FAIL_ON_NULL;
import static io.prestosql.spi.function.InvocationConvention.InvocationReturnConvention.NULLABLE_RETURN;
import static io.prestosql.spi.type.TypeSignature.arrayType;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static io.prestosql.util.Reflection.methodHandle;
import static java.lang.String.format;

/**
 * The {@code array_join} scalar function: concatenates the elements of an
 * {@code array(T)} into a VARCHAR, separated by a delimiter.
 * <p>
 * Two variants are registered: the 2-argument form declared by this class
 * (nulls are skipped) and the 3-argument form declared by the nested
 * {@link ArrayJoinWithNullReplacement} (nulls are replaced by the third
 * argument). Both share the same runtime entry points ({@link #arrayJoin})
 * and the same element-to-VARCHAR cast dependency.
 */
public final class ArrayJoin
        extends SqlScalarFunction
{
    public static final ArrayJoin ARRAY_JOIN = new ArrayJoin();
    public static final ArrayJoinWithNullReplacement ARRAY_JOIN_WITH_NULL_REPLACEMENT = new ArrayJoinWithNullReplacement();

    private static final String FUNCTION_NAME = "array_join";
    private static final String DESCRIPTION = "Concatenates the elements of the given array using a delimiter and an optional string to replace nulls";

    // 2-argument variant: (array, delimiter); the leading MethodHandle/Object
    // parameters are the element cast and the per-invocation PageBuilder state.
    private static final MethodHandle METHOD_HANDLE = methodHandle(
            ArrayJoin.class,
            "arrayJoin",
            MethodHandle.class,
            Object.class,
            ConnectorSession.class,
            Block.class,
            Slice.class);

    // Factory for the PageBuilder used as scratch space to build the result slice.
    private static final MethodHandle STATE_FACTORY = methodHandle(ArrayJoin.class, "createState");

    /**
     * The 3-argument {@code array_join(array, delimiter, nullReplacement)} variant.
     */
    public static class ArrayJoinWithNullReplacement
            extends SqlScalarFunction
    {
        private static final MethodHandle METHOD_HANDLE = methodHandle(
                ArrayJoin.class,
                "arrayJoin",
                MethodHandle.class,
                Object.class,
                ConnectorSession.class,
                Block.class,
                Slice.class,
                Slice.class);

        public ArrayJoinWithNullReplacement()
        {
            super(new FunctionMetadata(
                    new Signature(
                            FUNCTION_NAME,
                            ImmutableList.of(typeVariable("T")),
                            ImmutableList.of(),
                            VARCHAR.getTypeSignature(),
                            ImmutableList.of(arrayType(new TypeSignature("T")), VARCHAR.getTypeSignature(), VARCHAR.getTypeSignature()),
                            false),
                    false,
                    ImmutableList.of(
                            new FunctionArgumentDefinition(false),
                            new FunctionArgumentDefinition(false),
                            new FunctionArgumentDefinition(false)),
                    false,
                    true,
                    DESCRIPTION,
                    SCALAR));
        }

        @Override
        public FunctionDependencyDeclaration getFunctionDependencies()
        {
            // Same dependency as the 2-argument variant: a T -> VARCHAR cast.
            return arrayJoinFunctionDependencies();
        }

        @Override
        public ScalarFunctionImplementation specialize(FunctionBinding functionBinding, FunctionDependencies functionDependencies)
        {
            return specializeArrayJoin(functionBinding, functionDependencies, METHOD_HANDLE);
        }
    }

    private ArrayJoin()
    {
        super(new FunctionMetadata(
                new Signature(
                        FUNCTION_NAME,
                        // T must be castable to VARCHAR so elements can be rendered
                        ImmutableList.of(castableToTypeParameter("T", VARCHAR.getTypeSignature())),
                        ImmutableList.of(),
                        VARCHAR.getTypeSignature(),
                        ImmutableList.of(arrayType(new TypeSignature("T")), VARCHAR.getTypeSignature()),
                        false),
                false,
                ImmutableList.of(
                        new FunctionArgumentDefinition(false),
                        new FunctionArgumentDefinition(false)),
                false,
                true,
                DESCRIPTION,
                SCALAR));
    }

    /**
     * Creates the per-invocation scratch state: a single-VARCHAR-channel PageBuilder.
     * Invoked by generated code through {@link #STATE_FACTORY}.
     */
    @UsedByGeneratedCode
    public static Object createState()
    {
        return new PageBuilder(ImmutableList.of(VARCHAR));
    }

    @Override
    public FunctionDependencyDeclaration getFunctionDependencies()
    {
        return arrayJoinFunctionDependencies();
    }

    private static FunctionDependencyDeclaration arrayJoinFunctionDependencies()
    {
        // Declares that specialization will need the cast T -> VARCHAR.
        return FunctionDependencyDeclaration.builder()
                .addCastSignature(new TypeSignature("T"), VARCHAR.getTypeSignature())
                .build();
    }

    @Override
    public ScalarFunctionImplementation specialize(FunctionBinding functionBinding, FunctionDependencies functionDependencies)
    {
        return specializeArrayJoin(functionBinding, functionDependencies, METHOD_HANDLE);
    }

    /**
     * Binds the element-to-VARCHAR cast into the shared {@code arrayJoin} method
     * handle for the bound element type T. For UNKNOWN element type no cast
     * exists, so {@code null} is bound in its place (the array can only contain
     * nulls, so the cast is never invoked).
     */
    private static ChoicesScalarFunctionImplementation specializeArrayJoin(
            FunctionBinding functionBinding,
            FunctionDependencies functionDependencies,
            MethodHandle methodHandle)
    {
        List<InvocationArgumentConvention> argumentConventions = Collections.nCopies(functionBinding.getArity(), NEVER_NULL);

        Type type = functionBinding.getTypeVariable("T");
        if (type instanceof UnknownType) {
            return new ChoicesScalarFunctionImplementation(
                    functionBinding,
                    FAIL_ON_NULL,
                    argumentConventions,
                    methodHandle.bindTo(null),
                    Optional.of(STATE_FACTORY));
        }
        else {
            try {
                // Ask for a block-position cast returning a nullable value.
                InvocationConvention convention = new InvocationConvention(ImmutableList.of(BLOCK_POSITION), NULLABLE_RETURN, true, false);
                MethodHandle cast = functionDependencies.getCastInvoker(type, VARCHAR, Optional.of(convention)).getMethodHandle();

                // if the cast doesn't take a ConnectorSession, create an adapter that drops the provided session
                if (cast.type().parameterArray()[0] != ConnectorSession.class) {
                    cast = MethodHandles.dropArguments(cast, 0, ConnectorSession.class);
                }

                MethodHandle target = MethodHandles.insertArguments(methodHandle, 0, cast);
                return new ChoicesScalarFunctionImplementation(
                        functionBinding,
                        FAIL_ON_NULL,
                        argumentConventions,
                        target,
                        Optional.of(STATE_FACTORY));
            }
            catch (PrestoException e) {
                throw new PrestoException(INVALID_FUNCTION_ARGUMENT, format("Input type %s not supported", type), e);
            }
        }
    }

    /**
     * Runtime entry point for the 2-argument variant; delegates with a null
     * replacement of {@code null}, which makes null elements be skipped.
     */
    @UsedByGeneratedCode
    public static Slice arrayJoin(
            MethodHandle castFunction,
            Object state,
            ConnectorSession session,
            Block arrayBlock,
            Slice delimiter)
    {
        return arrayJoin(castFunction, state, session, arrayBlock, delimiter, null);
    }

    /**
     * Runtime entry point shared by both variants. Renders each non-null element
     * through {@code castFunction}, substitutes {@code nullReplacement} for nulls
     * (skipping them when it is null), and joins everything with {@code delimiter}
     * inside the PageBuilder passed as {@code state}.
     */
    @UsedByGeneratedCode
    public static Slice arrayJoin(
            MethodHandle castFunction,
            Object state,
            ConnectorSession session,
            Block arrayBlock,
            Slice delimiter,
            Slice nullReplacement)
    {
        PageBuilder pageBuilder = (PageBuilder) state;
        if (pageBuilder.isFull()) {
            pageBuilder.reset();
        }

        int numElements = arrayBlock.getPositionCount();

        BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(0);
        boolean needsDelimiter = false;
        for (int i = 0; i < numElements; i++) {
            Slice value = null;
            if (!arrayBlock.isNull(i)) {
                try {
                    value = (Slice) castFunction.invokeExact(session, arrayBlock, i);
                }
                catch (Throwable throwable) {
                    // Restore pageBuilder into a consistent state
                    blockBuilder.closeEntry();
                    pageBuilder.declarePosition();
                    throw new PrestoException(GENERIC_INTERNAL_ERROR, "Error casting array element to VARCHAR", throwable);
                }
            }

            if (value == null) {
                value = nullReplacement;
                if (value == null) {
                    // no replacement configured: skip null elements entirely
                    continue;
                }
            }

            if (needsDelimiter) {
                blockBuilder.writeBytes(delimiter, 0, delimiter.length());
            }
            blockBuilder.writeBytes(value, 0, value.length());
            needsDelimiter = true;
        }
        blockBuilder.closeEntry();
        pageBuilder.declarePosition();
        // The joined string is the entry just written at the last position.
        return VARCHAR.getSlice(blockBuilder, blockBuilder.getPositionCount() - 1);
    }
}
package seedu.addressbook.parser;

import seedu.addressbook.commands.*;
import seedu.addressbook.data.exception.IllegalValueException;

import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static seedu.addressbook.common.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.addressbook.common.Messages.MESSAGE_INVALID_PERSON_DISPLAYED_INDEX;

/**
 * Translates raw user input strings into executable {@link Command} objects.
 */
public class Parser {

    public static final Pattern PERSON_INDEX_ARGS_FORMAT = Pattern.compile("(?<targetIndex>.+)");

    /** One or more keywords separated by whitespace. */
    public static final Pattern KEYWORDS_ARGS_FORMAT =
            Pattern.compile("(?<keywords>\\S+(?:\\s+\\S+)*)");

    /** '/' forward slashes are reserved for delimiter prefixes; tags may repeat. */
    public static final Pattern PERSON_DATA_ARGS_FORMAT =
            Pattern.compile("(?<name>[^/]+)"
                    + " (?<isPhonePrivate>p?)p/(?<phone>[^/]+)"
                    + " (?<isEmailPrivate>p?)e/(?<email>[^/]+)"
                    + " (?<isAddressPrivate>p?)a/(?<address>[^/]+)"
                    + "(?<tagArguments>(?: t/[^/]+)*)");

    /** Splits input into the leading command word and everything after it. */
    public static final Pattern BASIC_COMMAND_FORMAT = Pattern.compile("(?<commandWord>\\S+)(?<arguments>.*)");

    /**
     * Signals that the user input could not be parsed.
     */
    public static class ParseException extends Exception {
        ParseException(String message) {
            super(message);
        }
    }

    public Parser() {}

    /**
     * Parses user input into a command for execution.
     *
     * @param userInput full user input string
     * @return the command based on the user input
     */
    public static Command parseCommand(String userInput) {
        final Matcher split = BASIC_COMMAND_FORMAT.matcher(userInput.trim());
        if (!split.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, HelpCommand.MESSAGE_USAGE));
        }

        final String commandWord = split.group("commandWord");
        final String arguments = split.group("arguments");

        switch (commandWord) {
        case AddCommand.COMMAND_WORD:
            return prepareAdd(arguments);
        case DeleteCommand.COMMAND_WORD:
            return prepareDelete(arguments);
        case ClearCommand.COMMAND_WORD:
            return new ClearCommand();
        case FindCommand.COMMAND_WORD:
            return prepareFind(arguments);
        case ListCommand.COMMAND_WORD:
            return new ListCommand();
        case ViewCommand.COMMAND_WORD:
            return prepareView(arguments);
        case ViewAllCommand.COMMAND_WORD:
            return prepareViewAll(arguments);
        case ExitCommand.COMMAND_WORD:
            return new ExitCommand();
        case HelpCommand.COMMAND_WORD: // fall through: help is also the default response
        default:
            return new HelpCommand();
        }
    }

    /**
     * Parses arguments in the context of the add person command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private static Command prepareAdd(String args) {
        final Matcher data = PERSON_DATA_ARGS_FORMAT.matcher(args.trim());
        if (!data.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE));
        }
        try {
            return new AddCommand(
                    data.group("name"),
                    data.group("phone"),
                    isPrivatePrefixPresent(data.group("isPhonePrivate")),
                    data.group("email"),
                    isPrivatePrefixPresent(data.group("isEmailPrivate")),
                    data.group("address"),
                    isPrivatePrefixPresent(data.group("isAddressPrivate")),
                    getTagsFromArgs(data.group("tagArguments"))
            );
        } catch (IllegalValueException ive) {
            return new IncorrectCommand(ive.getMessage());
        }
    }

    /**
     * A contact detail is private when its optional prefix group captured "p".
     */
    private static boolean isPrivatePrefixPresent(String matchedPrefix) {
        return "p".equals(matchedPrefix);
    }

    /**
     * Extracts the new person's tags from the add command's tag arguments string.
     * Duplicate tag strings collapse into one via the returned set.
     */
    private static Set<String> getTagsFromArgs(String tagArguments) throws IllegalValueException {
        if (tagArguments.isEmpty()) {
            return Collections.emptySet();
        }
        // strip the leading " t/" prefix, then split on the remaining ones
        final String[] tags = tagArguments.replaceFirst(" t/", "").split(" t/");
        return new HashSet<>(Arrays.asList(tags));
    }

    /**
     * Parses arguments in the context of the delete person command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private static Command prepareDelete(String args) {
        try {
            return new DeleteCommand(parseArgsAsDisplayedIndex(args));
        } catch (ParseException pe) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE));
        } catch (NumberFormatException nfe) {
            return new IncorrectCommand(MESSAGE_INVALID_PERSON_DISPLAYED_INDEX);
        }
    }

    /**
     * Parses arguments in the context of the view command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private static Command prepareView(String args) {
        try {
            return new ViewCommand(parseArgsAsDisplayedIndex(args));
        } catch (ParseException pe) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, ViewCommand.MESSAGE_USAGE));
        } catch (NumberFormatException nfe) {
            return new IncorrectCommand(MESSAGE_INVALID_PERSON_DISPLAYED_INDEX);
        }
    }

    /**
     * Parses arguments in the context of the view all command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private static Command prepareViewAll(String args) {
        try {
            return new ViewAllCommand(parseArgsAsDisplayedIndex(args));
        } catch (ParseException pe) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, ViewAllCommand.MESSAGE_USAGE));
        } catch (NumberFormatException nfe) {
            return new IncorrectCommand(MESSAGE_INVALID_PERSON_DISPLAYED_INDEX);
        }
    }

    /**
     * Parses the given arguments string as a single index number.
     *
     * @param args arguments string to parse as index number
     * @return the parsed index number
     * @throws ParseException if no region of the args string could be found for the index
     * @throws NumberFormatException the args string region is not a valid number
     */
    private static int parseArgsAsDisplayedIndex(String args) throws ParseException, NumberFormatException {
        final Matcher indexMatcher = PERSON_INDEX_ARGS_FORMAT.matcher(args.trim());
        if (!indexMatcher.matches()) {
            throw new ParseException("Could not find index number to parse");
        }
        return Integer.parseInt(indexMatcher.group("targetIndex"));
    }

    /**
     * Parses arguments in the context of the find person command.
     *
     * @param args full command args string
     * @return the prepared command
     */
    private static Command prepareFind(String args) {
        final Matcher keywordMatcher = KEYWORDS_ARGS_FORMAT.matcher(args.trim());
        if (!keywordMatcher.matches()) {
            return new IncorrectCommand(String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE));
        }

        // keywords delimited by whitespace
        final String[] keywords = keywordMatcher.group("keywords").split("\\s+");
        return new FindCommand(new HashSet<>(Arrays.asList(keywords)));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.jmeter.visualizers;

import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.LayoutManager;
import java.awt.Paint;
import java.math.BigDecimal;

import javax.swing.JPanel;
import javax.swing.UIManager;

import org.apache.jmeter.util.JMeterUtils;
import org.jCharts.axisChart.AxisChart;
import org.jCharts.axisChart.customRenderers.axisValue.renderers.ValueLabelPosition;
import org.jCharts.axisChart.customRenderers.axisValue.renderers.ValueLabelRenderer;
import org.jCharts.chartData.AxisChartDataSet;
import org.jCharts.chartData.ChartDataException;
import org.jCharts.chartData.DataSeries;
import org.jCharts.properties.AxisProperties;
import org.jCharts.properties.ChartProperties;
import org.jCharts.properties.ClusteredBarChartProperties;
import org.jCharts.properties.DataAxisProperties;
import org.jCharts.properties.LabelAxisProperties;
import org.jCharts.properties.LegendAreaProperties;
import org.jCharts.properties.LegendProperties;
import org.jCharts.properties.PropertyException;
import org.jCharts.properties.util.ChartFont;
import org.jCharts.types.ChartType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Axis graph is used by StatGraphVisualizer, which generates bar graphs
 * from the statistical data.
 *
 * The panel is configured through the setters below and renders a clustered
 * bar chart (via jCharts) in {@link #paintComponent(Graphics)}.
 */
public class AxisGraph extends JPanel {

    private static final long serialVersionUID = 241L;

    private static final Logger log = LoggerFactory.getLogger(AxisGraph.class);

    // Suffix appended when x-axis labels are truncated by squeeze().
    private static final String ELLIPSIS = "..."; //$NON-NLS-1$
    private static final int ELLIPSIS_LEN = ELLIPSIS.length();

    protected double[][] data = null;           // chart values: [series][category]
    protected String title;                     // chart title (default applied when empty)
    protected String xAxisTitle;                // currently unused by drawSample (passes null)
    protected String yAxisTitle;
    protected String yAxisLabel;
    protected int maxLength;                    // max chars per x-axis label before squeezing
    protected String[] xAxisLabels;
    protected int width;
    protected int height;

    protected String[] legendLabels = { JMeterUtils.getResString("aggregate_graph_legend") }; // $NON-NLS-1$

    protected int maxYAxisScale;                // 0 or negative means: derive scale from data max

    protected Font titleFont;
    protected Font legendFont;

    private static final Font FONT_DEFAULT = UIManager.getDefaults().getFont("TextField.font");

    // value labels rendered at 60% of the default text field font size
    protected Font valueFont = new Font("SansSerif", Font.PLAIN, (int) Math.round(FONT_DEFAULT.getSize() * 0.6));

    protected Color[] color = { Color.YELLOW }; // one paint per data series
    protected Color foreColor = Color.BLACK;

    protected boolean outlinesBarFlag = false;  // draw outlines around bars
    protected boolean showGrouping = true;      // thousands separators on values/axis
    protected boolean valueOrientation = true;  // vertical value labels above bars

    protected int legendPlacement = LegendAreaProperties.BOTTOM;

    /**
     *
     */
    public AxisGraph() {
        super();
    }

    /**
     * @param layout The {@link LayoutManager} to use
     */
    public AxisGraph(LayoutManager layout) {
        super(layout);
    }

    /**
     * @param layout The {@link LayoutManager} to use
     * @param isDoubleBuffered Flag whether double buffering should be used
     */
    public AxisGraph(LayoutManager layout, boolean isDoubleBuffered) {
        super(layout, isDoubleBuffered);
    }

    /**
     * Expects null array when no data not empty array
     * @param data The data to be drawn
     */
    public void setData(double[][] data) {
        this.data = data;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public void setMaxLength(int maxLength) {
        this.maxLength = maxLength;
    }

    public void setXAxisTitle(String title) {
        this.xAxisTitle = title;
    }

    public void setYAxisTitle(String title) {
        this.yAxisTitle = title;
    }

    /**
     * Expects null array when no labels not empty array
     * @param labels The labels for the x axis
     */
    public void setXAxisLabels(String[] labels) {
        this.xAxisLabels = labels;
    }

    public void setYAxisLabels(String label) {
        this.yAxisLabel = label;
    }

    public void setLegendLabels(String[] labels) {
        this.legendLabels = labels;
    }

    public void setWidth(int w) {
        this.width = w;
    }

    public void setHeight(int h) {
        this.height = h;
    }

    /**
     * @return the maxYAxisScale
     */
    public int getMaxYAxisScale() {
        return maxYAxisScale;
    }

    /**
     * @param maxYAxisScale the maxYAxisScale to set
     */
    public void setMaxYAxisScale(int maxYAxisScale) {
        this.maxYAxisScale = maxYAxisScale;
    }

    /**
     * @return the color
     */
    public Color[] getColor() {
        return color;
    }

    /**
     * @param color the color to set
     */
    public void setColor(Color[] color) {
        this.color = color;
    }

    /**
     * @return the foreColor
     */
    public Color getForeColor() {
        return foreColor;
    }

    /**
     * @param foreColor the foreColor to set
     */
    public void setForeColor(Color foreColor) {
        this.foreColor = foreColor;
    }

    /**
     * @return the titleFont
     */
    public Font getTitleFont() {
        return titleFont;
    }

    /**
     * @param titleFont the titleFont to set
     */
    public void setTitleFont(Font titleFont) {
        this.titleFont = titleFont;
    }

    /**
     * @return the legendFont
     */
    public Font getLegendFont() {
        return legendFont;
    }

    /**
     * @param legendFont the legendFont to set
     */
    public void setLegendFont(Font legendFont) {
        this.legendFont = legendFont;
    }

    /**
     * @return the valueFont
     */
    public Font getValueFont() {
        return valueFont;
    }

    /**
     * @param valueFont the valueFont to set
     */
    public void setValueFont(Font valueFont) {
        this.valueFont = valueFont;
    }

    /**
     * @return the legendPlacement
     */
    public int getLegendPlacement() {
        return legendPlacement;
    }

    /**
     * @param legendPlacement the legendPlacement to set
     */
    public void setLegendPlacement(int legendPlacement) {
        this.legendPlacement = legendPlacement;
    }

    /**
     * @return the outlinesBarFlag
     */
    public boolean isOutlinesBarFlag() {
        return outlinesBarFlag;
    }

    /**
     * @param outlinesBarFlag the outlinesBarFlag to set
     */
    public void setOutlinesBarFlag(boolean outlinesBarFlag) {
        this.outlinesBarFlag = outlinesBarFlag;
    }

    /**
     * @return the valueOrientation
     */
    public boolean isValueOrientation() {
        return valueOrientation;
    }

    /**
     * @param valueOrientation the valueOrientation to set
     */
    public void setValueOrientation(boolean valueOrientation) {
        this.valueOrientation = valueOrientation;
    }

    /**
     * @return the showGrouping
     */
    public boolean isShowGrouping() {
        return showGrouping;
    }

    /**
     * @param showGrouping the showGrouping to set
     */
    public void setShowGrouping(boolean showGrouping) {
        this.showGrouping = showGrouping;
    }

    /**
     * Renders the chart when all mandatory properties (data, title, x-axis
     * labels, y-axis label and title) have been set; otherwise paints nothing.
     * NOTE(review): super.paintComponent(graphics) is not called, so the panel
     * background is never cleared by Swing — confirm this is intentional.
     */
    @Override
    public void paintComponent(Graphics graphics) {
        if (data != null && this.title != null && this.xAxisLabels != null
                && this.yAxisLabel != null && this.yAxisTitle != null) {
            drawSample(this.title, this.maxLength, this.xAxisLabels, this.yAxisTitle,
                    this.legendLabels, this.data, this.width, this.height, this.color,
                    this.legendFont, graphics);
        }
    }

    // Largest value in the data matrix; assumes at least one row and one column
    // (guaranteed upstream only by the data != null check — TODO confirm non-empty).
    private double findMax(double[][] _data) {
        double max = _data[0][0];
        for (double[] dArray : _data) {
            for (double d : dArray) {
                if (d > max) {
                    max = d;
                }
            }
        }
        return max;
    }

    // Truncates a label to _maxLength characters, ellipsis included.
    private String squeeze(String input, int _maxLength) {
        if (input.length() > _maxLength) {
            return input.substring(0,_maxLength-ELLIPSIS_LEN)+ELLIPSIS;
        }
        return input;
    }

    /**
     * Builds and renders the clustered bar chart onto the supplied graphics
     * context using jCharts. Mutates xAxisLabels in place when labels need
     * squeezing, and sets the panel's preferred size.
     */
    private void drawSample(String _title, int _maxLength, String[] _xAxisLabels, String _yAxisTitle,
            String[] _legendLabels, double[][] _data, int _width, int _height, Color[] _color,
            Font legendFont, Graphics g) {
        double max = maxYAxisScale > 0 ? maxYAxisScale : findMax(_data); // define max scale y axis
        try {
            // Width and Height are already set in StatGraphVisualizer
            if (_maxLength < 3) {
                _maxLength = 3; // need room for at least the ellipsis itself
            }
            // if the "Title of Graph" is empty, we can assume some default
            if (_title.length() == 0 ) {
                _title = JMeterUtils.getResString("aggregate_graph_title"); //$NON-NLS-1$
            }
            // if the labels are too long, they'll be "squeezed" to make the chart viewable.
            for (int i = 0; i < _xAxisLabels.length; i++) {
                String label = _xAxisLabels[i];
                _xAxisLabels[i]=squeeze(label, _maxLength);
            }
            this.setPreferredSize(new Dimension(_width,_height));
            // _xAxisTitle to null (don't display x axis title)
            DataSeries dataSeries = new DataSeries( _xAxisLabels, null, _yAxisTitle, _title );

            ClusteredBarChartProperties clusteredBarChartProperties= new ClusteredBarChartProperties();
            clusteredBarChartProperties.setShowOutlinesFlag(outlinesBarFlag);
            // draw the numeric value of each bar above it
            ValueLabelRenderer valueLabelRenderer = new ValueLabelRenderer(false, false, showGrouping, 0);
            valueLabelRenderer.setValueLabelPosition(ValueLabelPosition.AT_TOP);
            valueLabelRenderer.setValueChartFont(new ChartFont(valueFont, foreColor));
            valueLabelRenderer.useVerticalLabels(valueOrientation);
            clusteredBarChartProperties.addPostRenderEventListener(valueLabelRenderer);
            // jCharts wants Paint[], so widen the Color[] via a copy
            Paint[] paints = new Paint[_color.length];
            System.arraycopy(_color, 0, paints, 0, paints.length);
            AxisChartDataSet axisChartDataSet = new AxisChartDataSet( _data, _legendLabels, paints, ChartType.BAR_CLUSTERED, clusteredBarChartProperties );
            dataSeries.addIAxisPlotDataSet( axisChartDataSet );

            ChartProperties chartProperties= new ChartProperties();
            LabelAxisProperties xaxis = new LabelAxisProperties();
            DataAxisProperties yaxis = new DataAxisProperties();
            yaxis.setUseCommas(showGrouping);

            if (legendFont != null) {
                yaxis.setAxisTitleChartFont(new ChartFont(legendFont, new Color(20)));
                yaxis.setScaleChartFont(new ChartFont(legendFont, new Color(20)));
                xaxis.setAxisTitleChartFont(new ChartFont(legendFont, new Color(20)));
                xaxis.setScaleChartFont(new ChartFont(legendFont, new Color(20)));
            }
            if (titleFont != null) {
                chartProperties.setTitleFont(new ChartFont(titleFont, new Color(0)));
            }
            // Y Axis: round max up to the next multiple of 1000, tick every 500
            // NOTE(review): BigDecimal.ROUND_UP is deprecated in modern JDKs
            // (RoundingMode.UP) — kept unchanged here.
            try {
                BigDecimal round = BigDecimal.valueOf(max / 1000d);
                round = round.setScale(0, BigDecimal.ROUND_UP);
                double topValue = round.doubleValue() * 1000;
                yaxis.setUserDefinedScale(0, 500);
                yaxis.setNumItems((int) (topValue / 500)+1);
                yaxis.setShowGridLines(1);
            } catch (PropertyException e) {
                log.warn("Chart property exception occurred.", e);
            }

            AxisProperties axisProperties= new AxisProperties(xaxis, yaxis);
            axisProperties.setXAxisLabelsAreVertical(true);
            LegendProperties legendProperties= new LegendProperties();
            legendProperties.setBorderStroke(null);
            legendProperties.setPlacement(legendPlacement);
            legendProperties.setIconBorderPaint(Color.WHITE);
            // side-placed legends render as a single column
            if (legendPlacement == LegendAreaProperties.RIGHT || legendPlacement == LegendAreaProperties.LEFT) {
                legendProperties.setNumColumns(1);
            }
            if (legendFont != null) {
                legendProperties.setFont(legendFont); //new Font("SansSerif", Font.PLAIN, 10)
            }
            AxisChart axisChart = new AxisChart(
                    dataSeries, chartProperties, axisProperties,
                    legendProperties, _width, _height );
            axisChart.setGraphics2D((Graphics2D) g);
            axisChart.render();
        } catch (ChartDataException | PropertyException e) {
            log.warn("Exception occurred while rendering chart.", e);
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.operator.scalar; import com.facebook.presto.bytecode.BytecodeBlock; import com.facebook.presto.bytecode.BytecodeNode; import com.facebook.presto.bytecode.CallSiteBinder; import com.facebook.presto.bytecode.ClassDefinition; import com.facebook.presto.bytecode.MethodDefinition; import com.facebook.presto.bytecode.Parameter; import com.facebook.presto.bytecode.Scope; import com.facebook.presto.bytecode.Variable; import com.facebook.presto.bytecode.control.ForLoop; import com.facebook.presto.bytecode.control.IfStatement; import com.facebook.presto.common.PageBuilder; import com.facebook.presto.common.QualifiedObjectName; import com.facebook.presto.common.block.Block; import com.facebook.presto.common.block.BlockBuilder; import com.facebook.presto.common.type.ArrayType; import com.facebook.presto.common.type.Type; import com.facebook.presto.metadata.BoundVariables; import com.facebook.presto.metadata.FunctionAndTypeManager; import com.facebook.presto.metadata.SqlScalarFunction; import com.facebook.presto.spi.function.FunctionKind; import com.facebook.presto.spi.function.Signature; import com.facebook.presto.spi.function.SqlFunctionVisibility; import com.facebook.presto.sql.gen.lambda.UnaryFunctionInterface; import com.google.common.collect.ImmutableList; import com.google.common.primitives.Primitives; import java.util.List; import java.util.Optional; import static com.facebook.presto.bytecode.Access.FINAL; 
import static com.facebook.presto.bytecode.Access.PRIVATE;
import static com.facebook.presto.bytecode.Access.PUBLIC;
import static com.facebook.presto.bytecode.Access.STATIC;
import static com.facebook.presto.bytecode.Access.a;
import static com.facebook.presto.bytecode.Parameter.arg;
import static com.facebook.presto.bytecode.ParameterizedType.type;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.constantInt;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.constantNull;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.equal;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.lessThan;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.newInstance;
import static com.facebook.presto.bytecode.expression.BytecodeExpressions.subtract;
import static com.facebook.presto.bytecode.instruction.VariableInstruction.incrementVariable;
import static com.facebook.presto.common.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.common.type.UnknownType.UNKNOWN;
import static com.facebook.presto.metadata.BuiltInTypeAndFunctionNamespaceManager.DEFAULT_NAMESPACE;
import static com.facebook.presto.operator.scalar.BuiltInScalarFunctionImplementation.ArgumentProperty.functionTypeArgumentProperty;
import static com.facebook.presto.operator.scalar.BuiltInScalarFunctionImplementation.ArgumentProperty.valueTypeArgumentProperty;
import static com.facebook.presto.operator.scalar.BuiltInScalarFunctionImplementation.NullConvention.RETURN_NULL_ON_NULL;
import static com.facebook.presto.spi.function.Signature.typeVariable;
import static com.facebook.presto.sql.gen.SqlTypeBytecodeExpression.constantType;
import static com.facebook.presto.util.CompilerUtils.defineClass;
import static com.facebook.presto.util.CompilerUtils.makeClassName;
import static com.facebook.presto.util.Reflection.methodHandle;

/**
 * Built-in scalar function {@code transform(array(T), function(T, U)) -> array(U)}:
 * applies a unary lambda to every element of the input array.
 *
 * <p>At specialization time (once the concrete types T and U are known) this class
 * generates a small bytecode class containing a static {@code transform} method that
 * loops over the input block, invokes the lambda per element, and writes the results
 * through a shared {@link PageBuilder}.
 */
public final class ArrayTransformFunction
        extends SqlScalarFunction
{
    public static final ArrayTransformFunction ARRAY_TRANSFORM_FUNCTION = new ArrayTransformFunction();

    private ArrayTransformFunction()
    {
        // Signature: transform(array(T), function(T, U)) -> array(U), with free type
        // variables T and U and no variadic arguments (trailing `false`).
        super(new Signature(
                QualifiedObjectName.valueOf(DEFAULT_NAMESPACE, "transform"),
                FunctionKind.SCALAR,
                ImmutableList.of(typeVariable("T"), typeVariable("U")),
                ImmutableList.of(),
                parseTypeSignature("array(U)"),
                ImmutableList.of(parseTypeSignature("array(T)"), parseTypeSignature("function(T,U)")),
                false));
    }

    @Override
    public SqlFunctionVisibility getVisibility()
    {
        return SqlFunctionVisibility.PUBLIC;
    }

    @Override
    public boolean isDeterministic()
    {
        // Not deterministic as a whole: the supplied lambda may itself be non-deterministic.
        return false;
    }

    @Override
    public String getDescription()
    {
        return "apply lambda to each element of the array";
    }

    /**
     * Binds the type variables T and U, generates the specialized bytecode class, and
     * wires its {@code transform} method (plus the {@code createPageBuilder} state
     * factory) into a scalar-function implementation.
     *
     * <p>The array argument uses RETURN_NULL_ON_NULL (a NULL array yields NULL);
     * the second argument is the lambda, passed as a {@link UnaryFunctionInterface}.
     */
    @Override
    public BuiltInScalarFunctionImplementation specialize(BoundVariables boundVariables, int arity, FunctionAndTypeManager functionAndTypeManager)
    {
        Type inputType = boundVariables.getTypeVariable("T");
        Type outputType = boundVariables.getTypeVariable("U");
        Class<?> generatedClass = generateTransform(inputType, outputType);
        return new BuiltInScalarFunctionImplementation(
                false,
                ImmutableList.of(
                        valueTypeArgumentProperty(RETURN_NULL_ON_NULL),
                        functionTypeArgumentProperty(UnaryFunctionInterface.class)),
                methodHandle(generatedClass, "transform", PageBuilder.class, Block.class, UnaryFunctionInterface.class),
                Optional.of(methodHandle(generatedClass, "createPageBuilder")));
    }

    /**
     * Generates a class with two static methods specialized for the bound types:
     * <ul>
     *   <li>{@code createPageBuilder()} — builds the PageBuilder holding the output channel;</li>
     *   <li>{@code transform(PageBuilder, Block, UnaryFunctionInterface)} — the element loop.</li>
     * </ul>
     * Boxed Java types are used for the loop variables so that NULL can be represented
     * uniformly for any element type.
     */
    private static Class<?> generateTransform(Type inputType, Type outputType)
    {
        CallSiteBinder binder = new CallSiteBinder();
        Class<?> inputJavaType = Primitives.wrap(inputType.getJavaType());
        Class<?> outputJavaType = Primitives.wrap(outputType.getJavaType());

        ClassDefinition definition = new ClassDefinition(
                a(PUBLIC, FINAL),
                makeClassName("ArrayTransform"),
                type(Object.class));
        definition.declareDefaultConstructor(a(PRIVATE));

        // define createPageBuilder: new PageBuilder(arrayType(U).getTypeParameters())
        MethodDefinition createPageBuilderMethod = definition.declareMethod(a(PUBLIC, STATIC), "createPageBuilder", type(PageBuilder.class));
        createPageBuilderMethod.getBody()
                .append(newInstance(PageBuilder.class, constantType(binder, new ArrayType(outputType)).invoke("getTypeParameters", List.class)).ret());

        // define transform method
        Parameter pageBuilder = arg("pageBuilder", PageBuilder.class);
        Parameter block = arg("block", Block.class);
        Parameter function = arg("function", UnaryFunctionInterface.class);
        MethodDefinition method = definition.declareMethod(
                a(PUBLIC, STATIC),
                "transform",
                type(Block.class),
                ImmutableList.of(pageBuilder, block, function));

        BytecodeBlock body = method.getBody();
        Scope scope = method.getScope();
        Variable positionCount = scope.declareVariable(int.class, "positionCount");
        Variable position = scope.declareVariable(int.class, "position");
        Variable blockBuilder = scope.declareVariable(BlockBuilder.class, "blockBuilder");
        Variable inputElement = scope.declareVariable(inputJavaType, "inputElement");
        Variable outputElement = scope.declareVariable(outputJavaType, "outputElement");

        // invoke block.getPositionCount()
        body.append(positionCount.set(block.invoke("getPositionCount", int.class)));

        // reset page builder if it is full
        body.append(new IfStatement()
                .condition(pageBuilder.invoke("isFull", boolean.class))
                .ifTrue(pageBuilder.invoke("reset", void.class)));

        // get block builder for the single output channel
        body.append(blockBuilder.set(pageBuilder.invoke("getBlockBuilder", BlockBuilder.class, constantInt(0))));

        // Load the current element: NULL positions load a constant null; for UNKNOWN
        // (the type of the SQL NULL literal) every element is null by definition.
        BytecodeNode loadInputElement;
        if (!inputType.equals(UNKNOWN)) {
            loadInputElement = new IfStatement()
                    .condition(block.invoke("isNull", boolean.class, position))
                    .ifTrue(inputElement.set(constantNull(inputJavaType)))
                    .ifFalse(inputElement.set(constantType(binder, inputType).getValue(block, position).cast(inputJavaType)));
        }
        else {
            loadInputElement = new BytecodeBlock().append(inputElement.set(constantNull(inputJavaType)));
        }

        // Write the lambda's result: null results append a null; appendNull returns the
        // BlockBuilder, hence the pop() to discard it. UNKNOWN output always appends null.
        BytecodeNode writeOutputElement;
        if (!outputType.equals(UNKNOWN)) {
            writeOutputElement = new IfStatement()
                    .condition(equal(outputElement, constantNull(outputJavaType)))
                    .ifTrue(blockBuilder.invoke("appendNull", BlockBuilder.class).pop())
                    .ifFalse(constantType(binder, outputType).writeValue(blockBuilder, outputElement.cast(outputType.getJavaType())));
        }
        else {
            writeOutputElement = new BytecodeBlock().append(blockBuilder.invoke("appendNull", BlockBuilder.class).pop());
        }

        // for (position = 0; position < positionCount; position++) { load; apply; write; }
        body.append(new ForLoop()
                .initialize(position.set(constantInt(0)))
                .condition(lessThan(position, positionCount))
                .update(incrementVariable(position, (byte) 1))
                .body(new BytecodeBlock()
                        .append(loadInputElement)
                        .append(outputElement.set(function.invoke("apply", Object.class, inputElement.cast(Object.class)).cast(outputJavaType)))
                        .append(writeOutputElement)));

        // Account for the rows written, then return only the region this call produced
        // (the builder may already contain rows from earlier calls).
        body.append(pageBuilder.invoke("declarePositions", void.class, positionCount));
        body.append(blockBuilder.invoke("getRegion", Block.class, subtract(blockBuilder.invoke("getPositionCount", int.class), positionCount), positionCount).ret());

        return defineClass(definition, Object.class, binder.getBindings(), ArrayTransformFunction.class.getClassLoader());
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.backend.hadoop.executionengine.tez.plan.optimizer;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSplit;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezEdgeDescriptor;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOpPlanVisitor;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOperPlan;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOperator;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.operator.POValueOutputTez;
import org.apache.pig.backend.hadoop.executionengine.tez.util.TezCompilerUtil;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.plan.PlanException;
import org.apache.pig.impl.plan.ReverseDependencyOrderWalker;
import org.apache.pig.impl.plan.VisitorException;

/**
 * Multi-query optimizer for the Tez plan: merges the sub-plans of eligible
 * splittee vertices back into their splitter vertex (either by direct plan
 * merge for a single splittee, or under a {@link POSplit} for several), so
 * the split input is computed once instead of once per consumer vertex.
 *
 * <p>Walks the {@link TezOperPlan} in reverse dependency order and rewrites
 * both the physical plans inside the vertices and the edges between them.
 */
public class MultiQueryOptimizerTez extends TezOpPlanVisitor {

    // Whether the union optimizer is enabled; affects which union successors
    // block a merge (an optimizable union becomes a vertex group, which can
    // accept multiple edges from the same vertex).
    private boolean unionOptimizerOn;
    // Store funcs for which the union optimization is not supported.
    private List<String> unionUnsupportedStoreFuncs;

    public MultiQueryOptimizerTez(TezOperPlan plan, boolean unionOptimizerOn,
            List<String> unionUnsupportedStoreFuncs) {
        super(plan, new ReverseDependencyOrderWalker<TezOperator, TezOperPlan>(plan));
        this.unionOptimizerOn = unionOptimizerOn;
        this.unionUnsupportedStoreFuncs = unionUnsupportedStoreFuncs;
    }

    /**
     * Recursively collects every transitive predecessor of {@code tezOp}
     * (up to the plan roots) into {@code predsList}. May add duplicates;
     * callers only use the list for containment checks.
     */
    private void addAllPredecessors(TezOperator tezOp, List<TezOperator> predsList) {
        if (getPlan().getPredecessors(tezOp) != null) {
            for (TezOperator pred : getPlan().getPredecessors(tezOp)) {
                predsList.add(pred);
                addAllPredecessors(pred, predsList);
            }
        }
    }

    /**
     * For each splitter vertex: decide which of its successors can safely be
     * merged into it (the "splittees"), then perform the merge — a direct
     * plan splice when there is exactly one splittee and one successor, or a
     * {@link POSplit} holding each splittee's plan otherwise.
     *
     * @throws VisitorException wrapping any {@link PlanException} raised by
     *         plan surgery
     */
    @Override
    public void visitTezOp(TezOperator tezOp) throws VisitorException {
        try {
            if (!tezOp.isSplitter()) {
                return;
            }

            List<TezOperator> splittees = new ArrayList<TezOperator>();

            List<TezOperator> successors = getPlan().getSuccessors(tezOp);
            for (TezOperator successor : successors) {
                List<TezOperator> predecessors = new ArrayList<TezOperator>(getPlan().getPredecessors(successor));
                predecessors.remove(tezOp);
                if (!predecessors.isEmpty()) {
                    // If has other dependency that conflicts with other splittees, don't merge into split
                    // For eg: self replicate join/skewed join
                    // But if replicate input is from a different operator allow it, but ensure
                    // that we don't have more than one input coming from that operator into the split
                    // Check if other splittees or its predecessors (till the root) are not present in
                    // the predecessors (till the root) of this splittee.
                    // Need to check the whole predecessors hierarchy till root as the conflict
                    // could be multiple levels up
                    for (TezOperator predecessor : getPlan().getPredecessors(successor)) {
                        if (predecessor != tezOp) {
                            predecessors.add(predecessor);
                            addAllPredecessors(predecessor, predecessors);
                        }
                    }
                    List<TezOperator> toMergeSuccPredecessors = new ArrayList<TezOperator>(successors);
                    toMergeSuccPredecessors.remove(successor);
                    for (TezOperator splittee : splittees) {
                        for (TezOperator spliteePred : getPlan().getPredecessors(splittee)) {
                            if (spliteePred != tezOp) {
                                toMergeSuccPredecessors.add(spliteePred);
                                addAllPredecessors(spliteePred, toMergeSuccPredecessors);
                            }
                        }
                    }
                    // Any overlap means a conflicting shared ancestor: skip this successor.
                    if (predecessors.removeAll(toMergeSuccPredecessors)) {
                        continue;
                    }
                }

                // Split contains right input of different skewed joins
                if (successor.getSampleOperator() != null && tezOp.getSampleOperator() != null
                        && !successor.getSampleOperator().equals(tezOp.getSampleOperator())) {
                    continue;
                }

                // Detect diamond shape into successor operator, we cannot merge it into split,
                // since Tez does not handle double edge between vertexes
                // Successor could be
                //   - union operator (if no union optimizer changing it to vertex group which supports multiple edges)
                //   - self replicate join
                //   - self skewed join
                // Self hash joins can write to same output edge and is handled by POShuffleTezLoad
                // TODO: PIG-3876 to handle this by writing to same edge
                Set<TezOperator> mergedSuccessors = new HashSet<TezOperator>();
                Set<TezOperator> toMergeSuccessors = new HashSet<TezOperator>();
                mergedSuccessors.addAll(successors);
                for (TezOperator splittee : splittees) {
                    if (getPlan().getSuccessors(splittee) != null) {
                        mergedSuccessors.addAll(getPlan().getSuccessors(splittee));
                    }
                }
                if (getPlan().getSuccessors(successor) != null) {
                    for (TezOperator succSuccessor : getPlan().getSuccessors(successor)) {
                        if (succSuccessor.isUnion()) {
                            if (!(unionOptimizerOn &&
                                    UnionOptimizer.isOptimizable(succSuccessor, unionUnsupportedStoreFuncs))) {
                                toMergeSuccessors.add(succSuccessor);
                            }
                        } else if (successors.contains(succSuccessor)) {
                            // Self replicate/skewed join
                            toMergeSuccessors.add(succSuccessor);
                        }
                    }
                }
                mergedSuccessors.retainAll(toMergeSuccessors);
                if (mergedSuccessors.isEmpty()) { // no shared edge after merge
                    splittees.add(successor);
                }
            }

            if (splittees.size() == 0) {
                return;
            }

            if (splittees.size() == 1 && successors.size() == 1) {
                // We don't need a POSplit here, we can merge the splittee into spliter:
                // drop the splitter's POValueOutputTez leaf and the splittee's input root,
                // then splice the splittee's plan directly onto the splitter's plan.
                PhysicalOperator firstNodeLeaf = tezOp.plan.getLeaves().get(0);
                PhysicalOperator firstNodeLeafPred = tezOp.plan.getPredecessors(firstNodeLeaf).get(0);
                TezOperator singleSplitee = splittees.get(0);
                PhysicalOperator secondNodeRoot = singleSplitee.plan.getRoots().get(0);
                PhysicalOperator secondNodeSucc = singleSplitee.plan.getSuccessors(secondNodeRoot).get(0);
                tezOp.plan.remove(firstNodeLeaf);
                singleSplitee.plan.remove(secondNodeRoot);
                tezOp.plan.merge(singleSplitee.plan);
                tezOp.plan.connect(firstNodeLeafPred, secondNodeSucc);
                addSubPlanPropertiesToParent(tezOp, singleSplitee);
                removeSplittee(getPlan(), tezOp, singleSplitee);
            } else {
                // Multiple splittees: hang each splittee's plan off a POSplit that
                // replaces the splitter's POValueOutputTez leaf.
                POValueOutputTez valueOutput = (POValueOutputTez)tezOp.plan.getLeaves().get(0);
                POSplit split = new POSplit(OperatorKey.genOpKey(valueOutput.getOperatorKey().getScope()));
                split.copyAliasFrom(valueOutput);
                for (TezOperator splitee : splittees) {
                    PhysicalOperator spliteeRoot = splitee.plan.getRoots().get(0);
                    splitee.plan.remove(spliteeRoot);
                    split.addPlan(splitee.plan);
                    addSubPlanPropertiesToParent(tezOp, splitee);
                    removeSplittee(getPlan(), tezOp, splitee);
                    valueOutput.removeOutputKey(splitee.getOperatorKey().toString());
                }
                if (valueOutput.getTezOutputs().length > 0) {
                    // We still need valueOutput: some outputs were not merged, so keep
                    // it as one branch of the split.
                    PhysicalPlan phyPlan = new PhysicalPlan();
                    phyPlan.addAsLeaf(valueOutput);
                    split.addPlan(phyPlan);
                }
                PhysicalOperator pred = tezOp.plan.getPredecessors(valueOutput).get(0);
                tezOp.plan.disconnect(pred, valueOutput);
                tezOp.plan.remove(valueOutput);
                tezOp.plan.add(split);
                tezOp.plan.connect(pred, split);
            }
        } catch (PlanException e) {
            throw new VisitorException(e);
        }
    }

    /**
     * Removes a merged splittee vertex from the Tez plan, redirecting every
     * edge that touched the splittee to the splitter instead: predecessors'
     * out-edges move to the splitter, successors' in-edges are rewired (unless
     * the splitter already feeds them, e.g. self join), and union memberships
     * referencing the splittee are rewritten to the splitter's key.
     *
     * @throws VisitorException if a predecessor lacks an edge descriptor for
     *         the splittee (inconsistent plan)
     */
    private void removeSplittee(TezOperPlan plan, TezOperator splitter,
            TezOperator splittee) throws PlanException, VisitorException {
        plan.disconnect(splitter, splittee);
        String spliteeKey = splittee.getOperatorKey().toString();
        String splitterKey = splitter.getOperatorKey().toString();
        if (plan.getPredecessors(splittee) != null) {
            // Iterate over a copy: disconnect() mutates the predecessor list.
            for (TezOperator pred : new ArrayList<TezOperator>(plan.getPredecessors(splittee))) {
                TezEdgeDescriptor edge = pred.outEdges.remove(splittee.getOperatorKey());
                if (edge == null) {
                    throw new VisitorException("Edge description is empty");
                }
                plan.disconnect(pred, splittee);
                TezCompilerUtil.connectTezOpToNewSuccesor(plan, pred, splitter, edge, spliteeKey);
            }
        }
        if (plan.getSuccessors(splittee) != null) {
            List<TezOperator> succs = new ArrayList<TezOperator>(plan.getSuccessors(splittee));
            List<TezOperator> splitterSuccs = plan.getSuccessors(splitter);
            for (TezOperator succTezOperator : succs) {
                TezEdgeDescriptor edge = succTezOperator.inEdges.get(splittee.getOperatorKey());

                splitter.outEdges.remove(splittee.getOperatorKey());
                succTezOperator.inEdges.remove(splittee.getOperatorKey());
                plan.disconnect(splittee, succTezOperator);
                // Do not connect again in case of self join/cross/cogroup or union
                if (splitterSuccs == null || !splitterSuccs.contains(succTezOperator)) {
                    TezCompilerUtil.connectTezOpToNewPredecessor(plan, succTezOperator, splitter, edge, null);
                }
                TezCompilerUtil.replaceInput(succTezOperator, spliteeKey, splitterKey);
                if (succTezOperator.isUnion()) {
                    // A union may list the splittee more than once; replace every occurrence.
                    int index = succTezOperator.getUnionMembers().indexOf(splittee.getOperatorKey());
                    while (index > -1) {
                        succTezOperator.getUnionMembers().set(index, splitter.getOperatorKey());
                        index = succTezOperator.getUnionMembers().indexOf(splittee.getOperatorKey());
                    }
                }
            }
        }
        plan.remove(splittee);
    }

    /**
     * Propagates vertex-level properties from a merged sub-plan's operator to
     * the parent (splitter) operator.
     */
    private void addSubPlanPropertiesToParent(TezOperator parentOper, TezOperator subPlanOper) {
        // Copy only map side properties. For eg: crossKeys.
        // Do not copy reduce side specific properties. For eg: useSecondaryKey, segmentBelow, sortOrder, etc
        if (subPlanOper.getCrossKeys() != null) {
            for (String key : subPlanOper.getCrossKeys()) {
                parentOper.addCrossKey(key);
            }
        }
        parentOper.copyFeatures(subPlanOper, null);
        // For skewed join right input
        if (subPlanOper.getSampleOperator() != null) {
            parentOper.setSampleOperator(subPlanOper.getSampleOperator());
        }
        // Parent keeps the max requested parallelism of everything merged into it.
        if (subPlanOper.getRequestedParallelism() > parentOper.getRequestedParallelism()) {
            parentOper.setRequestedParallelism(subPlanOper.getRequestedParallelism());
        }
        subPlanOper.setRequestedParallelismByReference(parentOper);
        parentOper.UDFs.addAll(subPlanOper.UDFs);
        parentOper.scalars.addAll(subPlanOper.scalars);
        if (subPlanOper.outEdges != null) {
            for (Entry<OperatorKey, TezEdgeDescriptor> entry: subPlanOper.outEdges.entrySet()) {
                parentOper.outEdges.put(entry.getKey(), entry.getValue());
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.ql.exec.vector.expressions;

import java.lang.reflect.Constructor;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;

import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluatorFactory;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.exec.vector.VectorExtractRow;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomBatchSource;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource;
import org.apache.hadoop.hive.ql.exec.vector.VectorizationContext;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.GenerationSpec;
import org.apache.hadoop.hive.ql.exec.vector.VectorRandomRowSource.SupportedTypes;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.udf.VectorUDFAdaptor;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNegative;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;

/**
 * Randomized test for struct-field extraction ({@link ExprNodeFieldDesc}):
 * generates random struct-typed rows, evaluates {@code col.field} both in
 * row mode (ExprNodeEvaluator) and through the vectorized expression, and
 * asserts the two produce identical results for every row.
 */
public class TestVectorStructField {

  @Test
  public void testStructField() throws Exception {
    // Fixed seed for reproducibility; several iterations to vary the random
    // struct shape and field types.
    Random random = new Random(7743);

    for (int i = 0; i < 5; i++) {
      doStructFieldTests(random);
    }
  }

  /** The two evaluation paths being compared; ROW_MODE provides the expected values. */
  public enum StructFieldTestMode {
    ROW_MODE,
    VECTOR_EXPRESSION;

    static final int count = values().length;
  }

  /**
   * Generates one random struct type and runs a field-extraction comparison
   * for each of its fields.
   */
  private void doStructFieldTests(Random random) throws Exception {
    String structTypeName =
        VectorRandomRowSource.getDecoratedTypeName(
            random, "struct", SupportedTypes.ALL, /* allowedTypeNameSet */ null,
            /* depth */ 0, /* maxDepth */ 2);
    StructTypeInfo structTypeInfo =
        (StructTypeInfo) TypeInfoUtils.getTypeInfoFromTypeString(structTypeName);

    List<String> fieldNameList = structTypeInfo.getAllStructFieldNames();
    final int fieldCount = fieldNameList.size();
    for (int fieldIndex = 0; fieldIndex < fieldCount; fieldIndex++) {
      doOneStructFieldTest(random, structTypeInfo, structTypeName, fieldIndex);
    }
  }

  /**
   * Runs one field extraction (field {@code fieldIndex} of the struct) in both
   * test modes over a batch of random rows and verifies row-mode and
   * vector-mode results agree row by row (including NULLs).
   */
  private void doOneStructFieldTest(Random random, StructTypeInfo structTypeInfo,
      String structTypeName, int fieldIndex)
          throws Exception {

    List<GenerationSpec> generationSpecList = new ArrayList<GenerationSpec>();
    List<DataTypePhysicalVariation> explicitDataTypePhysicalVariationList =
        new ArrayList<DataTypePhysicalVariation>();

    List<String> columns = new ArrayList<String>();
    int columnNum = 1;

    // Single struct-typed input column.
    generationSpecList.add(
        GenerationSpec.createSameType(structTypeInfo));
    explicitDataTypePhysicalVariationList.add(DataTypePhysicalVariation.NONE);

    ExprNodeDesc col1Expr;
    String columnName = "col" + (columnNum++);
    col1Expr = new ExprNodeColumnDesc(structTypeInfo, columnName, "table", false);
    columns.add(columnName);

    ObjectInspector structObjectInspector =
        VectorRandomRowSource.getObjectInspector(structTypeInfo);
    List<ObjectInspector> objectInspectorList = new ArrayList<ObjectInspector>();
    objectInspectorList.add(structObjectInspector);

    List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
    children.add(col1Expr);

    //----------------------------------------------------------------------------------------------

    String[] columnNames = columns.toArray(new String[0]);

    VectorRandomRowSource rowSource = new VectorRandomRowSource();

    rowSource.initGenerationSpecSchema(
        random, generationSpecList, /* maxComplexDepth */ 0,
        /* allowNull */ true, /* isUnicodeOk */ true,
        explicitDataTypePhysicalVariationList);

    Object[][] randomRows = rowSource.randomRows(100000);

    VectorRandomBatchSource batchSource =
        VectorRandomBatchSource.createInterestingBatches(
            random,
            rowSource,
            randomRows,
            null);

    List<String> fieldNameList = structTypeInfo.getAllStructFieldNames();
    List<TypeInfo> fieldTypeInfoList = structTypeInfo.getAllStructFieldTypeInfos();

    String randomFieldName = fieldNameList.get(fieldIndex);
    TypeInfo outputTypeInfo = fieldTypeInfoList.get(fieldIndex);

    // The expression under test: col1.<field>.
    ExprNodeFieldDesc exprNodeFieldDesc =
        new ExprNodeFieldDesc(outputTypeInfo, col1Expr, randomFieldName, /* isList */ false);

    final int rowCount = randomRows.length;
    Object[][] resultObjectsArray = new Object[StructFieldTestMode.count][];
    for (int i = 0; i < StructFieldTestMode.count; i++) {

      Object[] resultObjects = new Object[rowCount];
      resultObjectsArray[i] = resultObjects;

      StructFieldTestMode testMode = StructFieldTestMode.values()[i];
      switch (testMode) {
      case ROW_MODE:
        doRowStructFieldTest(
            structTypeInfo,
            columns, children,
            exprNodeFieldDesc,
            randomRows,
            rowSource.rowStructObjectInspector(),
            outputTypeInfo,
            resultObjects);
        break;
      case VECTOR_EXPRESSION:
        doVectorStructFieldTest(
            structTypeInfo,
            columns,
            columnNames,
            rowSource.typeInfos(),
            rowSource.dataTypePhysicalVariations(),
            children,
            exprNodeFieldDesc,
            testMode,
            batchSource,
            exprNodeFieldDesc.getWritableObjectInspector(),
            outputTypeInfo,
            resultObjects);
        break;
      default:
        // FIX: message previously said "Unexpected Negative operator test mode"
        // (copy-paste from another test class).
        throw new RuntimeException("Unexpected struct field test mode " + testMode);
      }
    }

    for (int i = 0; i < rowCount; i++) {
      // Row-mode is the expected value.
      Object expectedResult = resultObjectsArray[0][i];

      for (int v = 1; v < StructFieldTestMode.count; v++) {
        Object vectorResult = resultObjectsArray[v][i];
        if (expectedResult == null || vectorResult == null) {
          // Both must be NULL together.
          if (expectedResult != null || vectorResult != null) {
            Assert.fail(
                "Row " + i +
                " structTypeName " + structTypeName +
                " outputTypeName " + outputTypeInfo.getTypeName() +
                " " + StructFieldTestMode.values()[v] +
                " result is NULL " + (vectorResult == null) +
                " does not match row-mode expected result is NULL " + (expectedResult == null) +
                " row values " + Arrays.toString(randomRows[i]));
          }
        } else {
          if (!expectedResult.equals(vectorResult)) {
            Assert.fail(
                "Row " + i +
                " structTypeName " + structTypeName +
                " outputTypeName " + outputTypeInfo.getTypeName() +
                " " + StructFieldTestMode.values()[v] +
                " result " + vectorResult.toString() +
                " (" + vectorResult.getClass().getSimpleName() + ")" +
                " does not match row-mode expected result " + expectedResult.toString() +
                " (" + expectedResult.getClass().getSimpleName() + ")" +
                " row values " + Arrays.toString(randomRows[i]));
          }
        }
      }
    }
  }

  /**
   * Evaluates the field expression row-by-row with a non-vectorized
   * {@link ExprNodeEvaluator}, producing the expected results.
   */
  private void doRowStructFieldTest(TypeInfo typeInfo,
      List<String> columns, List<ExprNodeDesc> children,
      ExprNodeFieldDesc exprNodeFieldDesc,
      Object[][] randomRows,
      ObjectInspector rowInspector,
      TypeInfo outputTypeInfo, Object[] resultObjects)
          throws Exception {

    HiveConf hiveConf = new HiveConf();
    ExprNodeEvaluator evaluator =
        ExprNodeEvaluatorFactory.get(exprNodeFieldDesc, hiveConf);
    evaluator.initialize(rowInspector);

    ObjectInspector objectInspector =
        TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
            outputTypeInfo);

    final int rowCount = randomRows.length;
    for (int i = 0; i < rowCount; i++) {
      Object[] row = randomRows[i];
      Object result = evaluator.evaluate(row);
      Object copyResult = null;
      try {
        copyResult =
            ObjectInspectorUtils.copyToStandardObject(
                result, objectInspector, ObjectInspectorCopyOption.WRITABLE);
      } catch (Exception e) {
        // FIX: previously this swallowed the exception and printed the opaque
        // marker "here". Keep the best-effort null result (the comparison loop
        // handles nulls) but surface what failed and where so a copy failure
        // is diagnosable instead of silently turning into a NULL expectation.
        System.out.println(
            "doRowStructFieldTest: copyToStandardObject failed at row " + i + ": " + e);
      }
      resultObjects[i] = copyResult;
    }
  }

  /**
   * Copies the single output column of a finished batch into
   * {@code resultObjects}, honoring the batch's selected-in-use mapping.
   */
  private void extractResultObjects(VectorizedRowBatch batch, int rowIndex,
      VectorExtractRow resultVectorExtractRow, Object[] scratchRow,
      ObjectInspector objectInspector, Object[] resultObjects) {

    boolean selectedInUse = batch.selectedInUse;
    int[] selected = batch.selected;
    for (int logicalIndex = 0; logicalIndex < batch.size; logicalIndex++) {
      final int batchIndex = (selectedInUse ? selected[logicalIndex] : logicalIndex);

      resultVectorExtractRow.extractRow(batch, batchIndex, scratchRow);

      Object copyResult =
          ObjectInspectorUtils.copyToStandardObject(
              scratchRow[0], objectInspector, ObjectInspectorCopyOption.WRITABLE);
      resultObjects[rowIndex++] = copyResult;
    }
  }

  /**
   * Compiles the field expression into a {@link VectorExpression}, evaluates it
   * batch-by-batch over the same random rows, and records per-row results for
   * comparison against row mode.
   */
  private void doVectorStructFieldTest(TypeInfo typeInfo,
      List<String> columns,
      String[] columnNames,
      TypeInfo[] typeInfos, DataTypePhysicalVariation[] dataTypePhysicalVariations,
      List<ExprNodeDesc> children,
      ExprNodeFieldDesc exprNodeFieldDesc,
      StructFieldTestMode testMode,
      VectorRandomBatchSource batchSource,
      ObjectInspector objectInspector,
      TypeInfo outputTypeInfo, Object[] resultObjects)
          throws Exception {

    HiveConf hiveConf = new HiveConf();

    VectorizationContext vectorizationContext =
        new VectorizationContext(
            "name",
            columns,
            Arrays.asList(typeInfos),
            Arrays.asList(dataTypePhysicalVariations),
            hiveConf);
    VectorExpression vectorExpression =
        vectorizationContext.getVectorExpression(exprNodeFieldDesc);
    vectorExpression.transientInit(hiveConf);

    // Flag (but do not fail) when vectorization fell back to the row-mode adaptor.
    if (testMode == StructFieldTestMode.VECTOR_EXPRESSION &&
        vectorExpression instanceof VectorUDFAdaptor) {
      System.out.println(
          "*NO NATIVE VECTOR EXPRESSION* typeInfo " + typeInfo.toString() +
          " negativeTestMode " + testMode +
          " vectorExpression " + vectorExpression.toString());
    }

    String[] outputScratchTypeNames = vectorizationContext.getScratchColumnTypeNames();

    VectorizedRowBatchCtx batchContext =
        new VectorizedRowBatchCtx(
            columnNames,
            typeInfos,
            dataTypePhysicalVariations,
            /* dataColumnNums */ null,
            /* partitionColumnCount */ 0,
            /* virtualColumnCount */ 0,
            /* neededVirtualColumns */ null,
            outputScratchTypeNames,
            null);

    VectorizedRowBatch batch = batchContext.createVectorizedRowBatch();

    VectorExtractRow resultVectorExtractRow = new VectorExtractRow();
    resultVectorExtractRow.init(
        new TypeInfo[] { outputTypeInfo }, new int[] { vectorExpression.getOutputColumnNum() });
    Object[] scratchRow = new Object[1];

    batchSource.resetBatchIteration();
    int rowIndex = 0;
    while (true) {
      if (!batchSource.fillNextBatch(batch)) {
        break;
      }
      vectorExpression.evaluate(batch);
      extractResultObjects(batch, rowIndex, resultVectorExtractRow, scratchRow,
          objectInspector, resultObjects);
      rowIndex += batch.size;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * @author Alexander T. Simbirtsev
 */
package javax.swing.text.html;

import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import javax.swing.event.DocumentEvent;
import javax.swing.event.HyperlinkEvent;
import javax.swing.text.AbstractDocument;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.DefaultStyledDocumentTest;
import javax.swing.text.Element;
import javax.swing.text.GapContent;
import javax.swing.text.SimpleAttributeSet;
import javax.swing.text.Style;
import javax.swing.text.StyleConstants;
import javax.swing.text.AbstractDocument.AttributeContext;
import javax.swing.text.AbstractDocument.Content;
import javax.swing.text.DefaultStyledDocument.ElementSpec;
import javax.swing.text.html.HTML.Tag;
import javax.swing.text.html.HTMLDocument.BlockElement;
import javax.swing.text.html.HTMLDocument.Iterator;
import javax.swing.text.html.HTMLDocument.RunElement;
import javax.swing.text.html.HTMLDocumentTestCase.DocumentController;
import javax.swing.text.html.HTMLDocumentTestCase.PublicHTMLDocument;
import javax.swing.text.html.parser.ParserDelegator;

/**
 * Unit tests for {@link HTMLDocument}, run against both the Harmony
 * implementation and the reference implementation; {@code isHarmony()}
 * branches encode the places where the two implementations deliberately
 * differ (exception types, offsets, element-spec counts).
 */
public class HTMLDocumentTest extends DefaultStyledDocumentTest {
    // Document under test; PublicHTMLDocument widens protected HTMLDocument
    // members for testing (declared in HTMLDocumentTestCase).
    protected PublicHTMLDocument htmlDoc;
    // Records calls to HTMLDocument.insert(...) made by the async loader.
    // NOTE(review): Marker type is declared outside this file — presumably a
    // test-support recorder with isOccurred()/getAuxiliary()/reset().
    private Marker insertMarker;

    /** Builds a fresh document and wires it into the superclass fixture. */
    protected void setUp() throws Exception {
        super.setUp();
        setIgnoreNotImplemented(true);
        htmlDoc = new PublicHTMLDocument();
        // Effectively disable the superclass timeout for these tests.
        timeoutDelay = Integer.MAX_VALUE;
        insertMarker = htmlDoc.getInsertMarker();
        doc = htmlDoc;
    }

    protected void tearDown() throws Exception {
        htmlDoc = null;
        super.tearDown();
    }

    /**
     * HTMLDocument(Content, StyleSheet) must adopt both arguments but must
     * NOT inherit the style sheet's base URL as the document base.
     */
    public void testHTMLDocumentContentStyleSheet() throws MalformedURLException {
        StyleSheet styles = new StyleSheet();
        final GapContent gapContent = new GapContent(10);
        htmlDoc = new PublicHTMLDocument(gapContent, styles);
        assertSame(styles, htmlDoc.getAttributeContextPublicly());
        assertSame(gapContent, htmlDoc.getContentPublicly());
        URL u1 = new URL("http://www.apache.org");
        styles.setBase(u1);
        htmlDoc = new PublicHTMLDocument(gapContent, styles);
        assertNull(htmlDoc.getBase());
    }

    /**
     * HTMLDocument(StyleSheet) must adopt the style sheet, default the
     * content to a GapContent, and not inherit the style sheet's base URL.
     */
    public void testHTMLDocumentStyleSheet() throws BadLocationException, MalformedURLException {
        StyleSheet styles = new StyleSheet();
        htmlDoc = new PublicHTMLDocument(styles);
        assertSame(styles, htmlDoc.getAttributeContextPublicly());
        final Content content = htmlDoc.getContentPublicly();
        assertTrue(content instanceof GapContent);
        URL u1 = new URL("http://www.apache.org");
        styles.setBase(u1);
        htmlDoc = new PublicHTMLDocument(styles);
        assertNull(htmlDoc.getBase());
    }

    /**
     * The no-arg constructor must create a GapContent and a StyleSheet
     * containing exactly one style named "default".
     */
    public void testHTMLDocument() {
        htmlDoc = new PublicHTMLDocument();
        assertTrue(htmlDoc.getContentPublicly() instanceof GapContent);
        AttributeContext styleSheet = htmlDoc.getAttributeContextPublicly();
        assertTrue(styleSheet instanceof StyleSheet);
        final Enumeration styleNames = ((StyleSheet)styleSheet).getStyleNames();
        assertTrue(styleNames.hasMoreElements());
        assertEquals("default", styleNames.nextElement());
        assertFalse(styleNames.hasMoreElements());
        final Style style = ((StyleSheet)styleSheet).getStyle("default");
        assertEquals(1, style.getAttributeCount());
        assertEquals("default", style.getAttribute(StyleConstants.NameAttribute));
    }

    /** createLeafElement must produce distinct RunElements tracking offsets. */
    public void testCreateLeafElement() throws BadLocationException {
        Element leaf = htmlDoc.createLeafElement(null, null, 0, 1);
        assertTrue(leaf instanceof HTMLDocument.RunElement);
        assertNull(leaf.getParentElement());
        assertEquals(0, leaf.getStartOffset());
        assertEquals(1, leaf.getEndOffset());
        // Each call creates a new instance.
        assertNotSame(htmlDoc.createLeafElement(null, null, 0, 1),
                      htmlDoc.createLeafElement(null, null, 0, 1));
        htmlDoc.insertString(0, "01234", null);
        Element leaf2 = htmlDoc.createLeafElement(leaf, null, 1, 3);
        assertTrue(leaf2 instanceof HTMLDocument.RunElement);
        assertSame(leaf, leaf2.getParentElement());
        assertEquals(1, leaf2.getStartOffset());
        assertEquals(3, leaf2.getEndOffset());
        // Offsets are position-based: removing the text collapses them to 0.
        htmlDoc.remove(0, 5);
        assertEquals(0, leaf2.getStartOffset());
        assertEquals(0, leaf2.getEndOffset());
    }

    /** createBranchElement must produce a childless BlockElement. */
    public void testCreateBranchElement() {
        Element branch = htmlDoc.createBranchElement(null, null);
        assertTrue(branch instanceof HTMLDocument.BlockElement);
        assertNull(branch.getParentElement());
        assertNull(branch.getElement(0));
        assertNull(branch.getElement(1));
        assertEquals(0, branch.getElementCount());
        // Since this branch element has no children yet, it has no start and
        // end offsets. Thus calling get{Start,End}Offset on an empty branch
        // element causes the exception being thrown. Harmony throws
        // ArrayIndexOutOfBoundsException, the RI throws NullPointerException.
        if (isHarmony()) {
            try {
                assertEquals(0, branch.getStartOffset());
                fail("getStartOffset on an empty BranchElement " + "causes exception");
            } catch (ArrayIndexOutOfBoundsException e) { }
            try {
                assertEquals(1, branch.getEndOffset());
                fail("getEndOffset on an empty BranchElement causes exception");
            } catch (ArrayIndexOutOfBoundsException e) { }
        } else {
            try {
                assertEquals(0, branch.getStartOffset());
                fail("getStartOffset on an empty BranchElement " + "causes exception");
            } catch (NullPointerException e) { }
            try {
                assertEquals(1, branch.getEndOffset());
                fail("getEndOffset on an empty BranchElement causes exception");
            } catch (NullPointerException e) { }
        }
    }

    /**
     * createDefaultRoot must build the html > body > p > content skeleton,
     * with each level carrying only its Name attribute (p also carries a
     * zero top margin) and no resolve parent.
     */
    public void testCreateDefaultRoot() {
        try {
            htmlDoc.insertString(0, "123", null);
        } catch (BadLocationException e) {}
        final Element root = htmlDoc.createDefaultRoot();
        assertSame(htmlDoc, root.getDocument());
        assertTrue(root instanceof BlockElement);
        AttributeSet attributes = root.getAttributes();
        assertNotNull(attributes);
        assertEquals(1, attributes.getAttributeCount());
        assertEquals(StyleConstants.NameAttribute, attributes.getAttributeNames().nextElement());
        assertEquals(Tag.HTML, attributes.getAttribute(StyleConstants.NameAttribute));
        assertEquals("html", root.getName());
        assertNull(root.getParentElement());
        assertNull(((BlockElement)root).getResolveParent());
        assertEquals(1, root.getElementCount());
        assertTrue(root.getElement(0) instanceof BlockElement);
        assertSame(root, root.getElement(0).getParentElement());

        // Level 2: body.
        Element child = root.getElement(0);
        attributes = child.getAttributes();
        assertNotNull(attributes);
        assertEquals(1, attributes.getAttributeCount());
        assertEquals(StyleConstants.NameAttribute, child.getAttributes().getAttributeNames().nextElement());
        assertEquals(Tag.BODY, attributes.getAttribute(StyleConstants.NameAttribute));
        assertEquals("body", child.getName());
        assertNull(((BlockElement)child).getResolveParent());
        assertEquals(1, child.getElementCount());
        assertTrue(child.getElement(0) instanceof BlockElement);
        assertSame(child, child.getElement(0).getParentElement());

        // Level 3: p (with margin-top: 0).
        child = child.getElement(0);
        attributes = child.getAttributes();
        assertNotNull(attributes);
        assertEquals(2, attributes.getAttributeCount());
        checkAttributes(attributes, StyleConstants.NameAttribute, Tag.P);
        checkAttributes(attributes, CSS.Attribute.MARGIN_TOP, "0");
        assertEquals("p", child.getName());
        assertNull(((BlockElement)child).getResolveParent());
        assertEquals(1, child.getElementCount());
        assertTrue(child.getElement(0) instanceof RunElement);
        assertSame(child, child.getElement(0).getParentElement());

        // Level 4: content leaf; end offset differs between implementations.
        child = child.getElement(0);
        attributes = child.getAttributes();
        assertNotNull(attributes);
        assertEquals(1, attributes.getAttributeCount());
        assertEquals(StyleConstants.NameAttribute, child.getAttributes().getAttributeNames().nextElement());
        assertEquals(Tag.CONTENT, attributes.getAttribute(StyleConstants.NameAttribute));
        assertEquals("content", child.getName());
        assertNull(((RunElement)child).getResolveParent());
        if (!isHarmony()) {
            assertEquals(0, child.getStartOffset());
            assertEquals(1, child.getEndOffset());
        } else {
            assertEquals(0, child.getStartOffset());
            assertEquals(4, child.getEndOffset());
        }
    }

    /** getElement(Element, Object, Object) looks up by arbitrary attribute. */
    public void testGetElementElementObjectObject() throws BadLocationException {
        final Element root = htmlDoc.getDefaultRootElement();
        final String value = "ASD";
        assertNull(htmlDoc.getElement(root, HTML.Attribute.ID, value));
        final SimpleAttributeSet attr = new SimpleAttributeSet();
        attr.addAttribute(HTML.Attribute.NAME, Tag.B.toString());
        attr.addAttribute(HTML.Attribute.ID, value);
        htmlDoc.insertString(0, "0000", attr);
        Element child1 = root.getElement(0).getElement(0).getElement(0);
        assertSame(child1, htmlDoc.getElement(root, HTML.Attribute.ID, value));
        assertSame(child1, htmlDoc.getElement(root, HTML.Attribute.NAME, Tag.B.toString()));
        assertNull(htmlDoc.getElement(root, HTML.Attribute.ID, "AAA"));
        // Lookup rooted at the bidi root must find nothing.
        assertNull(htmlDoc.getElement(htmlDoc.getRootElements()[1], HTML.Attribute.ID, "AAA"));
        attr.addAttribute(HTML.Attribute.NAME, Tag.I.toString());
        htmlDoc.insertString(4, "0000", attr);
        child1 = root.getElement(0).getElement(0).getElement(0);
        Element child2 = root.getElement(0).getElement(0).getElement(1);
        assertSame(child1, htmlDoc.getElement(root, HTML.Attribute.ID, value));
        assertSame(child1, htmlDoc.getElement(root, HTML.Attribute.NAME, Tag.B.toString()));
        assertSame(child2, htmlDoc.getElement(root, HTML.Attribute.NAME, Tag.I.toString()));
        assertNull(htmlDoc.getElement(root, HTML.Attribute.ID, "AAA"));
        assertNull(htmlDoc.getElement(htmlDoc.getRootElements()[1], HTML.Attribute.ID, "AAA"));
    }

    /** getElement(String) looks up by the HTML ID attribute only. */
    public void testGetElementString() throws Exception {
        final Element root = htmlDoc.getDefaultRootElement();
        final String value = "B";
        assertNull(htmlDoc.getElement(value));
        SimpleAttributeSet attr = new SimpleAttributeSet();
        attr.addAttribute(StyleConstants.NameAttribute, value);
        htmlDoc.insertString(0, "0000", attr);
        attr = new SimpleAttributeSet();
        attr.addAttribute(StyleConstants.NameAttribute, Tag.P.toString());
        attr.addAttribute(HTML.Attribute.ID, value);
        htmlDoc.insertString(0, "0000", attr);
        assertSame(root.getElement(0).getElement(0).getElement(0), htmlDoc.getElement(value));
        assertNull(htmlDoc.getElement("AAA"));
    }

    /**
     * getIterator must return a fresh iterator per call; most traversal
     * behavior (isValid/next/offsets) is only asserted on Harmony.
     */
    public void testGetIterator() throws BadLocationException {
        final SimpleAttributeSet attr = new SimpleAttributeSet();
        attr.addAttribute(StyleConstants.NameAttribute, Tag.B.toString());
        htmlDoc.insertString(0, "0000", attr);
        StyleConstants.setItalic(attr, true);
        htmlDoc.insertString(4, "1111", attr);
        final Iterator iterator1 = htmlDoc.getIterator(Tag.HTML);
        final Iterator iterator2 = htmlDoc.getIterator(Tag.HTML);
        final Iterator iterator3 = htmlDoc.getIterator(Tag.A);
        final Iterator iterator4 = htmlDoc.getIterator(Tag.P);
        final Iterator iterator5 = htmlDoc.getIterator(Tag.B);
        assertNotNull(iterator1);
        assertNotNull(iterator2);
        assertNotNull(iterator3);
        if (isHarmony()) {
            assertNotNull(iterator4);
        }
        assertNotNull(iterator5);
        assertNotSame(iterator1, iterator2);
        assertNotSame(iterator2, iterator3);
        assertNotSame(iterator3, iterator5);
        assertEquals(Tag.HTML, iterator1.getTag());
        if (isHarmony()) {
            assertTrue(iterator1.isValid());
            assertEquals(0, iterator1.getStartOffset());
            assertEquals(9, iterator1.getEndOffset());
            assertNotNull(iterator1.getAttributes());
            iterator1.next();
            // Exhausted iterators report -1/-1 and null attributes.
            assertFalse(iterator1.isValid());
            assertEquals(-1, iterator1.getStartOffset());
            assertEquals(-1, iterator1.getEndOffset());
            assertNull(iterator1.getAttributes());
        }
        assertEquals(Tag.HTML, iterator2.getTag());
        if (isHarmony()) {
            assertTrue(iterator2.isValid());
            assertEquals(0, iterator2.getStartOffset());
            assertEquals(9, iterator2.getEndOffset());
            assertNotNull(iterator2.getAttributes());
            iterator2.next();
            assertFalse(iterator2.isValid());
            assertEquals(-1, iterator2.getStartOffset());
            assertEquals(-1, iterator2.getEndOffset());
            assertNull(iterator2.getAttributes());
        }
        assertEquals(Tag.A, iterator3.getTag());
        if (isHarmony()) {
            // No A tags in the document: iterator starts invalid.
            assertFalse(iterator3.isValid());
            assertNull(iterator3.getAttributes());
            iterator3.next();
            assertEquals(-1, iterator3.getStartOffset());
            assertEquals(-1, iterator3.getEndOffset());
        }
        if (isHarmony()) {
            assertEquals(Tag.P, iterator4.getTag());
            assertTrue(iterator4.isValid());
            assertEquals(0, iterator4.getStartOffset());
            assertEquals(9, iterator4.getEndOffset());
            iterator4.next();
            assertFalse(iterator4.isValid());
            assertEquals(-1, iterator4.getStartOffset());
            assertEquals(-1, iterator4.getEndOffset());
            assertNull(iterator4.getAttributes());
        }
        assertEquals(Tag.B, iterator5.getTag());
        if (isHarmony()) {
            // Two B runs: plain [0,4) then italic [4,8).
            assertTrue(iterator5.isValid());
            assertEquals(0, iterator5.getStartOffset());
            assertEquals(4, iterator5.getEndOffset());
            assertFalse(StyleConstants.isBold(iterator5.getAttributes()));
            assertFalse(StyleConstants.isItalic(iterator5.getAttributes()));
            iterator5.next();
            assertTrue(iterator5.isValid());
            assertEquals(4, iterator5.getStartOffset());
            assertEquals(8, iterator5.getEndOffset());
            assertFalse(StyleConstants.isBold(iterator5.getAttributes()));
            assertTrue(StyleConstants.isItalic(iterator5.getAttributes()));
            iterator5.next();
            assertFalse(iterator5.isValid());
            assertEquals(-1, iterator5.getStartOffset());
            assertEquals(-1, iterator5.getEndOffset());
            assertNull(iterator5.getAttributes());
        }
    }

    /** getReader(int, int, int, Tag) must return a fresh HTMLReader per call. */
    public void testGetReaderIntIntIntTag() {
        HTMLEditorKit.ParserCallback reader1 = htmlDoc.getReader(0, 10, 100, null);
        HTMLEditorKit.ParserCallback reader2 = htmlDoc.getReader(0, 10, 100, null);
        HTMLEditorKit.ParserCallback reader3 = htmlDoc.getReader(10, 100, 10, Tag.P);
        assertNotNull(reader1);
        assertNotNull(reader2);
        assertNotNull(reader3);
        assertTrue(reader1 instanceof HTMLDocument.HTMLReader);
        assertTrue(reader2 instanceof HTMLDocument.HTMLReader);
        assertTrue(reader3 instanceof HTMLDocument.HTMLReader);
        assertNotSame(reader1, reader2);
        assertNotSame(reader2, reader3);
    }

    /** getReader(int) must return a fresh HTMLReader per call. */
    public void testGetReaderInt() {
        HTMLEditorKit.ParserCallback reader1 = htmlDoc.getReader(0);
        HTMLEditorKit.ParserCallback reader2 = htmlDoc.getReader(0);
        HTMLEditorKit.ParserCallback reader3 = htmlDoc.getReader(1);
        assertNotNull(reader1);
        assertNotNull(reader2);
        assertNotNull(reader3);
        assertTrue(reader1 instanceof HTMLDocument.HTMLReader);
        assertTrue(reader2 instanceof HTMLDocument.HTMLReader);
        assertTrue(reader3 instanceof HTMLDocument.HTMLReader);
        assertNotSame(reader1, reader2);
        assertNotSame(reader2, reader3);
    }

    /** getStyleSheet must expose the document's attribute context. */
    public void testGetStyleSheet() {
        AttributeContext styleSheet = htmlDoc.getAttributeContextPublicly();
        assertTrue(styleSheet instanceof StyleSheet);
        assertSame(styleSheet, htmlDoc.getStyleSheet());
    }

    /**
     * processHTMLFrameHyperlinkEvent: events targeting a non-frame are
     * ignored; "_self" mutates the frame's SRC in place (CHANGE event);
     * "_parent" replaces the enclosing frameset (INSERT then REMOVE).
     */
    public void testProcessHTMLFrameHyperlinkEvent() throws Exception {
        final String frameSetHTML = "<FRAMESET><FRAME name=\"1\" src=\"1.html\"><FRAME name=\"2\" src=\"2.html\"><img name=\"3\" src=\"3.jpg\"></FRAMESET>";
        HTMLDocumentTestCase.loadDocument(htmlDoc, frameSetHTML);
        final Element body = htmlDoc.getDefaultRootElement().getElement(1);
        final Element frameSet = body.getElement(0);
        // NOTE(review): frame1, event3 and event5 are set up but never used below.
        final Element frame1 = frameSet.getElement(0);
        final Element frame2 = frameSet.getElement(1);
        final String urlStr1 = "file:/test1.html";
        final String urlStr2 = "file:/test2.html";
        HTMLFrameHyperlinkEvent event1 = new HTMLFrameHyperlinkEvent(htmlDoc, HyperlinkEvent.EventType.ACTIVATED, new URL(urlStr1), "3");
        HTMLFrameHyperlinkEvent event2 = new HTMLFrameHyperlinkEvent(htmlDoc, HyperlinkEvent.EventType.ACTIVATED, new URL(urlStr1), frame2, "_self");
        HTMLFrameHyperlinkEvent event3 = new HTMLFrameHyperlinkEvent(htmlDoc, HyperlinkEvent.EventType.ACTIVATED, new URL(urlStr2), frame2, "_top");
        HTMLFrameHyperlinkEvent event4 = new HTMLFrameHyperlinkEvent(htmlDoc, HyperlinkEvent.EventType.ACTIVATED, new URL(urlStr2), frame2, "_parent");
        HTMLFrameHyperlinkEvent event5 = new HTMLFrameHyperlinkEvent(htmlDoc, HyperlinkEvent.EventType.ACTIVATED, new URL(urlStr1), "1");
        final DocumentController controller = new DocumentController();
        htmlDoc.addDocumentListener(controller);
        // event1 targets the <img>, which is not a frame: no document change.
        htmlDoc.processHTMLFrameHyperlinkEvent(event1);
        assertFalse(controller.isChanged());
        controller.reset();
        // "_self": SRC of frame2 is updated in place via a CHANGE event.
        assertSame(frame2, event2.getSourceElement());
        htmlDoc.processHTMLFrameHyperlinkEvent(event2);
        assertNull(htmlDoc.getParser());
        assertTrue(controller.isChanged());
        assertEquals(1, controller.getNumEvents());
        assertTrue(controller.getEvent(0) instanceof AbstractDocument.DefaultDocumentEvent);
        AbstractDocument.DefaultDocumentEvent event = (AbstractDocument.DefaultDocumentEvent)controller.getEvent(0);
        assertEquals(DocumentEvent.EventType.CHANGE, event.getType());
        assertEquals(frame2.getStartOffset(), event.getOffset());
        assertEquals(frame2.getEndOffset() - frame2.getStartOffset(), event.getLength());
        assertSame(htmlDoc, event.getDocument());
        assertNull(event.getChange(frame2));
        assertEquals(DocumentEvent.EventType.CHANGE, controller.getEvent(0).getType());
        assertEquals(urlStr1, frame2.getAttributes().getAttribute(HTML.Attribute.SRC));
        controller.reset();
        // "_parent": the frameset is replaced by a single frame.
        htmlDoc.setParser(null);
        htmlDoc.processHTMLFrameHyperlinkEvent(event4);
        assertNotNull(htmlDoc.getParser());
        assertTrue(controller.isChanged());
        assertEquals(2, controller.getNumEvents());
        assertEquals(DocumentEvent.EventType.INSERT, controller.getEvent(0).getType());
        assertEquals(DocumentEvent.EventType.REMOVE, controller.getEvent(1).getType());
        Element newFrame = body.getElement(0);
        AttributeSet frameAttr = newFrame.getAttributes();
        assertEquals(2, frameAttr.getAttributeCount());
        assertEquals(urlStr2, frameAttr.getAttribute(HTML.Attribute.SRC));
        assertEquals(Tag.FRAME, frameAttr.getAttribute(StyleConstants.NameAttribute));
        controller.reset();
        // tests improper use behaviour (for compatibility reasons)
        HTMLFrameHyperlinkEvent event6 = new HTMLFrameHyperlinkEvent(htmlDoc, HyperlinkEvent.EventType.ACTIVATED, new URL(urlStr1), newFrame, "_parent");
        ParserDelegator parser = new ParserDelegator();
        htmlDoc.setParser(parser);
        htmlDoc.processHTMLFrameHyperlinkEvent(event6);
        assertSame(parser, htmlDoc.getParser());
        assertTrue(controller.isChanged());
        assertEquals(2, controller.getNumEvents());
        assertEquals(DocumentEvent.EventType.INSERT, controller.getEvent(0).getType());
        assertEquals(DocumentEvent.EventType.REMOVE, controller.getEvent(1).getType());
        newFrame = htmlDoc.getDefaultRootElement().getElement(1);
        assertNotSame(frameSet, newFrame);
        frameAttr = newFrame.getAttributes();
        assertEquals(2, frameAttr.getAttributeCount());
        assertEquals(urlStr1, frameAttr.getAttribute(HTML.Attribute.SRC));
        assertEquals(Tag.FRAME, frameAttr.getAttribute(StyleConstants.NameAttribute));
        controller.reset();
    }

    /**
     * setBase must propagate to the style sheet, but changing the style
     * sheet's base afterwards must not affect the document base.
     */
    public void testGetSetBase() throws Exception {
        URL u1 = new URL("http://www.apache.org");
        URL u2 = new URL("http://www.harmony.incubator.apache.org");
        // NOTE(review): 'tail' is never used.
        String tail = "tail";
        htmlDoc.setBase(u1);
        assertSame(u1, htmlDoc.getBase());
        assertSame(u1, htmlDoc.getStyleSheet().getBase());
        htmlDoc.getStyleSheet().setBase(u2);
        assertSame(u2, htmlDoc.getStyleSheet().getBase());
        assertSame(u1, htmlDoc.getBase());
    }

    /** Parser defaults to null and is stored as-is by setParser. */
    public void testGetSetParser() {
        assertNull(htmlDoc.getParser());
        ParserDelegator parser = new ParserDelegator();
        htmlDoc.setParser(parser);
        assertSame(parser, htmlDoc.getParser());
    }

    /**
     * preservesUnknownTags defaults to true; when false an unknown tag is
     * dropped from the element structure, when true it is kept as start/end
     * leaves (hence 15 vs 13 element specs and 4 vs 2 children).
     */
    public void testGetSetPreservesUnknownTags() throws Exception {
        assertTrue(htmlDoc.getPreservesUnknownTags());
        htmlDoc.setPreservesUnknownTags(false);
        assertFalse(htmlDoc.getPreservesUnknownTags());
        Marker createMarker = htmlDoc.getCreateMarker();
        final String htmlStr = "<html><body><badtag>0</badtag></body></html>";
        HTMLDocumentTestCase.loadDocument(htmlDoc, htmlStr);
        Element parent = htmlDoc.getDefaultRootElement().getElement(1).getElement(0);
        ArrayList array = (ArrayList)createMarker.getAuxiliary();
        assertEquals(1, array.size());
        assertEquals(13, ((ElementSpec[])(array.get(0))).length);
        assertEquals(2, parent.getElementCount());
        createMarker.reset();
        htmlDoc = new PublicHTMLDocument();
        htmlDoc.setPreservesUnknownTags(true);
        createMarker = htmlDoc.getCreateMarker();
        HTMLDocumentTestCase.loadDocument(htmlDoc, htmlStr);
        parent = htmlDoc.getDefaultRootElement().getElement(1).getElement(0);
        assertTrue(createMarker.isOccurred());
        array = (ArrayList)createMarker.getAuxiliary();
        assertEquals(1, array.size());
        assertEquals(15, ((ElementSpec[])(array.get(0))).length);
        assertEquals(4, parent.getElementCount());
        assertEquals("badtag", parent.getElement(0).getName());
        assertEquals(Tag.CONTENT.toString(), parent.getElement(1).getName());
        assertEquals("badtag", parent.getElement(2).getName());
        checkAttributes(parent.getElement(2).getAttributes(), HTML.Attribute.ENDTAG, Boolean.TRUE);
        createMarker.reset();
    }

    /**
     * Token threshold controls how parsed content is flushed in batches:
     * lower thresholds yield more, smaller insert() calls. The expected
     * element-spec counts below pin the Harmony batching behavior.
     */
    public void testGetSetTokenThreshold() throws Exception {
        assertEquals(Integer.MAX_VALUE, htmlDoc.getTokenThreshold());
        htmlDoc.setTokenThreshold(100);
        assertEquals(100, htmlDoc.getTokenThreshold());
        final String longString = "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a"
            + "<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a<br>a";
        // Threshold 1: five flushes of increasing size.
        htmlDoc = new PublicHTMLDocument();
        insertMarker = htmlDoc.getInsertMarker();
        Marker createMarker = htmlDoc.getCreateMarker();
        final ParserDelegator parser = new ParserDelegator();
        htmlDoc.setParser(parser);
        htmlDoc.setEditable(false);
        htmlDoc.setTokenThreshold(1);
        Element root = htmlDoc.getDefaultRootElement();
        Element branch = root.getElement(0).getElement(0);
        htmlDoc.insertAfterStart(branch, longString);
        assertFalse(createMarker.isOccurred());
        assertTrue(insertMarker.isOccurred());
        ArrayList info = (ArrayList)insertMarker.getAuxiliary();
        assertNotNull(info);
        assertEquals(5, info.size()/2);
        assertEquals(2, ((ElementSpec[])(info.get(0))).length);
        assertEquals(6, ((ElementSpec[])info.get(2)).length);
        assertEquals(26, ((ElementSpec[])info.get(4)).length);
        assertEquals(126, ((ElementSpec[])info.get(6)).length);
        assertEquals(305, ((ElementSpec[])info.get(8)).length);
        assertEquals(1, htmlDoc.getTokenThreshold());
        insertMarker.reset();
        // Threshold 2: still five flushes, with doubled initial batches.
        htmlDoc = new PublicHTMLDocument();
        insertMarker = htmlDoc.getInsertMarker();
        htmlDoc.setEditable(false);
        htmlDoc.setParser(parser);
        htmlDoc.setTokenThreshold(2);
        root = htmlDoc.getDefaultRootElement();
        branch = root.getElement(0).getElement(0);
        htmlDoc.insertAfterStart(branch, longString);
        assertFalse(createMarker.isOccurred());
        assertTrue(insertMarker.isOccurred());
        info = (ArrayList)insertMarker.getAuxiliary();
        assertNotNull(info);
        assertEquals(5, info.size()/2);
        assertEquals(4, ((ElementSpec[])info.get(0)).length);
        assertEquals(12, ((ElementSpec[])info.get(2)).length);
        assertEquals(52, ((ElementSpec[])info.get(4)).length);
        assertEquals(252, ((ElementSpec[])info.get(6)).length);
        assertEquals(145, ((ElementSpec[])info.get(8)).length);
        assertEquals(2, htmlDoc.getTokenThreshold());
        insertMarker.reset();
        // Threshold 5: only four flushes.
        htmlDoc = new PublicHTMLDocument();
        insertMarker = htmlDoc.getInsertMarker();
        htmlDoc.setEditable(false);
        htmlDoc.setParser(parser);
        htmlDoc.setTokenThreshold(5);
        root = htmlDoc.getDefaultRootElement();
        branch = root.getElement(0).getElement(0);
        htmlDoc.insertAfterStart(branch, longString);
        assertFalse(createMarker.isOccurred());
        assertTrue(insertMarker.isOccurred());
        info = (ArrayList)insertMarker.getAuxiliary();
        assertNotNull(info);
        assertEquals(4, info.size()/2);
        assertEquals(6, ((ElementSpec[])info.get(0)).length);
        assertEquals(26, ((ElementSpec[])info.get(2)).length);
        assertEquals(126, ((ElementSpec[])info.get(4)).length);
        assertEquals(307, ((ElementSpec[])info.get(6)).length);
        assertEquals(5, htmlDoc.getTokenThreshold());
        insertMarker.reset();
    }

    // Intentionally empty: overrides the superclass test, which does not
    // apply to HTMLDocument as exercised here.
    public void testGetDefaultRootElement() {
    }

    /** Plain insert must land in a single CONTENT-named character element. */
    public void testInsertUpdate() throws Exception {
        doc.insertString(0, "1111", null);
        final AttributeSet attr = doc.getCharacterElement(2).getAttributes();
        assertEquals(1, attr.getAttributeCount());
        assertEquals(Tag.CONTENT, attr.getAttribute(StyleConstants.NameAttribute));
    }

    // Intentionally empty: serialization is not covered for HTMLDocument.
    public void testSerializable() throws Exception {
    }

    /** Delegates attribute checking to the shared test-case helper. */
    protected static void checkAttributes(final AttributeSet attr, final Object key, final Object value) {
        HTMLDocumentTestCase.checkAttributes(attr, key, value);
    }
}
/*******************************************************************************
 * Copyright 2013 SAP AG
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package com.sap.core.odata.core;

import java.net.URI;
import java.text.ParseException;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;

import javax.servlet.ServletConfig;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriInfo;

import com.sap.core.odata.api.ODataServiceFactory;
import com.sap.core.odata.api.batch.BatchException;
import com.sap.core.odata.api.commons.HttpStatusCodes;
import com.sap.core.odata.api.ep.EntityProvider;
import com.sap.core.odata.api.ep.EntityProviderException;
import com.sap.core.odata.api.exception.MessageReference;
import com.sap.core.odata.api.exception.ODataApplicationException;
import com.sap.core.odata.api.exception.ODataException;
import com.sap.core.odata.api.exception.ODataHttpException;
import com.sap.core.odata.api.exception.ODataMessageException;
import com.sap.core.odata.api.processor.ODataContext;
import com.sap.core.odata.api.processor.ODataErrorCallback;
import com.sap.core.odata.api.processor.ODataErrorContext;
import com.sap.core.odata.api.processor.ODataResponse;
import com.sap.core.odata.core.commons.ContentType;
import com.sap.core.odata.core.ep.ProviderFacadeImpl;
import com.sap.core.odata.core.exception.MessageService;
import com.sap.core.odata.core.exception.MessageService.Message;
import com.sap.core.odata.core.exception.ODataRuntimeException;

/**
 * Translates an {@link Exception} raised during OData request processing
 * into an OData error response. Negotiates the response content type
 * ($format query option first, then the Accept header), resolves a
 * localized message where possible, and delegates to an application-supplied
 * {@link ODataErrorCallback} when one is configured.
 *
 * @author SAP AG
 */
public class ODataExceptionWrapper {

    // Query option used to force the error document format.
    private static final String DOLLAR_FORMAT = "$format";
    // Only "$format=json" selects JSON; any other value falls back to XML
    // (per the OData V2 specification for error documents).
    private static final String DOLLAR_FORMAT_JSON = "json";
    // Locale used when no acceptable request language can be honored.
    private static final Locale DEFAULT_RESPONSE_LOCALE = Locale.ENGLISH;

    private final String contentType;
    private final Locale messageLocale;
    private final Map<String, List<String>> httpRequestHeaders;
    // Optional application hook for customizing error responses; may be null.
    private final ODataErrorCallback callback;
    private final ODataErrorContext errorContext = new ODataErrorContext();
    private final URI requestUri;

    /**
     * Creates a wrapper from an {@link ODataContext} (in-processing path).
     * Wraps any failure while reading the context into an
     * {@link ODataRuntimeException}.
     */
    public ODataExceptionWrapper(final ODataContext context, final Map<String, String> queryParameters, final List<String> acceptHeaderContentTypes) {
        contentType = getContentType(queryParameters, acceptHeaderContentTypes).toContentTypeString();
        messageLocale = MessageService.getSupportedLocale(getLanguages(context), DEFAULT_RESPONSE_LOCALE);
        httpRequestHeaders = context.getRequestHeaders();
        try {
            requestUri = context.getPathInfo().getRequestUri();
            errorContext.setPathInfo(context.getPathInfo());
            callback = getErrorHandlerCallbackFromContext(context);
        } catch (Exception e) {
            throw new ODataRuntimeException("Exception occurred", e);
        }
    }

    /**
     * Creates a wrapper from raw JAX-RS/servlet artifacts (used when the
     * failure happens before an ODataContext exists).
     */
    public ODataExceptionWrapper(final UriInfo uriInfo, final HttpHeaders httpHeaders, final ServletConfig servletConfig, final HttpServletRequest servletRequest) {
        contentType = getContentType(uriInfo, httpHeaders).toContentTypeString();
        messageLocale = MessageService.getSupportedLocale(getLanguages(httpHeaders), DEFAULT_RESPONSE_LOCALE);
        httpRequestHeaders = httpHeaders.getRequestHeaders();
        requestUri = uriInfo.getRequestUri();
        try {
            callback = getErrorHandlerCallbackFromServletConfig(servletConfig, servletRequest);
        } catch (Exception e) {
            throw new ODataRuntimeException("Exception occurred", e);
        }
    }

    /**
     * Builds the OData error response for the given exception.
     * Unwraps known ODataException causes, fills the error context, applies
     * HTTP-status/error-code enrichment for application and message
     * exceptions, and renders via the callback (if any) or the default
     * error document writer. If error handling itself fails, a last-resort
     * plain-text 500 response is returned.
     *
     * @param exception the exception to translate (never rethrown)
     * @return the error {@link ODataResponse}; never {@code null}
     */
    public ODataResponse wrapInExceptionResponse(final Exception exception) {
        try {
            final Exception toHandleException = extractException(exception);
            fillErrorContext(toHandleException);
            if (toHandleException instanceof ODataApplicationException) {
                enhanceContextWithApplicationException((ODataApplicationException) toHandleException);
            } else if (toHandleException instanceof ODataMessageException) {
                enhanceContextWithMessageException((ODataMessageException) toHandleException);
            }
            ODataResponse oDataResponse;
            if (callback != null) {
                oDataResponse = handleErrorCallback(callback);
            } else {
                oDataResponse = EntityProvider.writeErrorDocument(errorContext);
            }
            return oDataResponse;
        } catch (Exception e) {
            // NOTE(review): 'e' is intentionally swallowed here (last-resort
            // path must not throw), but it is not logged anywhere — consider
            // at least logging it before returning the generic 500.
            ODataResponse response = ODataResponse.entity("Exception during error handling occured!")
                .contentHeader(ContentType.TEXT_PLAIN.toContentTypeString())
                .status(HttpStatusCodes.INTERNAL_SERVER_ERROR).build();
            return response;
        }
    }

    /**
     * Invokes the application error callback; if the callback itself throws
     * an {@link ODataApplicationException}, that exception replaces the
     * original error context and the default error document is written.
     */
    private ODataResponse handleErrorCallback(final ODataErrorCallback callback) throws EntityProviderException {
        ODataResponse oDataResponse;
        try {
            oDataResponse = callback.handleError(errorContext);
        } catch (ODataApplicationException e) {
            fillErrorContext(e);
            enhanceContextWithApplicationException(e);
            oDataResponse = new ProviderFacadeImpl().writeErrorDocument(errorContext);
        }
        return oDataResponse;
    }

    /** Applies the application exception's HTTP status and error code. */
    private void enhanceContextWithApplicationException(final ODataApplicationException toHandleException) {
        errorContext.setHttpStatus(toHandleException.getHttpStatus());
        errorContext.setErrorCode(toHandleException.getCode());
    }

    /**
     * Applies the message exception's error code, localized text and an
     * appropriate HTTP status (exception's own status for HTTP exceptions,
     * 400 for entity-provider and batch errors).
     */
    private void enhanceContextWithMessageException(final ODataMessageException toHandleException) {
        errorContext.setErrorCode(toHandleException.getErrorCode());
        MessageReference messageReference = toHandleException.getMessageReference();
        Message localizedMessage = messageReference == null ? null : extractEntity(messageReference);
        if (localizedMessage != null) {
            errorContext.setMessage(localizedMessage.getText());
            errorContext.setLocale(localizedMessage.getLocale());
        }
        if (toHandleException instanceof ODataHttpException) {
            errorContext.setHttpStatus(((ODataHttpException) toHandleException).getHttpStatus());
        } else if (toHandleException instanceof EntityProviderException) {
            errorContext.setHttpStatus(HttpStatusCodes.BAD_REQUEST);
        } else if (toHandleException instanceof BatchException) {
            errorContext.setHttpStatus(HttpStatusCodes.BAD_REQUEST);
        }
    }

    /**
     * Fill current error context ({@link #errorContext}) with values from given {@link Exception} parameter.
     * Defaults: HTTP 500, no error code, the exception's own message, and
     * the default response locale; request headers are copied through.
     *
     * @param exception exception with values to be set on error context
     */
    private void fillErrorContext(final Exception exception) {
        errorContext.setContentType(contentType);
        errorContext.setException(exception);
        errorContext.setHttpStatus(HttpStatusCodes.INTERNAL_SERVER_ERROR);
        errorContext.setErrorCode(null);
        errorContext.setMessage(exception.getMessage());
        errorContext.setLocale(DEFAULT_RESPONSE_LOCALE);
        errorContext.setRequestUri(requestUri);
        if (httpRequestHeaders != null) {
            for (Entry<String, List<String>> entry : httpRequestHeaders.entrySet()) {
                errorContext.putRequestHeader(entry.getKey(), entry.getValue());
            }
        }
    }

    /**
     * Unwraps an {@link ODataException}'s most specific known cause
     * (application > http > message); otherwise returns the exception as-is.
     */
    private Exception extractException(final Exception exception) {
        if (exception instanceof ODataException) {
            ODataException odataException = (ODataException) exception;
            if (odataException.isCausedByApplicationException()) {
                return odataException.getApplicationExceptionCause();
            } else if (odataException.isCausedByHttpException()) {
                return odataException.getHttpExceptionCause();
            } else if (odataException.isCausedByMessageException()) {
                return odataException.getMessageExceptionCause();
            }
        }
        return exception;
    }

    /** Resolves the message text for the negotiated message locale. */
    private Message extractEntity(final MessageReference context) {
        return MessageService.getMessage(messageLocale, context);
    }

    /**
     * Acceptable languages from the OData context, defaulting to English
     * when none are given or the Accept-Language header is unparsable.
     */
    private List<Locale> getLanguages(final ODataContext context) {
        try {
            if (context.getAcceptableLanguages().isEmpty()) {
                return Arrays.asList(DEFAULT_RESPONSE_LOCALE);
            }
            return context.getAcceptableLanguages();
        } catch (WebApplicationException e) {
            if (e.getCause() != null && e.getCause().getClass() == ParseException.class) {
                // invalid accept-language string in http header
                // compensate exception with using default locale
                return Arrays.asList(DEFAULT_RESPONSE_LOCALE);
            }
            // not able to compensate exception -> re-throw
            throw e;
        }
    }

    /**
     * Acceptable languages from the JAX-RS headers, defaulting to English
     * when none are given or the Accept-Language header is unparsable.
     */
    private List<Locale> getLanguages(final HttpHeaders httpHeaders) {
        try {
            if (httpHeaders.getAcceptableLanguages().isEmpty()) {
                return Arrays.asList(DEFAULT_RESPONSE_LOCALE);
            }
            return httpHeaders.getAcceptableLanguages();
        } catch (WebApplicationException e) {
            if (e.getCause() != null && e.getCause().getClass() == ParseException.class) {
                // invalid accept-language string in http header
                // compensate exception with using default locale
                return Arrays.asList(DEFAULT_RESPONSE_LOCALE);
            }
            // not able to compensate exception -> re-throw
            throw e;
        }
    }

    /** $format wins over the Accept header when both are present. */
    private ContentType getContentType(final Map<String, String> queryParameters, final List<String> acceptHeaderContentTypes) {
        ContentType contentType = getContentTypeByUriInfo(queryParameters);
        if (contentType == null) {
            contentType = getContentTypeByAcceptHeader(acceptHeaderContentTypes);
        }
        return contentType;
    }

    /** Maps "$format=json" to JSON and any other $format value to XML. */
    private ContentType getContentTypeByUriInfo(final Map<String, String> queryParameters) {
        ContentType contentType = null;
        if (queryParameters != null) {
            if (queryParameters.containsKey(DOLLAR_FORMAT)) {
                String contentTypeString = queryParameters.get(DOLLAR_FORMAT);
                if (DOLLAR_FORMAT_JSON.equals(contentTypeString)) {
                    contentType = ContentType.APPLICATION_JSON;
                } else {
                    //Any format mentioned in the $format parameter other than json results in an application/xml content type for error messages
                    //due to the OData V2 Specification
                    contentType = ContentType.APPLICATION_XML;
                }
            }
        }
        return contentType;
    }

    /**
     * Picks JSON only for explicit JSON accept types; wildcard and all
     * XML/Atom variants — and anything unparsable — resolve to XML.
     */
    private ContentType getContentTypeByAcceptHeader(final List<String> acceptHeaderContentTypes) {
        for (String acceptContentType : acceptHeaderContentTypes) {
            if (ContentType.isParseable(acceptContentType)) {
                ContentType convertedContentType = ContentType.create(acceptContentType);
                if (convertedContentType.isWildcard()
                    || ContentType.APPLICATION_XML.equals(convertedContentType) || ContentType.APPLICATION_XML_CS_UTF_8.equals(convertedContentType)
                    || ContentType.APPLICATION_ATOM_XML.equals(convertedContentType) || ContentType.APPLICATION_ATOM_XML_CS_UTF_8.equals(convertedContentType)) {
                    return ContentType.APPLICATION_XML;
                } else if (ContentType.APPLICATION_JSON.equals(convertedContentType) || ContentType.APPLICATION_JSON_CS_UTF_8.equals(convertedContentType)) {
                    return ContentType.APPLICATION_JSON;
                }
            }
        }
        return ContentType.APPLICATION_XML;
    }

    /** JAX-RS variant: $format wins over the Accept header. */
    private ContentType getContentType(final UriInfo uriInfo, final HttpHeaders httpHeaders) {
        ContentType contentType = getContentTypeByUriInfo(uriInfo);
        if (contentType == null) {
            contentType = getContentTypeByAcceptHeader(httpHeaders);
        }
        return contentType;
    }

    /** JAX-RS variant of the $format mapping (json → JSON, else XML). */
    private ContentType getContentTypeByUriInfo(final UriInfo uriInfo) {
        ContentType contentType = null;
        if (uriInfo != null && uriInfo.getQueryParameters() != null) {
            MultivaluedMap<String, String> queryParameters = uriInfo.getQueryParameters();
            if (queryParameters.containsKey(DOLLAR_FORMAT)) {
                String contentTypeString = queryParameters.getFirst(DOLLAR_FORMAT);
                if (DOLLAR_FORMAT_JSON.equals(contentTypeString)) {
                    contentType = ContentType.APPLICATION_JSON;
                } else {
                    //Any format mentioned in the $format parameter other than json results in an application/xml content type
                    //for error messages due to the OData V2 Specification.
                    contentType = ContentType.APPLICATION_XML;
                }
            }
        }
        return contentType;
    }

    /** JAX-RS variant of the Accept-header negotiation (see List overload). */
    private ContentType getContentTypeByAcceptHeader(final HttpHeaders httpHeaders) {
        for (MediaType type : httpHeaders.getAcceptableMediaTypes()) {
            if (ContentType.isParseable(type.toString())) {
                ContentType convertedContentType = ContentType.create(type.toString());
                if (convertedContentType.isWildcard()
                    || ContentType.APPLICATION_XML.equals(convertedContentType) || ContentType.APPLICATION_XML_CS_UTF_8.equals(convertedContentType)
                    || ContentType.APPLICATION_ATOM_XML.equals(convertedContentType) || ContentType.APPLICATION_ATOM_XML_CS_UTF_8.equals(convertedContentType)) {
                    return ContentType.APPLICATION_XML;
                } else if (ContentType.APPLICATION_JSON.equals(convertedContentType) || ContentType.APPLICATION_JSON_CS_UTF_8.equals(convertedContentType)) {
                    return ContentType.APPLICATION_JSON;
                }
            }
        }
        return ContentType.APPLICATION_XML;
    }

    /** Asks the service factory (from the context) for an error callback; may return null. */
    private ODataErrorCallback getErrorHandlerCallbackFromContext(final ODataContext context) throws ClassNotFoundException, InstantiationException, IllegalAccessException {
        ODataErrorCallback callback = null;
        ODataServiceFactory serviceFactory = context.getServiceFactory();
        callback = serviceFactory.getCallback(ODataErrorCallback.class);
        return callback;
    }

    /**
     * Instantiates the service factory named by the servlet init parameter
     * (optionally via a request-scoped class loader) and asks it for an
     * error callback; returns null when no factory is configured.
     */
    private ODataErrorCallback getErrorHandlerCallbackFromServletConfig(final ServletConfig servletConfig, final HttpServletRequest servletRequest) throws InstantiationException, IllegalAccessException, ClassNotFoundException {
        ODataErrorCallback callback = null;
        final String factoryClassName = servletConfig.getInitParameter(ODataServiceFactory.FACTORY_LABEL);
        if (factoryClassName != null) {
            ClassLoader cl = (ClassLoader) servletRequest.getAttribute(ODataServiceFactory.FACTORY_CLASSLOADER_LABEL);
            Class<?> factoryClass;
            if (cl == null) {
                factoryClass = Class.forName(factoryClassName);
            } else {
                factoryClass = Class.forName(factoryClassName, true, cl);
            }
            final ODataServiceFactory serviceFactory = (ODataServiceFactory) factoryClass.newInstance();
            callback = serviceFactory.getCallback(ODataErrorCallback.class);
        }
        return callback;
    }
}
package org.ms2ms.data.ms; import org.apache.commons.math3.stat.regression.SimpleRegression; import org.expasy.mzjava.core.ms.Tolerance; import org.expasy.mzjava.core.ms.spectrum.MsnSpectrum; import org.ms2ms.Disposable; import org.ms2ms.data.collect.MultiTreeTable; import org.ms2ms.math.Histogram; import org.ms2ms.math.QVals; import org.ms2ms.math.Stats; import org.ms2ms.mzjava.AnnotatedPeak; import org.ms2ms.utils.Strs; import org.ms2ms.utils.Tools; /** Keeper of the key matches to a MS/MS spectrum. It's not a single PSM since we don;t assume a single precursor * * Created by yuw on 8/7/16. */ public class Ms2Hits_ implements Disposable { public static final String REJECT_PEAKSPLITTING = "rejected due to peak splitting"; public static final String REJECT_SKEWED_ISO = "rejected due to skewed isotope envelop"; public static final String REJECT_SPARSEPEAK = "rejected due to insufficient peaks"; public static final String REJECT_NONE = "not rejected"; public static final String CTR_GAP = "GapScoreCenter"; public static final String SIG_GAP = "GapScoreSigma"; private Map<String, ScoreModel> mScoreModels; private String mRejection; private Ms2Hit_ mTopRanked=null, mBestDecoy=null; private Map<String, Object> mPeakStats; private Map<String, Double> mBasis = new HashMap<>(); private Map<String, Ms2Hit_> mExceptionals = new HashMap<>(); private Histogram mDecoyY=null, mDecoyB=null; private MsnSpectrum mSpectrum; // 'ladders' build from the y and b ions, respectively (Score, DistinctSeqID, FpmEntry). 
private Table<Double, Integer, FpmEntry> mCtSegments=null, mNtSegments; private Multimap<Double, Ms2Hit_> mExactMatches, mOpenMatches, mCandidates, mFinalists; public Ms2Hits_() { super(); } public Ms2Hits_(Map<String, Object> stats) { super(); mPeakStats=stats; } public Ms2Hits_(MsnSpectrum ms, Table<Double, Integer, FpmEntry> n, Table<Double, Integer, FpmEntry> c) { super(); mSpectrum=ms; mCtSegments=c; mNtSegments=n; initFpmEntries(); } // public boolean hasExceptional(String s) { return mExceptionals!=null && mExceptionals.get(s)!=null; } public Ms2Hit_ getExceptional(String s) { return mExceptionals.get(s); } public Map<String, Double> getScores() { return mBasis; } public boolean hasBasis(String... s) { if (!Tools.isSet(mBasis)) return false; if (Tools.isSet(s)) for (String t : s) if (!mBasis.containsKey(t)) return false; return true; } public ScoreModel getScoreModel(String s) { return mScoreModels!=null?mScoreModels.get(s):null; } // public Map<String, Double> getBasis() { return mBasis; } public Double getBasis(String s) { return mBasis.get(s); } public double getGapScoreZ(double s) { return hasBasis(CTR_GAP, SIG_GAP) ? ((s-getBasis(CTR_GAP))/(getBasis(SIG_GAP)!=null?getBasis(SIG_GAP):1d)):s; } // public double getMatchProbZ(double s) // { // return hasBasis(CTR_MATCH, SIG_MATCH) ? 
((s-getBasis(CTR_MATCH))/(getBasis(SIG_MATCH)!=null?getBasis(SIG_MATCH):1d)):s; // } public Map<String, Object> getPeakStats() { return mPeakStats; } public MsnSpectrum getSpectrum() { return mSpectrum; } public Table<Double, Integer, FpmEntry> getCtSegments() { return mCtSegments; } public Table<Double, Integer, FpmEntry> getNtSegments() { return mNtSegments; } public Multimap<Double, Ms2Hit_> getExactMatches() { return mExactMatches; } public Multimap<Double, Ms2Hit_> getOpenMatches() { return mOpenMatches; } public Multimap<Double, Ms2Hit_> getCandidates() { return mCandidates; } public Multimap<Double, Ms2Hit_> getFinalists() { return mFinalists; } public Ms2Hit_ getBestDecoy() { return mBestDecoy; } public Collection<Ms2Hit_> getMatches() { Collection<Ms2Hit_> mm = new ArrayList<>(size()); if (Tools.isSet(getExactMatches())) for (Ms2Hit_ H : getExactMatches().values()) mm.add(H); if (Tools.isSet(getOpenMatches())) for (Ms2Hit_ H : getOpenMatches().values()) mm.add(H); return mm; } public Ms2Hits_ setSpectrum(MsnSpectrum s) { mSpectrum=s; return this; } public Ms2Hits_ setFinalists(Multimap<Double, Ms2Hit_> s) { mFinalists=s; return this; } // public Ms2Hits_ putExceptional(String s, Ms2Hit_ H) { mExceptionals.put(s, H); return this; } public Ms2Hits_ setScore(String k, Double s) { if (k!=null && s!=null) mBasis.put(k, s); return this; } public Ms2Hits_ setPeakCounts(Map<String, Object> s) { mPeakStats=s; return this; } public Ms2Hits_ reject(String reason) { mRejection=reason; return this; } public boolean isRejected() { return !(mRejection==null || mRejection.equals(REJECT_NONE)); } public String getRejection() { return mRejection; } public Ms2Hits_ setOpenMatches( Multimap<Double, Ms2Hit_> s) { mOpenMatches =null; mOpenMatches =s; return this; } public Ms2Hits_ setExactMatches(Multimap<Double, Ms2Hit_> s) { mExactMatches=null; mExactMatches=s; return this; } public Ms2Hits_ addMatch(Ms2Hit_ s, Tolerance tol) { return addMatch(s, 
tol.withinTolerance(s.getCalcMH(), s.getCalcMH()+s.getDelta())); } public Ms2Hits_ addMatch(Ms2Hit_ s, boolean isExact) { if (mExactMatches==null) mExactMatches = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural()); if (mOpenMatches ==null) mOpenMatches = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural()); if (isExact) mExactMatches.put(s.getGapScore(), s.isExact(true)); else mOpenMatches.put(s.getGapScore(), s.isExact(false)); return this; } public Ms2Hits_ addFinalist(Ms2Hit_ s) { if (mFinalists==null) mFinalists = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural()); // sort the candidates by the e-val mFinalists.put(s.getScore(), s); return this; } public Multimap<Double, Ms2Hit_> consolidate(Multimap<Double, Ms2Hit_> hits, HashMap<Integer, Ms2Hit_> distincts, MultiTreeTable<Integer, String, Ms2Hit_> seqs, float[] AAs, OffsetPpmTolerance tol, Range<Integer> isoErr, float deci, TreeMultimap<Float, String> blocks) { // start from the Exact matches, higher Gap score first if (!Tools.isSet(hits)) return hits; Iterator<Map.Entry<Double, Ms2Hit_>> itr = hits.entries().iterator(); while (itr.hasNext()) { Map.Entry<Double, Ms2Hit_> E = itr.next(); // potential matches List<Integer> hashes = E.getValue().hashcodeByIntervals(AAs, tol, isoErr, deci, blocks); Ms2Hit_ H = E.getValue(); // check against the case where the same mass ladder is used on multiple peptides Integer mzHash = H.hashcodeByYBmz(); hashes.add(mzHash); // need to find out the prior hit(s) Boolean redundant=null; for (Integer hash : hashes) if (distincts.containsKey(hash)) { if (H.getSequence().equals(distincts.get(hash).getSequence()) && H.getScore() >distincts.get(hash).getScore()+5d || (H.getProteinKey()>0 && distincts.get(hash).getProteinKey()<0)) // in case of small key { // more complicate if we need to remove the prior one distincts.get(hash).invalidate(); distincts.remove(hash); redundant=false; } else redundant=true; } if (redundant==null && 
seqs.row(H.getProteinKey())!=null && Strs.hasSubStr(H.getSequence(), seqs.row(H.getProteinKey()).keySet())) redundant=true; // deposite the new and distinct hit if (redundant!=null && redundant) E.getValue().invalidate(); else { Tools.putKeysVal(H, distincts, hashes.get(0), hashes.get(1), mzHash); seqs.put(H.getProteinKey(), E.getValue().getSequence(), H); } } return hits; } public Ms2Hits_ consolidateByFragments(double max_miss_pct) { Table<Integer, Ms2Hit_, Set<Double>> hit_frag = HashBasedTable.create(); if (Tools.isSet(getExactMatches())) consolidateByFragments(getExactMatches(), max_miss_pct, hit_frag); if (Tools.isSet(getOpenMatches( ))) consolidateByFragments(getOpenMatches( ), max_miss_pct, hit_frag); return purge(); } public Multimap<Double, Ms2Hit_> consolidateByFragments(Multimap<Double, Ms2Hit_> hits, double max_miss_pct, Table<Integer, Ms2Hit_, Set<Double>> n_hit_frag) { if (!Tools.isSet(hits)) return hits; Set<Double> frags = new HashSet<>(); for (Double scr : hits.keySet()) for (Ms2Hit_ H : hits.get(scr)) { frags.clear(); for (AnnotatedPeak pk : H.getY().getTrack()) frags.add(pk.getIntensity()); for (AnnotatedPeak pk : H.getB().getTrack()) frags.add(pk.getIntensity()); // check the matching fragments to see if they're already used elsewhere if (!consolidateByFragment(frags, max_miss_pct, n_hit_frag)) { // despoite the frags for later use n_hit_frag.put(frags.size(), H, new HashSet<>(frags)); } else H.invalidate(); } return hits; } private boolean consolidateByFragment(Set<Double> frags, double max_miss_pct, Table<Integer, Ms2Hit_, Set<Double>> n_hit_frag) { if (Tools.isSet(frags) && Tools.isSet(n_hit_frag)) for (int i : n_hit_frag.rowKeySet()) { int max_miss = (int )Math.round(i*max_miss_pct*0.01d); if (i-frags.size()>=max_miss) for (Ms2Hit_ E : n_hit_frag.row(i).keySet()) { Sets.SetView<Double> intersect = Sets.intersection(frags, n_hit_frag.get(i, E)); if (frags.size()==intersect.size() && (n_hit_frag.get(i, E).size()-intersect.size())<=max_miss) { 
// this is a duplicate return true; } } } return false; } public Ms2Hits_ trimCandidates(int tops) { if (!Tools.isSet(mCandidates) || mCandidates.size()<=tops) return this; Iterator<Map.Entry<Double, Ms2Hit_>> itr = mCandidates.entries().iterator(); while (itr.hasNext()) { if (itr.next().getValue().getRank()>tops) itr.remove(); } return this; } // MUST call the 'survey' beforehand to setup the final score public Ms2Hits_ qvalues() { // combine the open and exact matches mCandidates = TreeMultimap.create(Ordering.natural().reverse(), Ordering.natural()); if (Tools.isSet(getExactMatches())) for (Ms2Hit_ H : getExactMatches().values()) mCandidates.put(H.getScore(), H); if (Tools.isSet(getOpenMatches())) for (Ms2Hit_ H : getOpenMatches().values( )) mCandidates.put(H.getScore(), H); // count the total number of decoys double decoys=0; int rank=0; for (Double score : getCandidates().keySet()) { rank++; for (Ms2Hit_ H : getCandidates().get(score)) if (H.setRank(rank).isDecoy()) decoys++; } // setup the qvalue calculation QVals qvals = new QVals(); Histogram all = new Histogram("all"); SimpleRegression R = new SimpleRegression(true); int pct10 = (int )Math.min(15, Math.max(3, 0.1d * decoys)); // System.out.println("Score\tSurvival"); for (Ms2Hit_ F : getCandidates().values()) { qvals.put(F.getScore(), F.isDecoy()); if (F.isDecoy()) { all.add(F.getScore()); if (R.getN()<pct10) { R.addData(F.getScore(), Math.log10((double) all.getData().size() / decoys)); } // System.out.println(F.getScore()+"\t"+(double )all.getData().size()/decoys); } } // System.out.println("Score\tSurvival"); // Collections.sort(all.getData(), Collections.reverseOrder()); // for (int i=0; i<all.getData().size(); i++) // System.out.println(all.getData().get(i)+"\t"+((double )i/(double )all.getData().size())); all.generate(all.getData().size()>100?25:15).assessTruncated(1); if (getScoreModel(Ms2Hit_.SCR_GAP)!=null && all.getCenter()!=null) getScoreModel(Ms2Hit_.SCR_GAP).setCenter(all.getCenter()); // 
calculate the q-value qvals.model(); setScore(Ms2Hit_.SCR_THRESHOLD, qvals.getThreshold()); // get the expect-value per ProteinProspector //http://prospector.ucsf.edu/prospector/html/misc/publications/2006_ASMS_1.pdf setScore(Ms2Hit_.SCR_EVAL_K, R.getSlope()); setScore(Ms2Hit_.SCR_EVAL_B, R.getIntercept()); for (Ms2Hit_ F : getCandidates().values()) { F.setScore(Ms2Hit_.SCR_EVAL, -10*(R.getSlope()*F.getScore()+R.getIntercept())); // in dB format F.setScore(Ms2Hit_.SCR_SNR, Math.pow(10d, (Math.log10(1d/decoys)-R.getSlope()*F.getScore()-R.getIntercept()))); } return this; } public int size() { int counts = (Tools.isSet(mExactMatches)?mExactMatches.size():0) + (Tools.isSet(mOpenMatches)?mOpenMatches.size():0); return counts>0?counts:(getCandidates()!=null?getCandidates().size():0); } private Ms2Hits_ initFpmEntries() { mDecoyY = new Histogram("decoy y"); mDecoyB = new Histogram("decoy_b"); // skip the duplicated scores for (Double score : getCtSegments().rowKeySet()) { double goods = Stats.greaterEqualThan(getCtSegments().row(score).keySet(), 0d), bads = getCtSegments().row(score).keySet().size()-goods, base = Math.min(goods + bads, 4); // reduce the counts to the base for (int i=0; i<Math.round(bads*(base/(goods+bads))); i++) mDecoyY.add(score); } for (Double score : getNtSegments().rowKeySet()) { double goods = Stats.greaterEqualThan(getNtSegments().row(score).keySet(), 0d), bads = getNtSegments().row(score).keySet().size()-goods, base = Math.min(goods + bads, 4); // reduce the counts to the base for (int i=0; i<Math.round(bads*(base/(goods+bads))); i++) mDecoyB.add(score); } mDecoyB.generate(25); mDecoyY.generate(25); if (Tools.isSet(mDecoyY.getHistogram())) { mDecoyY.assessTruncated(4); setScore(Ms2Hit_.SCR_DECOY_Y, Collections.max(mDecoyY.getData())); setScore(Ms2Hit_.SCR_DECOY_Y0, mDecoyY.getCenter()); setScore(Ms2Hit_.SCR_DECOY_Y1, mDecoyY.getSigma()); } if (Tools.isSet(mDecoyB.getHistogram())) { mDecoyB.assessTruncated(4); setScore(Ms2Hit_.SCR_DECOY_B, 
Collections.max(mDecoyB.getData())); setScore(Ms2Hit_.SCR_DECOY_B0, mDecoyB.getCenter()); setScore(Ms2Hit_.SCR_DECOY_B1, mDecoyB.getSigma()); } return this; } public Ms2Hits_ purge() { if (Tools.isSet(getExactMatches())) { Iterator<Map.Entry<Double, Ms2Hit_>> itr = getExactMatches().entries().iterator(); while (itr.hasNext()) if (!Strs.isSet(itr.next().getValue().getSequence())) itr.remove(); } if (Tools.isSet(getOpenMatches())) { Iterator<Map.Entry<Double, Ms2Hit_>> itr = getOpenMatches().entries().iterator(); while (itr.hasNext()) { Ms2Hit_ H = itr.next().getValue(); if (!Strs.isSet(H.getSequence())) itr.remove(); // do not allow any open match without some matches in both ends else if (H.getY()==null || !Tools.isSet(H.getY().getTrack()) || H.getB()==null || !Tools.isSet(H.getB().getTrack())) itr.remove(); } } return this; } // setup the score models in preparation for the final composite scores public Ms2Hits_ survey() { mScoreModels = new HashMap<>(); mScoreModels.put(Ms2Hit_.SCR_GAP, new ScoreModel(Ms2Hit_.SCR_GAP)); // mScoreModels.put(Ms2Hit_.SCR_KAI, new ScoreModel(Ms2Hit_.SCR_KAI)); // mScoreModels.put(Ms2Hit_.SCR_MATCH,new ScoreModel(Ms2Hit_.SCR_MATCH)); // gather the decoy data if (Tools.isSet(getExactMatches())) for (Ms2Hit_ hit : getExactMatches().values()) { if (hit.isDecoy()) for (ScoreModel m : mScoreModels.values()) m.addExactDecoy(hit.getScore(Ms2Hit_.SCR_GAP)); } if (Tools.isSet(getOpenMatches())) for (Ms2Hit_ hit : getOpenMatches().values()) { if (hit.isDecoy()) for (ScoreModel m : mScoreModels.values()) m.addOpenDecoy(hit.getScore(Ms2Hit_.SCR_GAP)); } // set the initial counts // setup the offsets by the key len for (ScoreModel m : mScoreModels.values()) { m.setOffset(ScoreModel.eType.exact, Tools.isSet(getExactMatches()) ? getExactMatches().size() : 0); m.setOffset(ScoreModel.eType.open, Tools.isSet(getOpenMatches()) ? 
getOpenMatches().size() : 0); m.model(ScoreModel.eType.exact, ScoreModel.eType.open); } // create the final scores return adjustGapScores(); } public Ms2Hits_ adjustGapScores() { if (getScoreModel(Ms2Hit_.SCR_GAP)!=null) { // no change if there wasn't enough decoys around double offset = getScoreModel(Ms2Hit_.SCR_GAP).getOffset(ScoreModel.eType.open)!=null? getScoreModel(Ms2Hit_.SCR_GAP).getOffset(ScoreModel.eType.open):0d; // gather the decoy data if (Tools.isSet(getExactMatches())) for (Ms2Hit_ hit : getExactMatches().values()) hit.setScore(Ms2Hit_.SCR_FINAL, hit.getScore(Ms2Hit_.SCR_GAP)); if (Tools.isSet(getOpenMatches())) for (Ms2Hit_ hit : getOpenMatches().values()) hit.setScore(Ms2Hit_.SCR_FINAL, hit.getScore(Ms2Hit_.SCR_GAP)-offset); } return this; } public Ms2Hits_ consolidateNegativeResidue(float[] AAs) { consolidateNegativeResidue(getExactMatches(), AAs); consolidateNegativeResidue(getOpenMatches(), AAs); purge(); return this; } public Multimap<Double, Ms2Hit_> consolidateNegativeResidue(Multimap<Double, Ms2Hit_> hits, float[] AAs) { if (Tools.isSet(hits)) for (Ms2Hit_ H : hits.values()) if (Tools.isSet(H.getMods())) for (Integer loc : H.getMods().keySet()) if (AAs[H.getSequence().charAt(loc-H.getLeft())]+H.getMods().get(loc)<-0.01) { H.invalidate(); break; } return hits; } // remove the duplicates public Ms2Hits_ consolidate(float[] AAs, OffsetPpmTolerance tol, Range<Integer> isoErr, float deci, TreeMultimap<Float, String> blocks) { // remove the invalid hits purge(); HashMap<Integer, Ms2Hit_> distincts = new HashMap<>(); MultiTreeTable<Integer, String, Ms2Hit_> seqs = MultiTreeTable.create(); // remove the redundant matches consolidate(getExactMatches(), distincts, seqs, AAs, tol, isoErr, deci, blocks); consolidate(getOpenMatches(), distincts, seqs, AAs, tol, isoErr, deci, blocks); purge(); return this; } public double getCenterY() { return (hasBasis(Ms2Hit_.SCR_DECOY_Y0, Ms2Hit_.SCR_DECOY_Y1)?getBasis(Ms2Hit_.SCR_DECOY_Y0):0d); } public double 
getSigmaY( ) { return (hasBasis(Ms2Hit_.SCR_DECOY_Y0, Ms2Hit_.SCR_DECOY_Y1)?getBasis(Ms2Hit_.SCR_DECOY_Y1):1d); } public double getCenterB() { return (hasBasis(Ms2Hit_.SCR_DECOY_B0, Ms2Hit_.SCR_DECOY_B1)?getBasis(Ms2Hit_.SCR_DECOY_B0):0d); } public double getSigmaB( ) { return (hasBasis(Ms2Hit_.SCR_DECOY_B0, Ms2Hit_.SCR_DECOY_B1)?getBasis(Ms2Hit_.SCR_DECOY_B1):1d); } public Ms2Hits_ dispose_intermediates() { // mSpectrum=null; // 'ladders' build from the y and b ions, respectively Tools.dispose(mCtSegments); Tools.dispose(mNtSegments); Tools.dispose(mExactMatches); Tools.dispose(mOpenMatches); Tools.dispose(mDecoyY, mDecoyB); // Tools.dispose(mCandidates); return this; } @Override public void dispose() { Tools.dispose(mPeakStats); mSpectrum=null; // 'ladders' build from the y and b ions, respectively Tools.dispose(mCtSegments); Tools.dispose(mNtSegments); Tools.dispose(mExactMatches); Tools.dispose(mOpenMatches); Tools.dispose(mCandidates); } }
/*
 * Copyright 2002-2008 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.beans;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.springframework.core.io.Resource;

/**
 * Bean exposing a wide spectrum of generically-typed properties (nested generics,
 * wildcards, raw types, arrays-in-lists) plus matching constructors and static
 * factory methods.
 * NOTE(review): the shapes strongly suggest this is a fixture for generic-type
 * resolution / data-binding tests — confirm against the test classes that use it.
 *
 * @author Juergen Hoeller
 */
public class GenericBean<T> {

	// --- generically-typed properties; each exists to exercise a distinct type shape ---

	private Set<Integer> integerSet;

	private List<Resource> resourceList;

	private List<List<Integer>> listOfLists;

	// array element type inside a generic collection
	private ArrayList<String[]> listOfArrays;

	private List<Map<Integer, Long>> listOfMaps;

	// deliberately raw (unparameterized) map
	private Map plainMap;

	private Map<Short, Integer> shortMap;

	// unbounded wildcard value type
	private HashMap<Long, ?> longMap;

	// bounded wildcard nested in the value type
	private Map<Number, Collection<? extends Object>> collectionMap;

	private Map<String, Map<Integer, Long>> mapOfMaps;

	private Map<Integer, List<Integer>> mapOfLists;

	private CustomEnum customEnum;

	// properties typed by the class's own type variable
	private T genericProperty;

	private List<T> genericListProperty;


	// --- constructors covering different parameter-type combinations ---

	public GenericBean() {
	}

	public GenericBean(Set<Integer> integerSet) {
		this.integerSet = integerSet;
	}

	public GenericBean(Set<Integer> integerSet, List<Resource> resourceList) {
		this.integerSet = integerSet;
		this.resourceList = resourceList;
	}

	public GenericBean(HashSet<Integer> integerSet, Map<Short, Integer> shortMap) {
		this.integerSet = integerSet;
		this.shortMap = shortMap;
	}

	public GenericBean(Map<Short, Integer> shortMap, Resource resource) {
		this.shortMap = shortMap;
		this.resourceList = Collections.singletonList(resource);
	}

	public GenericBean(Map plainMap, Map<Short, Integer> shortMap) {
		this.plainMap = plainMap;
		this.shortMap = shortMap;
	}

	public GenericBean(HashMap<Long, ?> longMap) {
		this.longMap = longMap;
	}

	public GenericBean(boolean someFlag, Map<Number, Collection<? extends Object>> collectionMap) {
		this.collectionMap = collectionMap;
	}


	// --- accessors (note: plainMap intentionally has no setter) ---

	public Set<Integer> getIntegerSet() {
		return integerSet;
	}

	public void setIntegerSet(Set<Integer> integerSet) {
		this.integerSet = integerSet;
	}

	public List<Resource> getResourceList() {
		return resourceList;
	}

	public void setResourceList(List<Resource> resourceList) {
		this.resourceList = resourceList;
	}

	public List<List<Integer>> getListOfLists() {
		return listOfLists;
	}

	public ArrayList<String[]> getListOfArrays() {
		return listOfArrays;
	}

	public void setListOfArrays(ArrayList<String[]> listOfArrays) {
		this.listOfArrays = listOfArrays;
	}

	public void setListOfLists(List<List<Integer>> listOfLists) {
		this.listOfLists = listOfLists;
	}

	public List<Map<Integer, Long>> getListOfMaps() {
		return listOfMaps;
	}

	public void setListOfMaps(List<Map<Integer, Long>> listOfMaps) {
		this.listOfMaps = listOfMaps;
	}

	public Map getPlainMap() {
		return plainMap;
	}

	public Map<Short, Integer> getShortMap() {
		return shortMap;
	}

	public void setShortMap(Map<Short, Integer> shortMap) {
		this.shortMap = shortMap;
	}

	public HashMap<Long, ?> getLongMap() {
		return longMap;
	}

	public void setLongMap(HashMap<Long, ?> longMap) {
		this.longMap = longMap;
	}

	public Map<Number, Collection<? extends Object>> getCollectionMap() {
		return collectionMap;
	}

	public void setCollectionMap(Map<Number, Collection<? extends Object>> collectionMap) {
		this.collectionMap = collectionMap;
	}

	public Map<String, Map<Integer, Long>> getMapOfMaps() {
		return mapOfMaps;
	}

	public void setMapOfMaps(Map<String, Map<Integer, Long>> mapOfMaps) {
		this.mapOfMaps = mapOfMaps;
	}

	public Map<Integer, List<Integer>> getMapOfLists() {
		return mapOfLists;
	}

	public void setMapOfLists(Map<Integer, List<Integer>> mapOfLists) {
		this.mapOfLists = mapOfLists;
	}

	public T getGenericProperty() {
		return genericProperty;
	}

	public void setGenericProperty(T genericProperty) {
		this.genericProperty = genericProperty;
	}

	public List<T> getGenericListProperty() {
		return genericListProperty;
	}

	public void setGenericListProperty(List<T> genericListProperty) {
		this.genericListProperty = genericListProperty;
	}

	public CustomEnum getCustomEnum() {
		return customEnum;
	}

	public void setCustomEnum(CustomEnum customEnum) {
		this.customEnum = customEnum;
	}


	// --- static factory methods mirroring the constructors (raw GenericBean returns preserved) ---

	public static GenericBean createInstance(Set<Integer> integerSet) {
		return new GenericBean(integerSet);
	}

	public static GenericBean createInstance(Set<Integer> integerSet, List<Resource> resourceList) {
		return new GenericBean(integerSet, resourceList);
	}

	public static GenericBean createInstance(HashSet<Integer> integerSet, Map<Short, Integer> shortMap) {
		return new GenericBean(integerSet, shortMap);
	}

	public static GenericBean createInstance(Map<Short, Integer> shortMap, Resource resource) {
		return new GenericBean(shortMap, resource);
	}

	public static GenericBean createInstance(Map map, Map<Short, Integer> shortMap) {
		return new GenericBean(map, shortMap);
	}

	public static GenericBean createInstance(HashMap<Long, ?> longMap) {
		return new GenericBean(longMap);
	}

	public static GenericBean createInstance(boolean someFlag, Map<Number, Collection<? extends Object>> collectionMap) {
		return new GenericBean(someFlag, collectionMap);
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.runners.core.construction;

import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import java.util.List;
import java.util.Map;
import org.apache.beam.sdk.runners.PTransformOverrideFactory.ReplacementOutput;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.util.WindowingStrategy;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollection.IsBounded;
import org.apache.beam.sdk.values.PCollectionList;
import org.apache.beam.sdk.values.PCollectionTuple;
import org.apache.beam.sdk.values.POutput;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.TaggedPValue;
import org.apache.beam.sdk.values.TupleTag;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Tests for {@link ReplacementOutputs}.
 */
@RunWith(JUnit4.class)
public class ReplacementOutputsTest {
  @Rule public ExpectedException thrown = ExpectedException.none();

  private TestPipeline p = TestPipeline.create();

  // "original" collections that a transform override would replace
  private PCollection<Integer> ints =
      PCollection.createPrimitiveOutputInternal(
          p, WindowingStrategy.globalDefault(), IsBounded.BOUNDED);
  private PCollection<Integer> moreInts =
      PCollection.createPrimitiveOutputInternal(
          p, WindowingStrategy.globalDefault(), IsBounded.BOUNDED);
  private PCollection<String> strs =
      PCollection.createPrimitiveOutputInternal(
          p, WindowingStrategy.globalDefault(), IsBounded.BOUNDED);

  // replacement collections standing in for the originals above
  private PCollection<Integer> replacementInts =
      PCollection.createPrimitiveOutputInternal(
          p, WindowingStrategy.globalDefault(), IsBounded.BOUNDED);
  private PCollection<Integer> moreReplacementInts =
      PCollection.createPrimitiveOutputInternal(
          p, WindowingStrategy.globalDefault(), IsBounded.BOUNDED);
  private PCollection<String> replacementStrs =
      PCollection.createPrimitiveOutputInternal(
          p, WindowingStrategy.globalDefault(), IsBounded.BOUNDED);

  // singleton(): a one-element expansion maps directly onto the single replacement value
  @Test
  public void singletonSucceeds() {
    Map<PValue, ReplacementOutput> replacements =
        ReplacementOutputs.singleton(ints.expand(), replacementInts);

    assertThat(replacements, Matchers.<PValue>hasKey(replacementInts));
    ReplacementOutput replacement = replacements.get(replacementInts);
    TaggedPValue taggedInts = Iterables.getOnlyElement(ints.expand());
    assertThat(replacement.getOriginal(), equalTo(taggedInts));
    assertThat(replacement.getReplacement().getValue(), Matchers.<PValue>equalTo(replacementInts));
  }

  // singleton() must reject an expansion containing more than one original value
  @Test
  public void singletonMultipleOriginalsThrows() {
    thrown.expect(IllegalArgumentException.class);
    ReplacementOutputs.singleton(
        ImmutableList.copyOf(Iterables.concat(ints.expand(), moreInts.expand())), replacementInts);
  }

  // ordered(): originals and replacements are paired positionally
  @Test
  public void orderedSucceeds() {
    List<TaggedPValue> originals = PCollectionList.of(ints).and(moreInts).expand();
    Map<PValue, ReplacementOutput> replacements =
        ReplacementOutputs.ordered(
            originals, PCollectionList.of(replacementInts).and(moreReplacementInts));
    assertThat(
        replacements.keySet(),
        Matchers.<PValue>containsInAnyOrder(replacementInts, moreReplacementInts));

    ReplacementOutput intsMapping = replacements.get(replacementInts);
    assertThat(intsMapping.getOriginal().getValue(), Matchers.<PValue>equalTo(ints));
    assertThat(intsMapping.getReplacement().getValue(), Matchers.<PValue>equalTo(replacementInts));

    ReplacementOutput moreIntsMapping = replacements.get(moreReplacementInts);
    assertThat(moreIntsMapping.getOriginal().getValue(), Matchers.<PValue>equalTo(moreInts));
    assertThat(
        moreIntsMapping.getReplacement().getValue(), Matchers.<PValue>equalTo(moreReplacementInts));
  }

  // ordered() must reject lists whose sizes differ (extra replacements...)
  @Test
  public void orderedTooManyReplacements() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("same size");
    ReplacementOutputs.ordered(
        PCollectionList.of(ints).expand(),
        PCollectionList.of(replacementInts).and(moreReplacementInts));
  }

  // ...and missing replacements
  @Test
  public void orderedTooFewReplacements() {
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("same size");
    ReplacementOutputs.ordered(
        PCollectionList.of(ints).and(moreInts).expand(),
        PCollectionList.of(moreReplacementInts));
  }

  private TupleTag<Integer> intsTag = new TupleTag<>();
  private TupleTag<Integer> moreIntsTag = new TupleTag<>();
  private TupleTag<String> strsTag = new TupleTag<>();

  // tagged(): originals and replacements are matched by TupleTag regardless of order
  @Test
  public void taggedSucceeds() {
    PCollectionTuple original =
        PCollectionTuple.of(intsTag, ints).and(strsTag, strs).and(moreIntsTag, moreInts);
    Map<PValue, ReplacementOutput> replacements =
        ReplacementOutputs.tagged(
            original.expand(),
            PCollectionTuple.of(strsTag, replacementStrs)
                .and(moreIntsTag, moreReplacementInts)
                .and(intsTag, replacementInts));
    assertThat(
        replacements.keySet(),
        Matchers.<PValue>containsInAnyOrder(replacementStrs, replacementInts, moreReplacementInts));
    ReplacementOutput intsReplacement = replacements.get(replacementInts);
    ReplacementOutput strsReplacement = replacements.get(replacementStrs);
    ReplacementOutput moreIntsReplacement = replacements.get(moreReplacementInts);

    assertThat(
        intsReplacement,
        equalTo(
            ReplacementOutput.of(
                TaggedPValue.of(intsTag, ints), TaggedPValue.of(intsTag, replacementInts))));
    assertThat(
        strsReplacement,
        equalTo(
            ReplacementOutput.of(
                TaggedPValue.of(strsTag, strs), TaggedPValue.of(strsTag, replacementStrs))));
    assertThat(
        moreIntsReplacement,
        equalTo(
            ReplacementOutput.of(
                TaggedPValue.of(moreIntsTag, moreInts),
                TaggedPValue.of(moreIntsTag, moreReplacementInts))));
  }

  /**
   * When a call to {@link ReplacementOutputs#tagged(List, POutput)} is made where the first
   * argument contains multiple copies of the same {@link TaggedPValue}, the call succeeds using
   * that mapping.
   */
  @Test
  public void taggedMultipleInstances() {
    List<TaggedPValue> original =
        ImmutableList.of(
            TaggedPValue.of(intsTag, ints),
            TaggedPValue.of(strsTag, strs),
            TaggedPValue.of(intsTag, ints));

    Map<PValue, ReplacementOutput> replacements =
        ReplacementOutputs.tagged(
            original, PCollectionTuple.of(strsTag, replacementStrs).and(intsTag, replacementInts));
    assertThat(
        replacements.keySet(),
        Matchers.<PValue>containsInAnyOrder(replacementStrs, replacementInts));
    ReplacementOutput intsReplacement = replacements.get(replacementInts);
    ReplacementOutput strsReplacement = replacements.get(replacementStrs);

    assertThat(
        intsReplacement,
        equalTo(
            ReplacementOutput.of(
                TaggedPValue.of(intsTag, ints), TaggedPValue.of(intsTag, replacementInts))));
    assertThat(
        strsReplacement,
        equalTo(
            ReplacementOutput.of(
                TaggedPValue.of(strsTag, strs), TaggedPValue.of(strsTag, replacementStrs))));
  }

  /**
   * When a call to {@link ReplacementOutputs#tagged(List, POutput)} is made where a single tag
   * has multiple {@link PValue PValues} mapped to it, the call fails.
   */
  @Test
  public void taggedMultipleConflictingInstancesThrows() {
    List<TaggedPValue> original =
        ImmutableList.of(
            TaggedPValue.of(intsTag, ints), TaggedPValue.of(intsTag, moreReplacementInts));
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("different values");
    thrown.expectMessage(intsTag.toString());
    thrown.expectMessage(ints.toString());
    thrown.expectMessage(moreReplacementInts.toString());
    ReplacementOutputs.tagged(
        original,
        PCollectionTuple.of(strsTag, replacementStrs)
            .and(moreIntsTag, moreReplacementInts)
            .and(intsTag, replacementInts));
  }

  // tagged() must reject replacements that omit a tag present in the original expansion
  @Test
  public void taggedMissingReplacementThrows() {
    PCollectionTuple original =
        PCollectionTuple.of(intsTag, ints).and(strsTag, strs).and(moreIntsTag, moreInts);
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("Missing replacement");
    thrown.expectMessage(intsTag.toString());
    thrown.expectMessage(ints.toString());
    ReplacementOutputs.tagged(
        original.expand(),
        PCollectionTuple.of(strsTag, replacementStrs).and(moreIntsTag, moreReplacementInts));
  }

  // tagged() must reject replacements carrying a tag the original expansion never produced
  @Test
  public void taggedExtraReplacementThrows() {
    PCollectionTuple original = PCollectionTuple.of(intsTag, ints).and(strsTag, strs);
    thrown.expect(IllegalArgumentException.class);
    thrown.expectMessage("Missing original output");
    thrown.expectMessage(moreIntsTag.toString());
    thrown.expectMessage(moreReplacementInts.toString());
    ReplacementOutputs.tagged(
        original.expand(),
        PCollectionTuple.of(strsTag, replacementStrs)
            .and(moreIntsTag, moreReplacementInts)
            .and(intsTag, replacementInts));
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.client; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.eql.EqlSearchRequest; import org.elasticsearch.client.eql.EqlSearchResponse; import org.elasticsearch.client.eql.EqlStatsRequest; import org.elasticsearch.client.eql.EqlStatsResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.junit.Before; import java.io.IOException; import java.time.format.DateTimeFormatter; import java.util.Locale; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; 
import static org.hamcrest.Matchers.greaterThan;

/**
 * Integration tests for the EQL (Event Query Language) endpoints of the REST
 * high-level client. Indexes a small fixed data set in {@link #setup()} and then
 * exercises search (basic, filtered, non-default category field), large-mapping
 * retrieval, and the stats endpoint.
 */
public class EqlIT extends ESRestHighLevelClientTestCase {

    private static final String INDEX_NAME = "index";
    private static final int RECORD_COUNT = 40;
    // Every DIVIDER-th record (i % DIVIDER == 0) gets opcode=1, pid=100, unique_pid=101;
    // all others get zeros. Condition-based tests rely on RECORD_COUNT / DIVIDER matches.
    private static final int DIVIDER = 4;

    @Before
    public void setup() throws Exception {
        setupRemoteClusterConfig("local_cluster");
        setupData();
    }

    /**
     * Bulk-indexes RECORD_COUNT synthetic "process" events with second-granularity
     * timestamps, then refreshes the index so the documents are searchable.
     */
    private void setupData() throws IOException {
        final BulkRequest bulkRequest = new BulkRequest();
        bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
        for (int i = 0; i < RECORD_COUNT; i++) {
            final IndexRequest indexRequest = new IndexRequest(INDEX_NAME);
            indexRequest.source(jsonBuilder()
                .startObject()
                    .field("event_subtype_full", "already_running")
                    .startObject("event")
                        .field("category", "process")
                    .endObject()
                    .field("event_type", "foo")
                    .field("event_type_full", "process_event")
                    // Marker values on every DIVIDER-th record; zeros elsewhere.
                    .field("opcode", ((i % DIVIDER) == 0) ? 1 : 0)
                    .field("pid", ((i % DIVIDER) == 0) ? 100 : 0)
                    .field("process_name", "System Idle Process")
                    // serial_event_id is 1-based, so it ranges over [1, RECORD_COUNT].
                    .field("serial_event_id", i + 1)
                    .field("subtype", "create")
                    .field("@timestamp", String.format(Locale.ROOT, "2018-01-01T00:00:%02dZ", i))
                    .field("unique_pid", ((i % DIVIDER) == 0) ? 101 : 0)
                .endObject());
            bulkRequest.add(indexRequest);
        }
        BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
        assertEquals(RestStatus.OK, bulkResponse.status());
        assertFalse(bulkResponse.hasFailures());

        RefreshResponse refreshResponse =
            highLevelClient().indices().refresh(new RefreshRequest(INDEX_NAME), RequestOptions.DEFAULT);
        assertEquals(0, refreshResponse.getFailedShards());
    }

    /**
     * Asserts a successful event-style response: no timeout, no sequence/count
     * results, and exactly {@code count} event hits.
     */
    private void assertResponse(EqlSearchResponse response, int count) {
        assertNotNull(response);
        assertFalse(response.isTimeout());
        assertNotNull(response.hits());
        assertNull(response.hits().sequences());
        assertNull(response.hits().counts());
        assertNotNull(response.hits().events());
        assertThat(response.hits().events().size(), equalTo(count));
    }

    // Unconditional query should return every indexed record.
    public void testBasicSearch() throws Exception {
        EqlClient eql = highLevelClient().eql();
        EqlSearchRequest request = new EqlSearchRequest("index", "process where true").size(RECORD_COUNT);
        assertResponse(execute(request, eql::search, eql::searchAsync), RECORD_COUNT);
    }

    @SuppressWarnings("unchecked")
    public void testSimpleConditionSearch() throws Exception {
        EqlClient eql = highLevelClient().eql();

        // test simple conditional
        EqlSearchRequest request = new EqlSearchRequest("index", "foo where pid > 0");

        // test with non-default event.category mapping
        request.eventCategoryField("event_type").size(RECORD_COUNT);

        EqlSearchResponse response = execute(request, eql::search, eql::searchAsync);
        // Only the marker records (pid == 100) match: one per DIVIDER.
        assertResponse(response, RECORD_COUNT / DIVIDER);

        // test the content of the hits
        for (SearchHit hit : response.hits().events()) {
            final Map<String, Object> source = hit.getSourceAsMap();

            final Map<String, Object> event = (Map<String, Object>) source.get("event");
            assertThat(event.get("category"), equalTo("process"));
            assertThat(source.get("event_type"), equalTo("foo"));
            assertThat(source.get("event_type_full"), equalTo("process_event"));
            assertThat(source.get("opcode"), equalTo(1));
            assertThat(source.get("pid"), equalTo(100));
            assertThat(source.get("process_name"), equalTo("System Idle Process"));
            assertThat((int) source.get("serial_event_id"), greaterThan(0));
            assertThat(source.get("unique_pid"), equalTo(101));
        }
    }

    @SuppressWarnings("unchecked")
    public void testEqualsInFilterConditionSearch() throws Exception {
        EqlClient eql = highLevelClient().eql();

        EqlSearchRequest request = new EqlSearchRequest("index",
            "process where event_type_full = \"process_event\" and serial_event_id in (1,3,5)");

        EqlSearchResponse response = execute(request, eql::search, eql::searchAsync);
        assertResponse(response, 3);

        // test the content of the hits
        for (SearchHit hit : response.hits().events()) {
            final Map<String, Object> source = hit.getSourceAsMap();

            final Map<String, Object> event = (Map<String, Object>) source.get("event");
            assertThat(event.get("category"), equalTo("process"));
            assertThat(source.get("serial_event_id"), anyOf(equalTo(1), equalTo(3), equalTo(5)));
        }
    }

    /**
     * Indexes a single document whose mapping has more date fields than the default
     * doc-values-per-search limit and verifies the event is still retrievable.
     */
    public void testLargeMapping() throws Exception {
        final String index = "large_mapping_index";

        Request doc1 = new Request(HttpPut.METHOD_NAME, "/" + index + "/_doc/1");
        // use more exact fields (dates) than the default to verify that retrieval works and requesting doc values
        // would fail
        int PASS_DEFAULT_DOC_VALUES = IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING.get(Settings.EMPTY) + 50;
        String now = DateUtils.nowWithMillisResolution().format(DateTimeFormatter.ISO_DATE_TIME);
        // Build the JSON body by hand; every datetimeN field is followed by a comma,
        // then the fixed trailing fields close the object.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        for (int i = 0; i < PASS_DEFAULT_DOC_VALUES; i++) {
            sb.append("\"datetime" + i + "\":\"" + now + "\"");
            sb.append(",");
        }
        sb.append("\"event\": {\"category\": \"process\"},");
        sb.append("\"@timestamp\": \"2020-02-03T12:34:56Z\",");
        sb.append("\"serial_event_id\": 1");
        sb.append("}");
        doc1.setJsonEntity(sb.toString());
        client().performRequest(doc1);
        // Low-level refresh so the document is visible to the EQL search below.
        client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh"));

        EqlClient eql = highLevelClient().eql();
        EqlSearchRequest request = new EqlSearchRequest(index, "process where true");
        EqlSearchResponse response = execute(request, eql::search, eql::searchAsync);
        assertNotNull(response);
        assertNotNull(response.hits());
        assertThat(response.hits().events().size(), equalTo(1));
    }

    // Basic test for stats
    // TODO: add more tests once the stats are hooked up
    public void testStats() throws Exception {
        EqlClient eql = highLevelClient().eql();
        EqlStatsRequest request = new EqlStatsRequest();
        EqlStatsResponse response = execute(request, eql::stats, eql::statsAsync);
        assertNotNull(response);
        assertNotNull(response.getHeader());
        assertThat(response.getHeader().getTotal(), greaterThan(0));
        assertThat(response.getNodes().size(), greaterThan(0));
    }
}
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.util.ui;

import com.intellij.openapi.util.NlsContexts;
import com.intellij.ui.ClickListener;
import com.intellij.ui.SimpleColoredComponent;
import com.intellij.ui.SimpleTextAttributes;
import com.intellij.ui.UIBundle;
import com.intellij.ui.components.JBViewport;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Paints a (possibly multi-line, possibly clickable) "empty/status" text over an
 * owner component when {@link #isStatusVisible()} is true. Text is laid out in two
 * side-by-side columns of stacked {@link SimpleColoredComponent} fragments; click
 * and hover handling is routed through the owner via a {@link ClickListener} and a
 * mouse-motion listener installed in {@link #attachTo(Component, Component)}.
 */
public abstract class StatusText {
  public static final SimpleTextAttributes DEFAULT_ATTRIBUTES = SimpleTextAttributes.GRAYED_ATTRIBUTES;
  // Vertical gap (unscaled) between stacked fragments within a column.
  private static final int Y_GAP = 2;

  // Component the text is painted over; may differ from myMouseTarget (e.g. a viewport).
  private @Nullable Component myOwner;
  private Component myMouseTarget;
  private final @NotNull MouseMotionListener myMouseMotionListener;
  private final @NotNull ClickListener myClickListener;

  // True while the text is still the default "nothing to show"; the first explicit
  // append clears it (see appendText/appendLine).
  private boolean myIsDefaultText;

  // Concatenation of all text appended via appendText(String, ...); getText() returns it.
  private String myText = "";

  // Hardcoded layout manages two columns (primary and secondary) with vertically aligned components inside
  protected final class Column {
    List<Fragment> fragments = new ArrayList<>();
    // Cached column size, recomputed by updateBounds(Column).
    private final Dimension preferredSize = new Dimension();
  }

  // One row of a column: a colored component plus per-text-fragment click listeners.
  protected final class Fragment {
    private final SimpleColoredComponent myComponent = new SimpleColoredComponent() {
      @Override
      protected void revalidateAndRepaint() {
        super.revalidateAndRepaint();
        // Keep cached column bounds in sync whenever the component invalidates itself.
        updateBounds();
      }
    };
    // Bounds relative to the column origin, maintained by updateBounds(Column).
    private final Rectangle boundsInColumn = new Rectangle();
    // Parallel to the component's text fragments: listener at index i answers clicks
    // on fragment i (see findListener). Entries may be null.
    private final List<ActionListener> myClickListeners = ContainerUtil.createLockFreeCopyOnWriteList();

    public Fragment() {
      myComponent.setOpaque(false);
      myComponent.setFont(StartupUiUtil.getLabelFont());
    }
  }

  private final Column myPrimaryColumn = new Column();
  private final Column mySecondaryColumn = new Column();

  private boolean myHasActiveClickListeners; // calculated field for performance optimization
  // When true, text is placed at 1/3 of the owner's height instead of centered.
  private boolean myShowAboveCenter = true;

  // Explicit font applied to all (current and future) fragments; null = label font default.
  private Font myFont = null;
  private boolean myCenterAlignText = true;

  protected StatusText(JComponent owner) {
    this();
    attachTo(owner);
  }

  public StatusText() {
    // Left-button single click on a fragment with a listener fires that listener.
    myClickListener = new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent e, int clickCount) {
        if (e.getButton() == MouseEvent.BUTTON1 && clickCount == 1) {
          ActionListener actionListener = findActionListenerAt(e.getPoint());
          if (actionListener != null) {
            actionListener.actionPerformed(new ActionEvent(e, 0, ""));
            return true;
          }
        }
        return false;
      }
    };

    // Switches to a hand cursor while hovering a clickable fragment, restoring the
    // original cursor when the mouse leaves it.
    myMouseMotionListener = new MouseAdapter() {
      private Cursor myOriginalCursor;

      @Override
      public void mouseMoved(final MouseEvent e) {
        if (isStatusVisible()) {
          if (findActionListenerAt(e.getPoint()) != null) {
            if (myOriginalCursor == null) {
              myOriginalCursor = myMouseTarget.getCursor();
              myMouseTarget.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
            }
          }
          else if (myOriginalCursor != null) {
            myMouseTarget.setCursor(myOriginalCursor);
            myOriginalCursor = null;
          }
        }
      }
    };

    setText(getDefaultEmptyText(), DEFAULT_ATTRIBUTES);
    myIsDefaultText = true;
  }

  protected boolean isFontSet() { return myFont != null; }

  /** Applies {@code font} to all existing fragments and remembers it for future ones. */
  public void setFont(@NotNull Font font) {
    myPrimaryColumn.fragments.forEach(fragment -> fragment.myComponent.setFont(font));
    mySecondaryColumn.fragments.forEach(fragment -> fragment.myComponent.setFont(font));
    myFont = font;
  }

  public boolean isCenterAlignText() { return myCenterAlignText; }

  public void setCenterAlignText(boolean centerAlignText) { myCenterAlignText = centerAlignText; }

  public void attachTo(@Nullable Component owner) { attachTo(owner, owner); }

  /**
   * Rebinds this status text: mouse listeners are moved from the previous mouse
   * target (if any) to {@code mouseTarget}, and painting is associated with {@code owner}.
   */
  public void attachTo(@Nullable Component owner, @Nullable Component mouseTarget) {
    if (myMouseTarget != null) {
      myClickListener.uninstall(myMouseTarget);
      myMouseTarget.removeMouseMotionListener(myMouseMotionListener);
    }
    myOwner = owner;
    myMouseTarget = mouseTarget;
    if (myMouseTarget != null) {
      myClickListener.installOn(myMouseTarget);
      myMouseTarget.addMouseMotionListener(myMouseMotionListener);
    }
  }

  /** Whether the status text should currently be painted (and react to the mouse). */
  protected abstract boolean isStatusVisible();

  // Maps an x-coordinate inside a component to the click listener of the text
  // fragment under it, if one was registered at that fragment index.
  private static @Nullable ActionListener findListener(@NotNull SimpleColoredComponent component,
                                                       @NotNull List<? extends ActionListener> listeners,
                                                       int xCoord) {
    int index = component.findFragmentAt(xCoord);
    if (index >= 0 && index < listeners.size()) {
      return listeners.get(index);
    }
    return null;
  }

  // Point arrives in myMouseTarget coordinates; converted to myOwner coordinates
  // before hit-testing both columns.
  private @Nullable ActionListener findActionListenerAt(Point point) {
    if (!myHasActiveClickListeners || !isStatusVisible()) return null;

    point = SwingUtilities.convertPoint(myMouseTarget, point, myOwner);

    Rectangle commonBounds = getTextComponentBound();
    if (commonBounds.contains(point)) {
      ActionListener listener = getListener(myPrimaryColumn, point, commonBounds);
      if (listener != null) return listener;
      listener = getListener(mySecondaryColumn, point, commonBounds);
      if (listener != null) return listener;
    }
    return null;
  }

  private @Nullable ActionListener getListener(Column column, Point point, Rectangle commonBounds) {
    Point primaryLocation = getColumnLocation(column == myPrimaryColumn, commonBounds);
    for (Fragment fragment : column.fragments) {
      Rectangle fragmentBounds = getFragmentBounds(column, primaryLocation, commonBounds, fragment);
      if (!fragmentBounds.contains(new Point(point.x, point.y))) continue;
      ActionListener listener = findListener(fragment.myComponent, fragment.myClickListeners, point.x - fragmentBounds.x);
      if (listener != null) return listener;
    }
    return null;
  }

  /**
   * Bounds of the whole status text within the owner: horizontally centered,
   * vertically at 1/3 height (above center) or 1/2 height depending on
   * {@link #isShowAboveCenter()}.
   */
  protected Rectangle getTextComponentBound() {
    Rectangle ownerRec = myOwner == null ? new Rectangle(0, 0, 0, 0) : myOwner.getBounds();

    Dimension size = getPreferredSize();
    int x = (ownerRec.width - size.width) / 2;
    int y = (ownerRec.height - size.height) / (myShowAboveCenter ? 3 : 2);
    return new Rectangle(x, y, size.width, size.height);
  }

  /** Point just under the bottom-left corner of the status text bounds. */
  public Point getPointBelow() {
    final var textComponentBound = getTextComponentBound();
    return new Point(textComponentBound.x, textComponentBound.y + textComponentBound.height);
  }

  public final boolean isShowAboveCenter() { return myShowAboveCenter; }

  public final StatusText setShowAboveCenter(boolean showAboveCenter) {
    myShowAboveCenter = showAboveCenter;
    return this;
  }

  public @NotNull String getText() { return myText; }

  public StatusText setText(@NlsContexts.StatusText String text) { return setText(text, DEFAULT_ATTRIBUTES); }

  /** Replaces all current content with a single text fragment. */
  public StatusText setText(@NlsContexts.StatusText String text, SimpleTextAttributes attrs) {
    return clear().appendText(text, attrs);
  }

  /** Removes all fragments from both columns and repaints the owner. */
  public StatusText clear() {
    myText = "";
    myPrimaryColumn.fragments.clear();
    mySecondaryColumn.fragments.clear();
    myHasActiveClickListeners = false;
    repaintOwner();
    return this;
  }

  private void repaintOwner() {
    if (myOwner != null && isStatusVisible()) myOwner.repaint();
  }

  public StatusText appendText(@NlsContexts.StatusText String text) { return appendText(text, DEFAULT_ATTRIBUTES); }

  public StatusText appendText(@NlsContexts.StatusText String text, SimpleTextAttributes attrs) {
    return appendText(text, attrs, null);
  }

  /**
   * Appends text to the LAST existing row of the primary column (or row 0 when the
   * column is empty). The first explicit append discards the default empty text.
   */
  public StatusText appendText(@NlsContexts.StatusText String text, SimpleTextAttributes attrs, ActionListener listener) {
    if (myIsDefaultText) {
      clear();
      myIsDefaultText = false;
    }
    myText += text;
    return appendText(true, Math.max(0, myPrimaryColumn.fragments.size() - 1), text, attrs, listener);
  }

  public StatusText appendText(boolean isPrimaryColumn, int row, @NlsContexts.StatusText String text, SimpleTextAttributes attrs, ActionListener listener) {
    return appendText(isPrimaryColumn, row, null, text, attrs, listener);
  }

  /**
   * Appends a text fragment (with optional icon and click listener) to the given
   * row of the chosen column, growing the column by one row if needed, then
   * recomputes layout and repaints. Note: this overload does NOT update myText.
   */
  public StatusText appendText(boolean isPrimaryColumn, int row, @Nullable Icon icon, @NlsContexts.StatusText String text, SimpleTextAttributes attrs, ActionListener listener) {
    Fragment fragment = getOrCreateFragment(isPrimaryColumn, row);
    fragment.myComponent.setIcon(icon);
    fragment.myComponent.append(text, attrs);
    fragment.myClickListeners.add(listener);
    myHasActiveClickListeners |= listener != null;
    updateBounds();
    repaintOwner();
    return this;
  }

  private void updateBounds() {
    updateBounds(myPrimaryColumn);
    updateBounds(mySecondaryColumn);
  }

  // Stacks fragments vertically (Y_GAP between rows), records each fragment's
  // bounds relative to the column origin, optionally centers rows horizontally,
  // and caches the resulting column size.
  private void updateBounds(Column column) {
    Dimension size = new Dimension();
    for (int i = 0; i < column.fragments.size(); i++) {
      Fragment fragment = column.fragments.get(i);
      Dimension d = fragment.myComponent.getPreferredSize();
      fragment.boundsInColumn.setBounds(0, size.height, d.width, d.height);
      size.height += d.height;
      if (i > 0) size.height += JBUIScale.scale(Y_GAP);
      size.width = Math.max(size.width, d.width);
    }
    if (myCenterAlignText) {
      for (int i = 0; i < column.fragments.size(); i++) {
        Fragment fragment = column.fragments.get(i);
        fragment.boundsInColumn.x += (size.width - fragment.boundsInColumn.width)/2;
      }
    }
    column.preferredSize.setSize(size);
  }

  // Rows must be appended contiguously: asking for row > size is an error,
  // row == size creates a new fragment, row < size reuses the existing one.
  private Fragment getOrCreateFragment(boolean isPrimaryColumn, int row) {
    Column column = isPrimaryColumn ? myPrimaryColumn : mySecondaryColumn;
    if (column.fragments.size() < row) {
      throw new IllegalStateException("Cannot add text to row " + row + " as in " + (isPrimaryColumn ? "left" : "right") + " column there are " + column.fragments.size() + " rows only");
    }
    Fragment fragment;
    if (column.fragments.size() == row) {
      fragment = new Fragment();
      if (myFont != null) {
        fragment.myComponent.setFont(myFont);
      }
      column.fragments.add(fragment);
    }
    else {
      fragment = column.fragments.get(row);
    }
    return fragment;
  }

  // NOTE(review): despite the name, this appends to row 1 of the PRIMARY column
  // (a second line under the main text), not to mySecondaryColumn.
  public @NotNull StatusText appendSecondaryText(@NotNull @NlsContexts.StatusText String text, @NotNull SimpleTextAttributes attrs, @Nullable ActionListener listener) {
    return appendText(true, 1, text, attrs, listener);
  }

  public @NotNull StatusText appendLine(@NotNull @NlsContexts.StatusText String text) {
    return appendLine(text, DEFAULT_ATTRIBUTES, null);
  }

  public StatusText appendLine(@NotNull @NlsContexts.StatusText String text, @NotNull SimpleTextAttributes attrs, @Nullable ActionListener listener) {
    return appendLine(null, text, attrs, listener);
  }

  /** Starts a new row at the bottom of the primary column. Does not update myText. */
  public StatusText appendLine(@Nullable Icon icon, @NotNull @NlsContexts.StatusText String text, @NotNull SimpleTextAttributes attrs, @Nullable ActionListener listener) {
    if (myIsDefaultText) {
      clear();
      myIsDefaultText = false;
    }
    return appendText(true, myPrimaryColumn.fragments.size(), icon, text, attrs, listener);
  }

  /**
   * Paints the status text on behalf of {@code owner}. When {@code owner} is not
   * myOwner, it is expected to be the view of a viewport owner (see below).
   */
  public void paint(Component owner, Graphics g) {
    if (!isStatusVisible()) return;

    if (owner == myOwner) {
      doPaintStatusText(g, getTextComponentBound());
    }
    else {
      paintOnComponentUnderViewport(owner, g);
    }
  }

  private void paintOnComponentUnderViewport(Component component, Graphics g) {
    JBViewport viewport = ObjectUtils.tryCast(myOwner, JBViewport.class);
    if (viewport == null || viewport.getView() != component || viewport.isPaintingNow()) return;

    // We're painting a component which has a viewport as it's ancestor.
    // As the viewport paints status text, we'll erase it, so we need to schedule a repaint for the viewport with status text's bounds.
    // But it causes flicker, so we paint status text over the component first and then schedule the viewport repaint.
    Rectangle textBoundsInViewport = getTextComponentBound();
    int xInOwner = textBoundsInViewport.x - component.getX();
    int yInOwner = textBoundsInViewport.y - component.getY();
    Rectangle textBoundsInOwner = new Rectangle(xInOwner, yInOwner, textBoundsInViewport.width, textBoundsInViewport.height);
    doPaintStatusText(g, textBoundsInOwner);

    viewport.repaint(textBoundsInViewport);
  }

  // Primary column is centered when it is alone; otherwise primary is left-aligned
  // and secondary is right-aligned within the common bounds.
  private Point getColumnLocation(boolean isPrimary, Rectangle bounds) {
    if (isPrimary && mySecondaryColumn.fragments.isEmpty()) {
      return new Point(bounds.x + (bounds.width - myPrimaryColumn.preferredSize.width) / 2, bounds.y);
    }
    if (isPrimary) return new Point(bounds.x, bounds.y);
    return new Point(bounds.x + bounds.width - mySecondaryColumn.preferredSize.width, bounds.y);
  }

  private void doPaintStatusText(@NotNull Graphics g, @NotNull Rectangle bounds) {
    paintColumnInBounds(myPrimaryColumn, g, getColumnLocation(true, bounds), bounds);
    paintColumnInBounds(mySecondaryColumn, g, getColumnLocation(false, bounds), bounds);
  }

  /**
   * Positions a single-row component inside the common bounds: centered when there
   * is no secondary column; otherwise the primary component goes left and any other
   * component goes right.
   */
  protected @NotNull Rectangle adjustComponentBounds(@NotNull JComponent component, @NotNull Rectangle bounds) {
    Dimension size = component.getPreferredSize();
    if (mySecondaryColumn.fragments.isEmpty()) {
      return new Rectangle(bounds.x + (bounds.width - size.width) / 2, bounds.y, size.width, size.height);
    }
    else {
      return component == getComponent()
             ? new Rectangle(bounds.x, bounds.y, size.width, size.height)
             : new Rectangle(bounds.x + bounds.width - size.width, bounds.y, size.width, size.height);
    }
  }

  private void paintColumnInBounds(Column column, Graphics g, Point location, Rectangle bounds) {
    for (Fragment fragment : column.fragments) {
      Rectangle r = getFragmentBounds(column, location, bounds, fragment);
      paintComponentInBounds(fragment.myComponent, g, r);
    }
  }

  // Fragment bounds in owner coordinates; single-row columns defer to
  // adjustComponentBounds for centering/alignment.
  private @NotNull Rectangle getFragmentBounds(Column column, Point columnLocation, Rectangle bounds, Fragment fragment) {
    Rectangle r = new Rectangle();
    r.setBounds(fragment.boundsInColumn);
    r.x += columnLocation.x;
    r.y += columnLocation.y;
    if (column.fragments.size() == 1) {
      r = adjustComponentBounds(fragment.myComponent, bounds);
    }
    return r;
  }

  // Paints via a translated/clipped Graphics copy so the shared component can be
  // positioned with plain setBounds(0, 0, ...).
  private static void paintComponentInBounds(@NotNull SimpleColoredComponent component, @NotNull Graphics g, @NotNull Rectangle bounds) {
    Graphics2D g2 = (Graphics2D)g.create(bounds.x, bounds.y, bounds.width, bounds.height);
    try {
      component.setBounds(0, 0, bounds.width, bounds.height);
      component.paint(g2);
    }
    finally {
      g2.dispose();
    }
  }

  /** Component of row 0 in the primary column (created on demand). */
  public @NotNull SimpleColoredComponent getComponent() {
    return getOrCreateFragment(true, 0).myComponent;
  }

  // NOTE(review): like appendSecondaryText, this is row 1 of the PRIMARY column.
  public @NotNull SimpleColoredComponent getSecondaryComponent() {
    return getOrCreateFragment(true, 1).myComponent;
  }

  /** Total size: columns side by side (widths summed, heights maxed). */
  public Dimension getPreferredSize() {
    return new Dimension(myPrimaryColumn.preferredSize.width + mySecondaryColumn.preferredSize.width,
                         Math.max(myPrimaryColumn.preferredSize.height, mySecondaryColumn.preferredSize.height));
  }

  public static @NlsContexts.StatusText String getDefaultEmptyText() {
    return UIBundle.message("message.nothingToShow");
  }
}
package org.mercycorps.translationcards.activity; import android.Manifest; import android.app.AlertDialog; import android.app.DownloadManager; import android.content.BroadcastReceiver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.IntentFilter; import android.content.pm.PackageManager; import android.net.Uri; import android.os.Build; import android.os.Bundle; import android.os.Environment; import android.support.v4.app.ActivityCompat; import android.support.v7.app.AppCompatActivity; import org.mercycorps.translationcards.MainApplication; import org.mercycorps.translationcards.R; import org.mercycorps.translationcards.porting.ImportException; import org.mercycorps.translationcards.porting.TxcImportUtility; import org.mercycorps.translationcards.repository.DeckRepository; import org.mercycorps.translationcards.repository.DictionaryRepository; import org.mercycorps.translationcards.repository.TranslationRepository; import org.mercycorps.translationcards.service.LanguageService; import java.io.File; import javax.inject.Inject; public class ImportActivity extends AppCompatActivity { public static final int PERMISSION_REQUEST_EXTERNAL_WRITE = 1; private TxcImportUtility portingUtility; private Uri source; private BroadcastReceiver onDownloadComplete; private AlertDialog downloadDialog; @Inject LanguageService languageService; @Inject TranslationRepository translationRepository; @Inject DictionaryRepository dictionaryRepository; @Inject DeckRepository deckRepository; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); MainApplication application = (MainApplication) getApplication(); application.getBaseComponent().inject(this); portingUtility = createImportUtility(); source = getIntent().getData(); onDownloadComplete = new BroadcastReceiver() { public void onReceive(Context context, Intent intent) { downloadDialog.dismiss(); 
unregisterReceiver(onDownloadComplete); importDeck(); } }; registerReceiver(onDownloadComplete, new IntentFilter(DownloadManager.ACTION_DOWNLOAD_COMPLETE)); requestPermissionsAndLoadData(); } @Override public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { switch (requestCode) { case PERMISSION_REQUEST_EXTERNAL_WRITE: if (grantResults[0] == PackageManager.PERMISSION_GRANTED) { loadDataAndImport(); } else { unregisterReceiver(onDownloadComplete); finish(); } break; default: super.onRequestPermissionsResult(requestCode, permissions, grantResults); } } private TxcImportUtility createImportUtility() { return new TxcImportUtility(languageService, deckRepository, translationRepository, dictionaryRepository); } protected void requestPermissionsAndLoadData() { if (Build.VERSION.SDK_INT >= 23) { if (checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED) { loadDataAndImport(); } else { ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, PERMISSION_REQUEST_EXTERNAL_WRITE); } } else { loadDataAndImport(); } } private void loadDataAndImport() { if (sourceIsURL()) { downloadFile(); } else { importDeck(); } } private void downloadFile() { String filename = getFilenameFromURL(); showDownloadAlertDialog(filename); String uniqueFilename = filename + "." 
+ System.currentTimeMillis(); DownloadManager.Request request = new DownloadManager.Request(source); request.setDestinationInExternalPublicDir(Environment.DIRECTORY_DOWNLOADS, uniqueFilename); DownloadManager downloadManager = (DownloadManager) getSystemService(Context.DOWNLOAD_SERVICE); downloadManager.enqueue(request); String path = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOWNLOADS) + "/" + uniqueFilename; File downloadedDeck = new File(path); this.source = Uri.fromFile(downloadedDeck); } private String getFilenameFromURL() { String[] parsedURL = source.toString().split("/"); return parsedURL[parsedURL.length - 1]; } private void showDownloadAlertDialog(String filename) { downloadDialog = new AlertDialog.Builder(ImportActivity.this) .setTitle(R.string.file_download_title) .setMessage(filename) .show(); } private boolean sourceIsURL() { return source.getScheme().equals("http") || source.getScheme().equals("https"); } private void importDeck() { new AlertDialog.Builder(this) .setTitle(R.string.import_confirm_alert_title) .setMessage(getString(R.string.import_confirm_alert_message)) .setPositiveButton(R.string.import_confirm_alert_positive, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { attemptImport(); } }) .setNegativeButton(R.string.import_confirm_alert_negative, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { ImportActivity.this.finish(); } }) .show(); } private void attemptImport() { try { TxcImportUtility.ImportSpec importSpec = portingUtility.prepareImport(ImportActivity.this, source); // Check if it's a deck we've already imported. if (false && portingUtility.isExistingDeck(importSpec)) { portingUtility.abortImport(importSpec); alertUserOfFailure(getString(R.string.import_failure_existing_deck)); return; } // Check if it's a different version of a deck we've already imported. 
if (importSpec.externalId != null && !importSpec.externalId.isEmpty()) { long otherVersion = portingUtility.getExistingDeckId(importSpec); if (otherVersion != -1) { handleVersionOverride(importSpec, otherVersion); return; } } portingUtility.executeImport(importSpec); } catch (ImportException e) { handleError(e); return; } goToMainScreen(); } private void handleVersionOverride( final TxcImportUtility.ImportSpec importSpec, final long otherVersion) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setTitle(R.string.import_version_override_title) .setItems(R.array.version_override_options, new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { switch (which) { case 0: finish(); break; case 1: try { portingUtility.executeImport(importSpec); } catch (ImportException e) { handleError(e); return; } goToMainScreen(); break; case 2: try { portingUtility.executeImport(importSpec); } catch (ImportException e) { handleError(e); return; } deckRepository.deleteDeck(otherVersion); goToMainScreen(); break; } } }); builder.show(); } private void handleError(ImportException e) { String errorMessage = getString(R.string.import_failure_default_error_message); if (e.getProblem() == ImportException.ImportProblem.FILE_NOT_FOUND) { errorMessage = getString(R.string.import_failure_file_not_found_error_message); } else if (e.getProblem() == ImportException.ImportProblem.NO_INDEX_FILE) { errorMessage = getString(R.string.import_failure_no_index_file_error_message); } else if (e.getProblem() == ImportException.ImportProblem.INVALID_INDEX_FILE) { errorMessage = getString(R.string.import_failure_invalid_index_file_error_message); } else if (e.getProblem() == ImportException.ImportProblem.READ_ERROR) { errorMessage = getString(R.string.import_failure_read_error_error_message); } alertUserOfFailure(errorMessage); } private void alertUserOfFailure(String errorMessage) { new AlertDialog.Builder(this) 
.setTitle(R.string.import_failure_alert_title) .setMessage(errorMessage) .setNeutralButton(R.string.misc_ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { ImportActivity.this.finish(); } }) .show(); } private void goToMainScreen() { Intent intent = new Intent(this, MyDecksActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent); finish(); } }
/* * Copyright 2012-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.builder; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import org.springframework.beans.factory.support.BeanNameGenerator; import org.springframework.boot.Banner; import org.springframework.boot.SpringApplication; import org.springframework.boot.WebApplicationType; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextInitializer; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.core.env.ConfigurableEnvironment; import org.springframework.core.io.ResourceLoader; /** * Builder for {@link SpringApplication} and {@link ApplicationContext} instances with * convenient fluent API and context hierarchy support. 
Simple example of a context * hierarchy: * * <pre class="code"> * new SpringApplicationBuilder(ParentConfig.class).child(ChildConfig.class).run(args); * </pre> * * Another common use case is setting active profiles and default properties to set up the * environment for an application: * * <pre class="code"> * new SpringApplicationBuilder(Application.class).profiles(&quot;server&quot;) * .properties(&quot;transport=local&quot;).run(args); * </pre> * * <p> * If your needs are simpler, consider using the static convenience methods in * SpringApplication instead. * * @author Dave Syer * @author Andy Wilkinson * @see SpringApplication */ public class SpringApplicationBuilder { private final SpringApplication application; private ConfigurableApplicationContext context; private SpringApplicationBuilder parent; private final AtomicBoolean running = new AtomicBoolean(false); private final Set<Class<?>> sources = new LinkedHashSet<>(); private final Map<String, Object> defaultProperties = new LinkedHashMap<>(); private ConfigurableEnvironment environment; private Set<String> additionalProfiles = new LinkedHashSet<>(); private boolean registerShutdownHookApplied; private boolean configuredAsChild = false; public SpringApplicationBuilder(Class<?>... sources) { this.application = createSpringApplication(sources); } /** * Creates a new {@link org.springframework.boot.SpringApplication} instances from the * given sources. Subclasses may override in order to provide a custom subclass of * {@link org.springframework.boot.SpringApplication} * @param sources The sources * @return The {@link org.springframework.boot.SpringApplication} instance * @since 1.1.0 */ protected SpringApplication createSpringApplication(Class<?>... sources) { return new SpringApplication(sources); } /** * Accessor for the current application context. 
* @return the current application context (or null if not yet running) */ public ConfigurableApplicationContext context() { return this.context; } /** * Accessor for the current application. * @return the current application (never null) */ public SpringApplication application() { return this.application; } /** * Create an application context (and its parent if specified) with the command line * args provided. The parent is run first with the same arguments if has not yet been * started. * @param args the command line arguments * @return an application context created from the current state */ public ConfigurableApplicationContext run(String... args) { if (this.running.get()) { // If already created we just return the existing context return this.context; } configureAsChildIfNecessary(args); if (this.running.compareAndSet(false, true)) { synchronized (this.running) { // If not already running copy the sources over and then run. this.context = build().run(args); } } return this.context; } private void configureAsChildIfNecessary(String... args) { if (this.parent != null && !this.configuredAsChild) { this.configuredAsChild = true; if (!this.registerShutdownHookApplied) { this.application.setRegisterShutdownHook(false); } initializers(new ParentContextApplicationContextInitializer( this.parent.run(args))); } } /** * Returns a fully configured {@link SpringApplication} that is ready to run. * @return the fully configured {@link SpringApplication}. */ public SpringApplication build() { return build(new String[0]); } /** * Returns a fully configured {@link SpringApplication} that is ready to run. Any * parent that has been configured will be run with the given {@code args}. * @param args the parent's args * @return the fully configured {@link SpringApplication}. */ public SpringApplication build(String... 
args) { configureAsChildIfNecessary(args); this.application.addPrimarySources(this.sources); return this.application; } /** * Create a child application with the provided sources. Default args and environment * are copied down into the child, but everything else is a clean sheet. * @param sources the sources for the application (Spring configuration) * @return the child application builder */ public SpringApplicationBuilder child(Class<?>... sources) { SpringApplicationBuilder child = new SpringApplicationBuilder(); child.sources(sources); // Copy environment stuff from parent to child child.properties(this.defaultProperties).environment(this.environment) .additionalProfiles(this.additionalProfiles); child.parent = this; // It's not possible if embedded web server are enabled to support web contexts as // parents because the servlets cannot be initialized at the right point in // lifecycle. web(false); // Probably not interested in multiple banners bannerMode(Banner.Mode.OFF); // Make sure sources get copied over this.application.addPrimarySources(this.sources); return child; } /** * Add a parent application with the provided sources. Default args and environment * are copied up into the parent, but everything else is a clean sheet. * @param sources the sources for the application (Spring configuration) * @return the parent builder */ public SpringApplicationBuilder parent(Class<?>... sources) { if (this.parent == null) { this.parent = new SpringApplicationBuilder(sources).web(false) .properties(this.defaultProperties).environment(this.environment); } else { this.parent.sources(sources); } return this.parent; } private SpringApplicationBuilder runAndExtractParent(String... args) { if (this.context == null) { run(args); } if (this.parent != null) { return this.parent; } throw new IllegalStateException( "No parent defined yet (please use the other overloaded parent methods to set one)"); } /** * Add an already running parent context to an existing application. 
* @param parent the parent context * @return the current builder (not the parent) */ public SpringApplicationBuilder parent(ConfigurableApplicationContext parent) { this.parent = new SpringApplicationBuilder(); this.parent.context = parent; this.parent.running.set(true); return this; } /** * Create a sibling application (one with the same parent). A side effect of calling * this method is that the current application (and its parent) are started. * @param sources the sources for the application (Spring configuration) * @return the new sibling builder */ public SpringApplicationBuilder sibling(Class<?>... sources) { return runAndExtractParent().child(sources); } /** * Create a sibling application (one with the same parent). A side effect of calling * this method is that the current application (and its parent) are started if they * are not already running. * @param sources the sources for the application (Spring configuration) * @param args the command line arguments to use when starting the current app and its * parent * @return the new sibling builder */ public SpringApplicationBuilder sibling(Class<?>[] sources, String... args) { return runAndExtractParent(args).child(sources); } /** * Explicitly set the context class to be used. * @param cls the context class to use * @return the current builder */ public SpringApplicationBuilder contextClass( Class<? extends ConfigurableApplicationContext> cls) { this.application.setApplicationContextClass(cls); return this; } /** * Add more sources (configuration classes and components) to this application. * @param sources the sources to add * @return the current builder */ public SpringApplicationBuilder sources(Class<?>... sources) { this.sources.addAll(new LinkedHashSet<>(Arrays.asList(sources))); return this; } /** * Flag to explicitly request a web or non-web environment (auto detected based on * classpath if not set). 
* @param webEnvironment the flag to set * @return the current builder * @deprecated since 2.0.0 in favour of {@link #web(WebApplicationType)} */ @Deprecated public SpringApplicationBuilder web(boolean webEnvironment) { this.application.setWebEnvironment(webEnvironment); return this; } /** * Flag to explicitly request a specific type of web application. Auto-detected based * on the classpath if not set. * @param webApplication the type of web application * @return the current builder * @since 2.0.0 */ public SpringApplicationBuilder web(WebApplicationType webApplication) { this.application.setWebApplicationType(webApplication); return this; } /** * Flag to indicate the startup information should be logged. * @param logStartupInfo the flag to set. Default true. * @return the current builder */ public SpringApplicationBuilder logStartupInfo(boolean logStartupInfo) { this.application.setLogStartupInfo(logStartupInfo); return this; } /** * Sets the {@link Banner} instance which will be used to print the banner when no * static banner file is provided. * @param banner The banner to use * @return the current builder */ public SpringApplicationBuilder banner(Banner banner) { this.application.setBanner(banner); return this; } public SpringApplicationBuilder bannerMode(Banner.Mode bannerMode) { this.application.setBannerMode(bannerMode); return this; } /** * Sets if the application is headless and should not instantiate AWT. Defaults to * {@code true} to prevent java icons appearing. * @param headless if the application is headless * @return the current builder */ public SpringApplicationBuilder headless(boolean headless) { this.application.setHeadless(headless); return this; } /** * Sets if the created {@link ApplicationContext} should have a shutdown hook * registered. 
* @param registerShutdownHook if the shutdown hook should be registered * @return the current builder */ public SpringApplicationBuilder registerShutdownHook(boolean registerShutdownHook) { this.registerShutdownHookApplied = true; this.application.setRegisterShutdownHook(registerShutdownHook); return this; } /** * Fixes the main application class that is used to anchor the startup messages. * @param mainApplicationClass the class to use. * @return the current builder */ public SpringApplicationBuilder main(Class<?> mainApplicationClass) { this.application.setMainApplicationClass(mainApplicationClass); return this; } /** * Flag to indicate that command line arguments should be added to the environment. * @param addCommandLineProperties the flag to set. Default true. * @return the current builder */ public SpringApplicationBuilder addCommandLineProperties( boolean addCommandLineProperties) { this.application.setAddCommandLineProperties(addCommandLineProperties); return this; } /** * Default properties for the environment in the form {@code key=value} or * {@code key:value}. * @param defaultProperties the properties to set. * @return the current builder */ public SpringApplicationBuilder properties(String... defaultProperties) { return properties(getMapFromKeyValuePairs(defaultProperties)); } private Map<String, Object> getMapFromKeyValuePairs(String[] properties) { Map<String, Object> map = new HashMap<>(); for (String property : properties) { int index = lowestIndexOf(property, ":", "="); String key = property.substring(0, index > 0 ? index : property.length()); String value = index > 0 ? property.substring(index + 1) : ""; map.put(key, value); } return map; } private int lowestIndexOf(String property, String... candidates) { int index = -1; for (String candidate : candidates) { int candidateIndex = property.indexOf(candidate); if (candidateIndex > 0) { index = (index == -1 ? 
candidateIndex : Math.min(index, candidateIndex)); } } return index; } /** * Default properties for the environment in the form {@code key=value} or * {@code key:value}. * @param defaultProperties the properties to set. * @return the current builder */ public SpringApplicationBuilder properties(Properties defaultProperties) { return properties(getMapFromProperties(defaultProperties)); } private Map<String, Object> getMapFromProperties(Properties properties) { HashMap<String, Object> map = new HashMap<>(); for (Object key : Collections.list(properties.propertyNames())) { map.put((String) key, properties.get(key)); } return map; } /** * Default properties for the environment. Multiple calls to this method are * cumulative. * @param defaults the default properties * @return the current builder * @see SpringApplicationBuilder#properties(String...) */ public SpringApplicationBuilder properties(Map<String, Object> defaults) { this.defaultProperties.putAll(defaults); this.application.setDefaultProperties(this.defaultProperties); if (this.parent != null) { this.parent.properties(this.defaultProperties); this.parent.environment(this.environment); } return this; } /** * Add to the active Spring profiles for this app (and its parent and children). * @param profiles the profiles to add. * @return the current builder */ public SpringApplicationBuilder profiles(String... profiles) { this.additionalProfiles.addAll(Arrays.asList(profiles)); this.application.setAdditionalProfiles(this.additionalProfiles .toArray(new String[this.additionalProfiles.size()])); return this; } private SpringApplicationBuilder additionalProfiles( Collection<String> additionalProfiles) { this.additionalProfiles = new LinkedHashSet<>(additionalProfiles); this.application.setAdditionalProfiles(this.additionalProfiles .toArray(new String[this.additionalProfiles.size()])); return this; } /** * Bean name generator for automatically generated bean names in the application * context. 
* @param beanNameGenerator the generator to set. * @return the current builder */ public SpringApplicationBuilder beanNameGenerator( BeanNameGenerator beanNameGenerator) { this.application.setBeanNameGenerator(beanNameGenerator); return this; } /** * Environment for the application context. * @param environment the environment to set. * @return the current builder */ public SpringApplicationBuilder environment(ConfigurableEnvironment environment) { this.application.setEnvironment(environment); this.environment = environment; return this; } /** * {@link ResourceLoader} for the application context. If a custom class loader is * needed, this is where it would be added. * @param resourceLoader the resource loader to set. * @return the current builder */ public SpringApplicationBuilder resourceLoader(ResourceLoader resourceLoader) { this.application.setResourceLoader(resourceLoader); return this; } /** * Add some initializers to the application (applied to the {@link ApplicationContext} * before any bean definitions are loaded). * @param initializers some initializers to add * @return the current builder */ public SpringApplicationBuilder initializers( ApplicationContextInitializer<?>... initializers) { this.application.addInitializers(initializers); return this; } /** * Add some listeners to the application (listening for SpringApplication events as * well as regular Spring events once the context is running). Any listeners that are * also {@link ApplicationContextInitializer} will be added to the * {@link #initializers(ApplicationContextInitializer...) initializers} automatically. * @param listeners some listeners to add * @return the current builder */ public SpringApplicationBuilder listeners(ApplicationListener<?>... listeners) { this.application.addListeners(listeners); return this; } }
package com.tlongdev.hexle.model.field;

import com.tlongdev.hexle.factory.TileFactory;
import com.tlongdev.hexle.model.Field;
import com.tlongdev.hexle.model.Tile;

import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

/**
 * Tests for {@code Field.minimizeGaps(Tile[])} on rows of length 4, 8, 12 and 16.
 *
 * Based on the expectations below, minimizeGaps is expected to collapse each run of
 * blank tiles by an even amount (leaving a single blank when the run length is odd),
 * shift later tiles left by that amount (recording it via getSlideInOffset), and leave
 * nulls at the end of the array for the removed tiles. Single-tile gaps are left alone.
 * NOTE(review): inferred from the assertions in this class — verify against Field.
 *
 * Rows are created once in setUpBeforeClass; each test works on clones (result*) so
 * the original row* arrays can serve as the expected reference tiles.
 *
 * @author longi
 * @since 2016.04.15.
 */
public class MinimizeGapsTest {

    // Reference rows of increasing length (4, 8, 12, 16), filled with non-blank tiles.
    private static Tile[] row1;
    private static Tile[] row2;
    private static Tile[] row3;
    private static Tile[] row4;

    private static TileFactory factory;

    // Per-test working copies of the rows; mutated by Field.minimizeGaps.
    private Tile[] result1;
    private Tile[] result2;
    private Tile[] result3;
    private Tile[] result4;

    @BeforeClass
    public static void setUpBeforeClass() {
        row1 = new Tile[4];
        row2 = new Tile[8];
        row3 = new Tile[12];
        row4 = new Tile[16];
        factory = new TileFactory();
        // Fill every row with regular (non-blank) tiles in a single pass.
        for (int i = 0; i < 16; i++) {
            if (i < row1.length) {
                row1[i] = factory.get(0, 0);
            }
            if (i < row2.length) {
                row2[i] = factory.get(0, 0);
            }
            if (i < row3.length) {
                row3[i] = factory.get(0, 0);
            }
            row4[i] = factory.get(0, 0);
        }
    }

    @Before
    public void setUp() throws Exception {
        // Reset slide-in offsets the previous test may have set on the shared tiles.
        for (int i = 0; i < 16; i++) {
            if (i < row1.length) {
                row1[i].resetSlideInOffset();
            }
            if (i < row2.length) {
                row2[i].resetSlideInOffset();
            }
            if (i < row3.length) {
                row3[i].resetSlideInOffset();
            }
            row4[i].resetSlideInOffset();
        }
        // Shallow copies: result* share Tile instances with row*, so reference
        // equality in assertEquals(row..., result...) identifies the same tile.
        result1 = row1.clone();
        result2 = row2.clone();
        result3 = row3.clone();
        result4 = row4.clone();
    }

    /** A row with no blanks must be left completely untouched. */
    @Test
    public void testFull() throws Exception {
        //The operation
        Field.minimizeGaps(result1);
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        //Assert no change
        assertArrayEquals(row1, result1);
        assertArrayEquals(row2, result2);
        assertArrayEquals(row3, result3);
        assertArrayEquals(row4, result4);
    }

    /** Odd-length gaps in the middle of the row shrink to a single blank. */
    @Test
    public void testSimpleGameOdd() throws Exception {
        //Insert blanks
        //XXX___XX
        result2[3] = factory.getBlank(0, 0);
        result2[4] = factory.getBlank(0, 0);
        result2[5] = factory.getBlank(0, 0);

        //XXX_____XXXX
        result3[3] = factory.getBlank(0, 0);
        result3[4] = factory.getBlank(0, 0);
        result3[5] = factory.getBlank(0, 0);
        result3[6] = factory.getBlank(0, 0);
        result3[7] = factory.getBlank(0, 0);

        //XXX_______XXXXXX
        result4[3] = factory.getBlank(0, 0);
        result4[4] = factory.getBlank(0, 0);
        result4[5] = factory.getBlank(0, 0);
        result4[6] = factory.getBlank(0, 0);
        result4[7] = factory.getBlank(0, 0);
        result4[8] = factory.getBlank(0, 0);
        result4[9] = factory.getBlank(0, 0);

        //The operation
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        //Assert that there is one blank left
        assertTrue(result2[3].isBlank());
        assertTrue(result3[3].isBlank());
        assertTrue(result4[3].isBlank());

        //Assert that tiles stayed in their place
        for (int i = 0; i < 3; i++) {
            assertEquals(row2[i], result2[i]);
            assertEquals(row3[i], result3[i]);
            assertEquals(row4[i], result4[i]);
            assertEquals(0, result2[i].getSlideInOffset());
            assertEquals(0, result3[i].getSlideInOffset());
            assertEquals(0, result4[i].getSlideInOffset());
        }

        //Assert that the tiles properly shifted (by 2, 4 and 6 respectively)
        for (int i = 6; i < row2.length; i++) {
            assertEquals(row2[i], result2[i - 2]);
            assertEquals(2, result2[i - 2].getSlideInOffset());
        }
        for (int i = 8; i < row3.length; i++) {
            assertEquals(row3[i], result3[i - 4]);
            assertEquals(4, result3[i - 4].getSlideInOffset());
        }
        for (int i = 10; i < row4.length; i++) {
            assertEquals(row4[i], result4[i - 6]);
            assertEquals(6, result4[i - 6].getSlideInOffset());
        }

        //Assert that everything past the shifted tiles is null
        for (int i = 0; i < 2; i++) {
            assertNull(result2[result2.length - i - 1]);
        }
        for (int i = 0; i < 4; i++) {
            assertNull(result3[result3.length - i - 1]);
        }
        for (int i = 0; i < 6; i++) {
            assertNull(result4[result4.length - i - 1]);
        }
    }

    /** Even-length gaps in the middle of the row are removed entirely. */
    @Test
    public void testSimpleGameEven() throws Exception {
        //Insert blanks
        //XXXX__XX
        result2[4] = factory.getBlank(0, 0);
        result2[5] = factory.getBlank(0, 0);

        //XXXX____XXXX
        result3[4] = factory.getBlank(0, 0);
        result3[5] = factory.getBlank(0, 0);
        result3[6] = factory.getBlank(0, 0);
        result3[7] = factory.getBlank(0, 0);

        //XXXX______XXXXXX
        result4[4] = factory.getBlank(0, 0);
        result4[5] = factory.getBlank(0, 0);
        result4[6] = factory.getBlank(0, 0);
        result4[7] = factory.getBlank(0, 0);
        result4[8] = factory.getBlank(0, 0);
        result4[9] = factory.getBlank(0, 0);

        //The operation
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        //Assert that no blank is left behind (even gaps vanish completely)
        assertFalse(result2[3].isBlank());
        assertFalse(result3[3].isBlank());
        assertFalse(result4[3].isBlank());

        //Assert that tiles stayed in their place
        for (int i = 0; i < 4; i++) {
            assertEquals(row2[i], result2[i]);
            assertEquals(row3[i], result3[i]);
            assertEquals(row4[i], result4[i]);
            assertEquals(0, result2[i].getSlideInOffset());
            assertEquals(0, result3[i].getSlideInOffset());
            assertEquals(0, result4[i].getSlideInOffset());
        }

        //Assert that the tiles properly shifted (by 2, 4 and 6 respectively)
        for (int i = 6; i < row2.length; i++) {
            assertEquals(row2[i], result2[i - 2]);
            assertEquals(2, result2[i - 2].getSlideInOffset());
        }
        for (int i = 8; i < row3.length; i++) {
            assertEquals(row3[i], result3[i - 4]);
            assertEquals(4, result3[i - 4].getSlideInOffset());
        }
        for (int i = 10; i < row4.length; i++) {
            assertEquals(row4[i], result4[i - 6]);
            assertEquals(6, result4[i - 6].getSlideInOffset());
        }

        //Assert that everything past the shifted tiles is null
        for (int i = 0; i < 2; i++) {
            assertNull(result2[result2.length - i - 1]);
        }
        for (int i = 0; i < 4; i++) {
            assertNull(result3[result3.length - i - 1]);
        }
        for (int i = 0; i < 6; i++) {
            assertNull(result4[result4.length - i - 1]);
        }
    }

    /** A row made entirely of blanks should end up all null. */
    @Test
    public void testEmptyRows() throws Exception {
        //Insert blanks
        for (int i = 0; i < 16; i++) {
            if (i < result1.length) {
                result1[i] = factory.getBlank(0, 0);
            }
            if (i < result2.length) {
                result2[i] = factory.getBlank(0, 0);
            }
            if (i < result3.length) {
                result3[i] = factory.getBlank(0, 0);
            }
            result4[i] = factory.getBlank(0, 0);
        }

        //The operation
        Field.minimizeGaps(result1);
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        //Assert that everything is null
        for (int i = 0; i < 16; i++) {
            if (i < result1.length) {
                assertNull(result1[i]);
            }
            if (i < result2.length) {
                assertNull(result2[i]);
            }
            if (i < result3.length) {
                assertNull(result3[i]);
            }
            assertNull(result4[i]);
        }
    }

    /** Odd-length gaps at the start of the row shrink to a single leading blank. */
    @Test
    public void testBeginningOdd() throws Exception {
        //Insert blanks
        //___X
        result1[0] = factory.getBlank(0, 0);
        result1[1] = factory.getBlank(0, 0);
        result1[2] = factory.getBlank(0, 0);

        //___XXXXX
        result2[0] = factory.getBlank(0, 0);
        result2[1] = factory.getBlank(0, 0);
        result2[2] = factory.getBlank(0, 0);

        //_____XXXXXXX
        result3[0] = factory.getBlank(0, 0);
        result3[1] = factory.getBlank(0, 0);
        result3[2] = factory.getBlank(0, 0);
        result3[3] = factory.getBlank(0, 0);
        result3[4] = factory.getBlank(0, 0);

        //_______XXXXXXXXX
        result4[0] = factory.getBlank(0, 0);
        result4[1] = factory.getBlank(0, 0);
        result4[2] = factory.getBlank(0, 0);
        result4[3] = factory.getBlank(0, 0);
        result4[4] = factory.getBlank(0, 0);
        result4[5] = factory.getBlank(0, 0);
        result4[6] = factory.getBlank(0, 0);

        //The operation
        Field.minimizeGaps(result1);
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        //Assert that there is one blank left
        assertTrue(result1[0].isBlank());
        assertTrue(result2[0].isBlank());
        assertTrue(result3[0].isBlank());
        assertTrue(result4[0].isBlank());

        //Assert that the tiles properly shifted (by 2, 4 and 6 respectively)
        assertEquals(row1[3], result1[1]);
        assertEquals(2, result1[1].getSlideInOffset());
        for (int i = 3; i < row2.length; i++) {
            assertEquals(row2[i], result2[i - 2]);
            assertEquals(2, result2[i - 2].getSlideInOffset());
        }
        for (int i = 5; i < row3.length; i++) {
            assertEquals(row3[i], result3[i - 4]);
            assertEquals(4, result3[i - 4].getSlideInOffset());
        }
        for (int i = 7; i < row4.length; i++) {
            assertEquals(row4[i], result4[i - 6]);
            assertEquals(6, result4[i - 6].getSlideInOffset());
        }

        //Assert that the end is null
        for (int i = 0; i < 2; i++) {
            assertNull(result1[result1.length - i - 1]);
            assertNull(result2[result2.length - i - 1]);
        }
        for (int i = 0; i < 4; i++) {
            assertNull(result3[result3.length - i - 1]);
        }
        for (int i = 0; i < 6; i++) {
            assertNull(result4[result4.length - i - 1]);
        }
    }

    /** Odd-length gaps at the end shrink to one blank; nothing before them moves. */
    @Test
    public void testEndOdd() throws Exception {
        //Insert blanks
        //X___
        result1[1] = factory.getBlank(0, 0);
        result1[2] = factory.getBlank(0, 0);
        result1[3] = factory.getBlank(0, 0);

        //XXXXX___
        result2[5] = factory.getBlank(0, 0);
        result2[6] = factory.getBlank(0, 0);
        result2[7] = factory.getBlank(0, 0);

        //XXXXXXX_____
        result3[7] = factory.getBlank(0, 0);
        result3[8] = factory.getBlank(0, 0);
        result3[9] = factory.getBlank(0, 0);
        result3[10] = factory.getBlank(0, 0);
        result3[11] = factory.getBlank(0, 0);

        //XXXXXXXXX_______
        result4[9] = factory.getBlank(0, 0);
        result4[10] = factory.getBlank(0, 0);
        result4[11] = factory.getBlank(0, 0);
        result4[12] = factory.getBlank(0, 0);
        result4[13] = factory.getBlank(0, 0);
        result4[14] = factory.getBlank(0, 0);
        result4[15] = factory.getBlank(0, 0);

        //The operation
        Field.minimizeGaps(result1);
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        //Assert that there is one blank left
        assertTrue(result1[1].isBlank());
        assertTrue(result2[5].isBlank());
        assertTrue(result3[7].isBlank());
        assertTrue(result4[9].isBlank());

        //Assert that tiles stayed in their place
        assertEquals(row1[0], result1[0]);
        assertEquals(0, result1[0].getSlideInOffset());
        for (int i = 0; i < 5; i++) {
            assertEquals(row2[i], result2[i]);
            assertEquals(0, result2[i].getSlideInOffset());
        }
        for (int i = 0; i < 7; i++) {
            assertEquals(row3[i], result3[i]);
            assertEquals(0, result3[i].getSlideInOffset());
        }
        for (int i = 0; i < 9; i++) {
            assertEquals(row4[i], result4[i]);
            assertEquals(0, result4[i].getSlideInOffset());
        }

        //Assert that the end is null
        for (int i = 0; i < 2; i++) {
            assertNull(result1[result1.length - i - 1]);
            assertNull(result2[result2.length - i - 1]);
        }
        for (int i = 0; i < 4; i++) {
            assertNull(result3[result3.length - i - 1]);
        }
        for (int i = 0; i < 6; i++) {
            assertNull(result4[result4.length - i - 1]);
        }
    }

    /** Even-length gaps at the start are removed completely. */
    @Test
    public void testBeginningEven() throws Exception {
        //Insert blanks
        //__XX
        result1[0] = factory.getBlank(0, 0);
        result1[1] = factory.getBlank(0, 0);

        //__XXXXXX
        result2[0] = factory.getBlank(0, 0);
        result2[1] = factory.getBlank(0, 0);

        //____XXXXXXXX
        result3[0] = factory.getBlank(0, 0);
        result3[1] = factory.getBlank(0, 0);
        result3[2] = factory.getBlank(0, 0);
        result3[3] = factory.getBlank(0, 0);

        //______XXXXXXXXXX
        result4[0] = factory.getBlank(0, 0);
        result4[1] = factory.getBlank(0, 0);
        result4[2] = factory.getBlank(0, 0);
        result4[3] = factory.getBlank(0, 0);
        result4[4] = factory.getBlank(0, 0);
        result4[5] = factory.getBlank(0, 0);

        //The operation
        Field.minimizeGaps(result1);
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        //Assert that the tiles properly shifted (by 2, 4 and 6 respectively)
        assertEquals(row1[2], result1[0]);
        assertEquals(2, result1[0].getSlideInOffset());
        assertEquals(row1[3], result1[1]);
        assertEquals(2, result1[1].getSlideInOffset());
        for (int i = 2; i < row2.length; i++) {
            assertEquals(row2[i], result2[i - 2]);
            assertEquals(2, result2[i - 2].getSlideInOffset());
        }
        for (int i = 4; i < row3.length; i++) {
            assertEquals(row3[i], result3[i - 4]);
            assertEquals(4, result3[i - 4].getSlideInOffset());
        }
        for (int i = 6; i < row4.length; i++) {
            assertEquals(row4[i], result4[i - 6]);
            assertEquals(6, result4[i - 6].getSlideInOffset());
        }

        //Assert that the end is null
        for (int i = 0; i < 2; i++) {
            assertNull(result1[result1.length - i - 1]);
            assertNull(result2[result2.length - i - 1]);
        }
        for (int i = 0; i < 4; i++) {
            assertNull(result3[result3.length - i - 1]);
        }
        for (int i = 0; i < 6; i++) {
            assertNull(result4[result4.length - i - 1]);
        }
    }

    /** Even-length gaps at the end are removed; nothing before them moves. */
    @Test
    public void testEndEven() throws Exception {
        //Insert blanks
        //XX__
        result1[2] = factory.getBlank(0, 0);
        result1[3] = factory.getBlank(0, 0);

        //XXXXXX__
        result2[6] = factory.getBlank(0, 0);
        result2[7] = factory.getBlank(0, 0);

        //XXXXXXXX____
        result3[8] = factory.getBlank(0, 0);
        result3[9] = factory.getBlank(0, 0);
        result3[10] = factory.getBlank(0, 0);
        result3[11] = factory.getBlank(0, 0);

        //XXXXXXXXXX______
        result4[10] = factory.getBlank(0, 0);
        result4[11] = factory.getBlank(0, 0);
        result4[12] = factory.getBlank(0, 0);
        result4[13] = factory.getBlank(0, 0);
        result4[14] = factory.getBlank(0, 0);
        result4[15] = factory.getBlank(0, 0);

        //The operation
        Field.minimizeGaps(result1);
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        //Assert that tiles stayed in their place
        assertEquals(row1[0], result1[0]);
        assertEquals(0, result1[0].getSlideInOffset());
        assertEquals(row1[1], result1[1]);
        assertEquals(0, result1[1].getSlideInOffset());
        for (int i = 0; i < 6; i++) {
            assertEquals(row2[i], result2[i]);
            assertEquals(0, result2[i].getSlideInOffset());
        }
        for (int i = 0; i < 8; i++) {
            assertEquals(row3[i], result3[i]);
            assertEquals(0, result3[i].getSlideInOffset());
        }
        for (int i = 0; i < 10; i++) {
            assertEquals(row4[i], result4[i]);
            assertEquals(0, result4[i].getSlideInOffset());
        }

        //Assert that the end is null
        for (int i = 0; i < 2; i++) {
            assertNull(result1[result1.length - i - 1]);
            assertNull(result2[result2.length - i - 1]);
        }
        for (int i = 0; i < 4; i++) {
            assertNull(result3[result3.length - i - 1]);
        }
        for (int i = 0; i < 6; i++) {
            assertNull(result4[result4.length - i - 1]);
        }
    }

    /** Single-tile gaps are not collapsed; larger gaps still shrink around them. */
    @Test
    public void testIgnoreSingleGaps() throws Exception {
        //Insert blanks
        //XX_X
        result1[2] = factory.getBlank(0, 0);

        //XXX_XXXX
        result2[3] = factory.getBlank(0, 0);

        //XX_XXXX_XXXX
        result3[2] = factory.getBlank(0, 0);
        result3[7] = factory.getBlank(0, 0);

        //XX_XXX____XXX_XX
        result4[2] = factory.getBlank(0, 0);
        result4[6] = factory.getBlank(0, 0);
        result4[7] = factory.getBlank(0, 0);
        result4[8] = factory.getBlank(0, 0);
        result4[9] = factory.getBlank(0, 0);
        result4[13] = factory.getBlank(0, 0);

        //The operation
        Field.minimizeGaps(result1);
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        // Single gaps stay put; in result4 the 4-wide gap (indices 6-9) is removed,
        // so the trailing single blank at 13 ends up at index 9 (13 - 4).
        assertTrue(result1[2].isBlank());
        assertTrue(result2[3].isBlank());
        assertTrue(result3[2].isBlank());
        assertTrue(result3[7].isBlank());
        assertTrue(result4[2].isBlank());
        assertTrue(result4[9].isBlank());
    }

    /** A single blank at index 0 is also left alone. */
    @Test
    public void testIgnoreSingleGapBeginning() throws Exception {
        //XXX_XXXX (single blank at the very beginning)
        result2[0] = factory.getBlank(0, 0);

        //XX_XXX____XX (single leading blank plus a removable 4-wide gap)
        result3[0] = factory.getBlank(0, 0);
        result3[6] = factory.getBlank(0, 0);
        result3[7] = factory.getBlank(0, 0);
        result3[8] = factory.getBlank(0, 0);
        result3[9] = factory.getBlank(0, 0);

        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);

        assertTrue(result2[0].isBlank());
        assertTrue(result3[0].isBlank());
    }

    /** An odd gap and an even gap in the same row are handled independently. */
    @Test
    public void testOddAndEven() throws Exception {
        //XX___XXX____XXXX
        result4[2] = factory.getBlank(0, 0);
        result4[3] = factory.getBlank(0, 0);
        result4[4] = factory.getBlank(0, 0);
        result4[8] = factory.getBlank(0, 0);
        result4[9] = factory.getBlank(0, 0);
        result4[10] = factory.getBlank(0, 0);
        result4[11] = factory.getBlank(0, 0);

        Field.minimizeGaps(result4);

        // Odd gap (2-4) collapses to one blank at index 2; even gap (8-11) vanishes.
        assertEquals(row4[0], result4[0]);
        assertEquals(row4[1], result4[1]);
        assertTrue(result4[2].isBlank());
        assertEquals(row4[5], result4[3]);
        assertEquals(row4[6], result4[4]);
        assertEquals(row4[7], result4[5]);
        assertEquals(row4[12], result4[6]);
        assertEquals(row4[13], result4[7]);
        assertEquals(row4[14], result4[8]);
        assertEquals(row4[15], result4[9]);
        // In total 6 tiles were removed (2 from the odd gap, 4 from the even gap).
        for (int i = 0; i < 6; i++) {
            assertNull(result4[result4.length - i - 1]);
        }
    }

    /** Rows that are all blanks except the last tile keep one blank plus that tile. */
    @Test
    public void testOneRemainingEnd() throws Exception {
        //Insert blanks everywhere except the final slot of each row
        for (int i = 0; i < 15; i++) {
            if (i < result1.length - 1) {
                result1[i] = factory.getBlank(0, 0);
            }
            if (i < result2.length - 1) {
                result2[i] = factory.getBlank(0, 0);
            }
            if (i < result3.length - 1) {
                result3[i] = factory.getBlank(0, 0);
            }
            result4[i] = factory.getBlank(0, 0);
        }

        //The operation
        Field.minimizeGaps(result1);
        Field.minimizeGaps(result2);
        Field.minimizeGaps(result3);
        Field.minimizeGaps(result4);

        assertTrue(result1[0].isBlank());
        assertTrue(result2[0].isBlank());
        assertTrue(result3[0].isBlank());
        assertTrue(result4[0].isBlank());

        assertFalse(result1[1].isBlank());
        assertFalse(result2[1].isBlank());
        assertFalse(result3[1].isBlank());
        assertFalse(result4[1].isBlank());

        //Assert that everything past the first two slots is null
        for (int i = 2; i < 16; i++) {
            if (i < result1.length) {
                assertNull(result1[i]);
            }
            if (i < result2.length) {
                assertNull(result2[i]);
            }
            if (i < result3.length) {
                assertNull(result3[i]);
            }
            assertNull(result4[i]);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cometd.bayeux; import java.util.List; /** Bayeux Interface.<br> * This interface represents the server side API for the Bayeux messaging protocol. * Bayeux is a simple subscribe/publish/receive methodology, not far from JMS, but much simplified.<br> * It is used both by the actual implementation and by server side clients.<br> * Server side clients use this to create, retrieve and subscribe to channels. * Server side clients are represented, just like remote clients, through the Client interface. * <br> * The Bayeux implementations is intended to be thread safe and multiple threads may simultaneously call Bayeux methods. * <br> * The Bayeux object, is the starting point for any cometd application relying on the Bayeux object. 
 * Dependent on the container, the Bayeux object will be stored in the <code>javax.servlet.ServletContext</code> object
 * as an attribute under the name <code>Bayeux.DOJOX_COMETD_BAYEUX</code><br>
 * To retrieve this object, one would simply call<br>
 * <code>Bayeux bx = (Bayeux)getServletContext().getAttribute(Bayeux.DOJOX_COMETD_BAYEUX);</code>
 * <br><br>
 * The Bayeux protocol is fairly straightforward and includes some messaging that clients,
 * both server side and remote, do not need to know about.
 * This object gets initialized by a container dependent servlet, and the servlet then handles all Bayeux communication from the client.
 * Remote messages are delivered to channels, and to server side clients using the <code>Listener</code> interface.<br>
 * <br>
 * A <code>Bayeux session</code> is active as long as the webapp hosting the Bayeux object is active.<br>
 * When the web application shuts down, the Bayeux object will unsubscribe all clients and remove all the active channels.
* @author Greg Wilkins
 */
public interface Bayeux
{
    // ------------------------------------------------------------------------
    // Meta channel names. Meta channels carry protocol traffic (handshake,
    // connect, subscribe, ...) rather than application messages.
    // ------------------------------------------------------------------------

    /** Root of the meta channel namespace. */
    public static final String META="/meta";
    /** Meta channel namespace prefix, with trailing slash. */
    public static final String META_SLASH="/meta/";
    /** Meta channel - connect message. */
    public static final String META_CONNECT="/meta/connect";
    /** Meta channel - client message. */
    public static final String META_CLIENT="/meta/client";
    /** Meta channel - disconnect message. */
    public static final String META_DISCONNECT="/meta/disconnect";
    /** Meta channel - handshake message. */
    public static final String META_HANDSHAKE="/meta/handshake";
    /** Meta channel - ping message. */
    public static final String META_PING="/meta/ping";
    /**
     * Meta channel - reconnect message.
     * @deprecated reconnect is handled via {@link #META_CONNECT} and the advice field.
     */
    public static final String META_RECONNECT="/meta/reconnect";
    /** Meta channel - status message. */
    public static final String META_STATUS="/meta/status";
    /** Meta channel - subscribe message. */
    public static final String META_SUBSCRIBE="/meta/subscribe";
    /** Meta channel - unsubscribe message. */
    public static final String META_UNSUBSCRIBE="/meta/unsubscribe";

    // ------------------------------------------------------------------------
    // Field names used inside Bayeux messages.
    // ------------------------------------------------------------------------

    /** Message field - clientId. */
    public static final String CLIENT_FIELD="clientId";
    /** Message field - data (the application payload). */
    public static final String DATA_FIELD="data";
    /** Message field - channel. */
    public static final String CHANNEL_FIELD="channel";
    /** Message field - id. */
    public static final String ID_FIELD="id";
    /** Message field - error. */
    public static final String ERROR_FIELD="error";
    /** Message field - timestamp. */
    public static final String TIMESTAMP_FIELD="timestamp";
    /** Message field - transport. */
    public static final String TRANSPORT_FIELD="transport";
    /** Message field - advice (server hints for reconnect/interval behaviour). */
    public static final String ADVICE_FIELD="advice";
    /** Message field - successful. */
    public static final String SUCCESSFUL_FIELD="successful";
    /** Message field - subscription. */
    public static final String SUBSCRIPTION_FIELD="subscription";
    /** Message field - ext (extension data). */
    public static final String EXT_FIELD="ext";
    /** Message field - connectionType. */
    public static final String CONNECTION_TYPE_FIELD="connectionType";
    /** Message field - version. */
    public static final String VERSION_FIELD="version";
    /** Message field - minimumVersion. */
    public static final String MIN_VERSION_FIELD="minimumVersion";
    /** Message field - supportedConnectionTypes. */
    public static final String SUPP_CONNECTION_TYPE_FIELD="supportedConnectionTypes";
    /** Message field - json-comment-filtered. */
    public static final String JSON_COMMENT_FILTERED_FIELD="json-comment-filtered";
    /** Advice field - reconnect. */
    public static final String RECONNECT_FIELD = "reconnect";
    /** Advice field - interval. */
    public static final String INTERVAL_FIELD = "interval";

    // ------------------------------------------------------------------------
    // Field values used inside Bayeux messages.
    // ------------------------------------------------------------------------

    /** Advice value - retry the connection. */
    public static final String RETRY_RESPONSE = "retry";
    /** Advice value - perform a new handshake. */
    public static final String HANDSHAKE_RESPONSE = "handshake";
    /** Advice value - do not reconnect. */
    public static final String NONE_RESPONSE = "none";

    /** Root of the service channel namespace. */
    public static final String SERVICE="/service";
    /** Service channel namespace prefix, with trailing slash. */
    public static final String SERVICE_SLASH="/service/";

    // ------------------------------------------------------------------------
    // Transport types.
    // ------------------------------------------------------------------------

    /** Transport type - long polling. */
    public static final String TRANSPORT_LONG_POLL="long-polling";
    /** Transport type - callback (JSONP) polling. */
    public static final String TRANSPORT_CALLBACK_POLL="callback-polling";
    /** Transport type - iframe. */
    public static final String TRANSPORT_IFRAME="iframe";
    /** Transport type - flash. */
    public static final String TRANSPORT_FLASH="flash";

    /** ServletContext attribute name used to obtain the Bayeux object. */
    public static final String DOJOX_COMETD_BAYEUX="dojox.cometd.bayeux";

    // ------------------------------------------------------------------------
    // HTTP helper constants.
    // ------------------------------------------------------------------------

    /** HTTP helper - text/json content type. */
    public static final String JSON_CONTENT_TYPE="text/json";
    /** HTTP helper - request parameter name carrying the JSON message. */
    public static final String MESSAGE_PARAMETER="message";
    /** HTTP helper - name of the jsonp parameter. */
    public static final String JSONP_PARAMETER="jsonp";
    /** HTTP helper - default name of the jsonp callback function. */
    public static final String JSONP_DEFAULT_NAME="jsonpcallback";

    /*--Client----------------------------------------------------------- */

    /**
     * Creates a new server side client. This method is to be invoked
     * by server side objects only; you cannot create a remote client by using this method.
     * A client represents an entity that can subscribe to channels and publish and receive
     * messages through these channels.
     * @param idprefix String - the prefix string for the generated id, can be null
     * @param listener Listener - a callback object to be called when messages are to be
     *        delivered to the new client
     * @return Client - an implementation of the client interface
     */
    public Client newClient(String idprefix, Listener listener);

    /**
     * Retrieves a client based on an ID.
     * @param clientid String
     * @return Client - the client with the given id, or null if it doesn't exist
     */
    public Client getClient(String clientid);

    /**
     * Returns a non-modifiable list of all the clients that are currently active
     * in this Bayeux session.
     * @return List&lt;Client&gt; - an unmodifiable list containing all clients
     */
    public List<Client> getClients();

    /**
     * Returns true if a client with the given id exists.<br>
     * Same as executing <code>getClient(id)!=null</code>.
     * @param clientId String
     * @return boolean - true if the client exists
     */
    public boolean hasClient(String clientId);

    /**
     * Removes the client altogether: unsubscribes it from any channels it may be
     * subscribed to and removes it from the client list.
     * @param client Client
     * @return Client - the client that was removed, or null if no client was removed
     */
    public Client remove(Client client);

    /*--Channel---------------------------------------------------------- */

    /**
     * Returns the channel for a given channel id.
     * If the channel doesn't exist and <code>create</code> is true, the channel is
     * created and added to the list of active channels.<br>
     * If <code>create</code> is false and the channel doesn't exist, null is returned.
     * @param channelId String - the id of the channel to be retrieved or created
     * @param create boolean - true if the Bayeux impl should create the channel
     * @return Channel - null if <code>create</code> is false and the channel doesn't
     *         exist, otherwise the channel object
     */
    public Channel getChannel(String channelId, boolean create);

    /**
     * Returns a list of currently active channels in this Bayeux session.
     * @return List&lt;Channel&gt;
     */
    public List<Channel> getChannels();

    /**
     * Removes a channel from the Bayeux object, unsubscribing all the clients
     * currently subscribed to it.
     * @param channel Channel - the channel to be removed
     * @return Channel - the channel that was removed, or null if no channel was removed
     */
    public Channel remove(Channel channel);

    /**
     * Returns true if a channel with the given channelId exists.
     * <br>Same as executing <code>Bayeux.getChannel(channelId,false)!=null</code>.
     * @param channelId String
     * @return boolean - true if the channel exists
     */
    public boolean hasChannel(String channelId);

    /* --Message---------------------------------------------------------- */

    /**
     * Creates a new message to be sent by a server side client.
     * @return Message - a new Message object with a unique id
     */
    public Message newMessage(Client from);

    /*--Security policy----------------------------------------------------------- */

    /**
     * Returns the security policy associated with this Bayeux session.
     * @return SecurityPolicy
     */
    public SecurityPolicy getSecurityPolicy();

    /**
     * Sets the security policy to be used in this Bayeux session.
     * @param securityPolicy SecurityPolicy
     */
    public void setSecurityPolicy(SecurityPolicy securityPolicy);

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.fn.harness; import java.util.EnumMap; import java.util.List; import java.util.concurrent.ExecutorService; import java.util.function.Function; import org.apache.beam.fn.harness.control.AddHarnessIdInterceptor; import org.apache.beam.fn.harness.control.BeamFnControlClient; import org.apache.beam.fn.harness.control.ProcessBundleHandler; import org.apache.beam.fn.harness.control.RegisterHandler; import org.apache.beam.fn.harness.data.BeamFnDataGrpcClient; import org.apache.beam.fn.harness.logging.BeamFnLoggingClient; import org.apache.beam.fn.harness.state.BeamFnStateGrpcClientCache; import org.apache.beam.fn.harness.stream.HarnessStreamObserverFactories; import org.apache.beam.model.fnexecution.v1.BeamFnApi; import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionRequest; import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionResponse.Builder; import org.apache.beam.model.pipeline.v1.Endpoints; import org.apache.beam.runners.core.construction.PipelineOptionsTranslation; import org.apache.beam.sdk.extensions.gcp.options.GcsOptions; import org.apache.beam.sdk.fn.IdGenerator; import org.apache.beam.sdk.fn.IdGenerators; import 
org.apache.beam.sdk.fn.JvmInitializers; import org.apache.beam.sdk.fn.channel.ManagedChannelFactory; import org.apache.beam.sdk.fn.stream.OutboundObserverFactory; import org.apache.beam.sdk.function.ThrowingFunction; import org.apache.beam.sdk.io.FileSystems; import org.apache.beam.sdk.options.ExperimentalOptions; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Main entry point into the Beam SDK Fn Harness for Java. * * <p>This entry point expects the following environment variables: * * <ul> * <li>HARNESS_ID: A String representing the ID of this FnHarness. This will be added to the * headers of calls to the Beam Control Service * <li>LOGGING_API_SERVICE_DESCRIPTOR: A {@link * org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor} encoded as text * representing the endpoint that is to be connected to for the Beam Fn Logging service. * <li>CONTROL_API_SERVICE_DESCRIPTOR: A {@link Endpoints.ApiServiceDescriptor} encoded as text * representing the endpoint that is to be connected to for the Beam Fn Control service. * <li>PIPELINE_OPTIONS: A serialized form of {@link PipelineOptions}. See {@link PipelineOptions} * for further details. 
* </ul> */ public class FnHarness { private static final String HARNESS_ID = "HARNESS_ID"; private static final String CONTROL_API_SERVICE_DESCRIPTOR = "CONTROL_API_SERVICE_DESCRIPTOR"; private static final String LOGGING_API_SERVICE_DESCRIPTOR = "LOGGING_API_SERVICE_DESCRIPTOR"; private static final String PIPELINE_OPTIONS = "PIPELINE_OPTIONS"; private static final Logger LOG = LoggerFactory.getLogger(FnHarness.class); private static Endpoints.ApiServiceDescriptor getApiServiceDescriptor(String descriptor) throws TextFormat.ParseException { Endpoints.ApiServiceDescriptor.Builder apiServiceDescriptorBuilder = Endpoints.ApiServiceDescriptor.newBuilder(); TextFormat.merge(descriptor, apiServiceDescriptorBuilder); return apiServiceDescriptorBuilder.build(); } public static void main(String[] args) throws Exception { main(System::getenv); } @VisibleForTesting public static void main(Function<String, String> environmentVarGetter) throws Exception { JvmInitializers.runOnStartup(); System.out.format("SDK Fn Harness started%n"); System.out.format("Harness ID %s%n", environmentVarGetter.apply(HARNESS_ID)); System.out.format( "Logging location %s%n", environmentVarGetter.apply(LOGGING_API_SERVICE_DESCRIPTOR)); System.out.format( "Control location %s%n", environmentVarGetter.apply(CONTROL_API_SERVICE_DESCRIPTOR)); System.out.format("Pipeline options %s%n", environmentVarGetter.apply(PIPELINE_OPTIONS)); String id = environmentVarGetter.apply(HARNESS_ID); PipelineOptions options = PipelineOptionsTranslation.fromJson(environmentVarGetter.apply(PIPELINE_OPTIONS)); Endpoints.ApiServiceDescriptor loggingApiServiceDescriptor = getApiServiceDescriptor(environmentVarGetter.apply(LOGGING_API_SERVICE_DESCRIPTOR)); Endpoints.ApiServiceDescriptor controlApiServiceDescriptor = getApiServiceDescriptor(environmentVarGetter.apply(CONTROL_API_SERVICE_DESCRIPTOR)); main(id, options, loggingApiServiceDescriptor, controlApiServiceDescriptor); } /** * Run a FnHarness with the given id and options 
that attaches to the specified logging and * control API service descriptors. * * @param id Harness ID * @param options The options for this pipeline * @param loggingApiServiceDescriptor * @param controlApiServiceDescriptor * @throws Exception */ public static void main( String id, PipelineOptions options, Endpoints.ApiServiceDescriptor loggingApiServiceDescriptor, Endpoints.ApiServiceDescriptor controlApiServiceDescriptor) throws Exception { ManagedChannelFactory channelFactory; List<String> experiments = options.as(ExperimentalOptions.class).getExperiments(); if (experiments != null && experiments.contains("beam_fn_api_epoll")) { channelFactory = ManagedChannelFactory.createEpoll(); } else { channelFactory = ManagedChannelFactory.createDefault(); } OutboundObserverFactory outboundObserverFactory = HarnessStreamObserverFactories.fromOptions(options); channelFactory = channelFactory.withInterceptors(ImmutableList.of(AddHarnessIdInterceptor.create(id))); main( id, options, loggingApiServiceDescriptor, controlApiServiceDescriptor, channelFactory, outboundObserverFactory); } /** * Run a FnHarness with the given id and options that attaches to the specified logging and * control API service descriptors using the given channel factory and outbound observer factory. 
* * @param id Harness ID * @param options The options for this pipeline * @param loggingApiServiceDescriptor * @param controlApiServiceDescriptor * @param channelFactory * @param outboundObserverFactory * @throws Exception */ public static void main( String id, PipelineOptions options, Endpoints.ApiServiceDescriptor loggingApiServiceDescriptor, Endpoints.ApiServiceDescriptor controlApiServiceDescriptor, ManagedChannelFactory channelFactory, OutboundObserverFactory outboundObserverFactory) throws Exception { IdGenerator idGenerator = IdGenerators.decrementingLongs(); ExecutorService executorService = options.as(GcsOptions.class).getExecutorService(); // The logging client variable is not used per se, but during its lifetime (until close()) it // intercepts logging and sends it to the logging service. try (BeamFnLoggingClient logging = new BeamFnLoggingClient( options, loggingApiServiceDescriptor, channelFactory::forDescriptor)) { LOG.info("Fn Harness started"); // Register standard file systems. FileSystems.setDefaultPipelineOptions(options); EnumMap< BeamFnApi.InstructionRequest.RequestCase, ThrowingFunction<InstructionRequest, Builder>> handlers = new EnumMap<>(BeamFnApi.InstructionRequest.RequestCase.class); RegisterHandler fnApiRegistry = new RegisterHandler(); BeamFnDataGrpcClient beamFnDataMultiplexer = new BeamFnDataGrpcClient(options, channelFactory::forDescriptor, outboundObserverFactory); BeamFnStateGrpcClientCache beamFnStateGrpcClientCache = new BeamFnStateGrpcClientCache( idGenerator, channelFactory::forDescriptor, outboundObserverFactory); ProcessBundleHandler processBundleHandler = new ProcessBundleHandler( options, fnApiRegistry::getById, beamFnDataMultiplexer, beamFnStateGrpcClientCache); handlers.put(BeamFnApi.InstructionRequest.RequestCase.REGISTER, fnApiRegistry::register); // TODO(BEAM-6597): Collect MonitoringInfos in ProcessBundleProgressResponses. 
handlers.put( BeamFnApi.InstructionRequest.RequestCase.PROCESS_BUNDLE, processBundleHandler::processBundle); BeamFnControlClient control = new BeamFnControlClient( id, controlApiServiceDescriptor, channelFactory, outboundObserverFactory, handlers); JvmInitializers.runBeforeProcessing(options); LOG.info("Entering instruction processing loop"); control.processInstructionRequests(executorService); processBundleHandler.shutdown(); } finally { System.out.println("Shutting SDK harness down."); executorService.shutdown(); } } }
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Created by IntelliJ IDEA. * User: dsl * Date: 06.05.2002 * Time: 13:36:30 * To change template for new class use * Code Style | Class Templates options (Tools | IDE Options). */ package com.intellij.refactoring.introduceParameter; import com.google.common.annotations.VisibleForTesting; import com.intellij.codeInsight.CodeInsightUtil; import com.intellij.codeInsight.FunctionalInterfaceSuggester; import com.intellij.codeInsight.completion.JavaCompletionUtil; import com.intellij.codeInsight.navigation.NavigationUtil; import com.intellij.ide.util.PsiClassListCellRenderer; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.RangeMarker; import com.intellij.openapi.editor.ScrollType; import com.intellij.openapi.editor.SelectionModel; import com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.editor.colors.EditorColorsManager; import com.intellij.openapi.editor.markup.*; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.popup.JBPopup; import com.intellij.openapi.ui.popup.JBPopupAdapter; import com.intellij.openapi.ui.popup.JBPopupFactory; import com.intellij.openapi.ui.popup.LightweightWindowEvent; 
import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.Pass; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.TextRange; import com.intellij.psi.*; import com.intellij.psi.codeStyle.JavaCodeStyleManager; import com.intellij.psi.codeStyle.SuggestedNameInfo; import com.intellij.psi.codeStyle.VariableKind; import com.intellij.psi.search.PsiElementProcessor; import com.intellij.psi.util.PsiFormatUtil; import com.intellij.psi.util.PsiFormatUtilBase; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.refactoring.HelpID; import com.intellij.refactoring.IntroduceHandlerBase; import com.intellij.refactoring.IntroduceParameterRefactoring; import com.intellij.refactoring.RefactoringBundle; import com.intellij.refactoring.extractMethod.AbstractExtractDialog; import com.intellij.refactoring.extractMethod.ExtractMethodProcessor; import com.intellij.refactoring.extractMethod.InputVariables; import com.intellij.refactoring.extractMethod.PrepareFailedException; import com.intellij.refactoring.introduce.inplace.AbstractInplaceIntroducer; import com.intellij.refactoring.introduceField.ElementToWorkOn; import com.intellij.refactoring.ui.MethodCellRenderer; import com.intellij.refactoring.ui.NameSuggestionsGenerator; import com.intellij.refactoring.ui.TypeSelectorManagerImpl; import com.intellij.refactoring.util.CommonRefactoringUtil; import com.intellij.refactoring.util.RefactoringUtil; import com.intellij.refactoring.util.VariableData; import com.intellij.refactoring.util.occurrences.ExpressionOccurrenceManager; import com.intellij.ui.ScrollPaneFactory; import com.intellij.ui.components.JBList; import com.intellij.usageView.UsageInfo; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import com.intellij.util.PairConsumer; import gnu.trove.TIntArrayList; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import 
org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.KeyEvent; import java.util.*; import java.util.List; public class IntroduceParameterHandler extends IntroduceHandlerBase { private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.introduceParameter.IntroduceParameterHandler"); static final String REFACTORING_NAME = RefactoringBundle.message("introduce.parameter.title"); private JBPopup myEnclosingMethodsPopup; private InplaceIntroduceParameterPopup myInplaceIntroduceParameterPopup; public void invoke(@NotNull final Project project, final Editor editor, final PsiFile file, DataContext dataContext) { PsiDocumentManager.getInstance(project).commitAllDocuments(); editor.getScrollingModel().scrollToCaret(ScrollType.MAKE_VISIBLE); ElementToWorkOn.processElementToWorkOn(editor, file, REFACTORING_NAME, HelpID.INTRODUCE_PARAMETER, project, new ElementToWorkOn.ElementsProcessor<ElementToWorkOn>() { @Override public boolean accept(ElementToWorkOn el) { return true; } @Override public void pass(final ElementToWorkOn elementToWorkOn) { if (elementToWorkOn == null) { return; } if (elementToWorkOn.getLocalVariable() == null && elementToWorkOn.getExpression() == null) { if (!introduceStrategy(project, editor, file)) { ElementToWorkOn.showNothingSelectedErrorMessage(editor, REFACTORING_NAME, HelpID.INTRODUCE_PARAMETER, project); } return; } final PsiExpression expr = elementToWorkOn.getExpression(); final PsiLocalVariable localVar = elementToWorkOn.getLocalVariable(); final boolean isInvokedOnDeclaration = elementToWorkOn.isInvokedOnDeclaration(); invoke(editor, project, expr, localVar, isInvokedOnDeclaration); } }); } protected boolean invokeImpl(Project project, PsiExpression tempExpr, Editor editor) { return invoke(editor, project, tempExpr, 
null, false); } protected boolean invokeImpl(Project project, PsiLocalVariable localVariable, Editor editor) { return invoke(editor, project, null, localVariable, true); } private boolean invoke(final Editor editor, final Project project, final PsiExpression expr, PsiLocalVariable localVar, boolean invokedOnDeclaration) { LOG.assertTrue(!PsiDocumentManager.getInstance(project).hasUncommitedDocuments()); PsiMethod method; if (expr != null) { method = Util.getContainingMethod(expr); } else { method = Util.getContainingMethod(localVar); } if (LOG.isDebugEnabled()) { LOG.debug("expression:" + expr); } if (expr == null && localVar == null) { String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("selected.block.should.represent.an.expression")); showErrorMessage(project, message, editor); return false; } if (localVar != null) { final PsiElement parent = localVar.getParent(); if (!(parent instanceof PsiDeclarationStatement)) { String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("error.wrong.caret.position.local.or.expression.name")); showErrorMessage(project, message, editor); return false; } } if (method == null) { String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("is.not.supported.in.the.current.context", REFACTORING_NAME)); showErrorMessage(project, message, editor); return false; } final PsiType typeByExpression = invokedOnDeclaration ? 
null : RefactoringUtil.getTypeByExpressionWithExpectedType(expr); if (!invokedOnDeclaration && (typeByExpression == null || LambdaUtil.notInferredType(typeByExpression))) { String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("type.of.the.selected.expression.cannot.be.determined")); showErrorMessage(project, message, editor); return false; } if (!invokedOnDeclaration && PsiType.VOID.equals(typeByExpression)) { String message = RefactoringBundle.getCannotRefactorMessage(RefactoringBundle.message("selected.expression.has.void.type")); showErrorMessage(project, message, editor); return false; } final List<PsiMethod> validEnclosingMethods = getEnclosingMethods(method); if (validEnclosingMethods.isEmpty()) { return false; } if (!CommonRefactoringUtil.checkReadOnlyStatus(project, method)) return false; final Introducer introducer = new Introducer(project, expr, localVar, editor); final AbstractInplaceIntroducer inplaceIntroducer = AbstractInplaceIntroducer.getActiveIntroducer(editor); if (inplaceIntroducer instanceof InplaceIntroduceParameterPopup) { final InplaceIntroduceParameterPopup introduceParameterPopup = (InplaceIntroduceParameterPopup)inplaceIntroducer; introducer.introduceParameter(introduceParameterPopup.getMethodToIntroduceParameter(), introduceParameterPopup.getMethodToSearchFor()); return true; } chooseMethodToIntroduceParameter(editor, validEnclosingMethods, (methodToSearchIn, methodToSearchFor) -> introducer.introduceParameter(methodToSearchIn, methodToSearchFor)); return true; } private void chooseMethodToIntroduceParameter(final Editor editor, final List<PsiMethod> validEnclosingMethods, final PairConsumer<PsiMethod, PsiMethod> consumer) { final boolean unitTestMode = ApplicationManager.getApplication().isUnitTestMode(); if (validEnclosingMethods.size() == 1 || unitTestMode) { final PsiMethod methodToIntroduceParameterTo = validEnclosingMethods.get(0); if (methodToIntroduceParameterTo.findDeepestSuperMethod() == null || 
unitTestMode) { consumer.consume(methodToIntroduceParameterTo, methodToIntroduceParameterTo); return; } } final JPanel panel = new JPanel(new BorderLayout()); final JCheckBox superMethod = new JCheckBox("Refactor super method", true); superMethod.setMnemonic('U'); panel.add(superMethod, BorderLayout.SOUTH); final JBList list = new JBList(validEnclosingMethods.toArray()); list.setVisibleRowCount(5); list.setCellRenderer(new MethodCellRenderer()); list.getSelectionModel().setSelectionMode(ListSelectionModel.SINGLE_SELECTION); list.setSelectedIndex(0); final List<RangeHighlighter> highlighters = new ArrayList<>(); final TextAttributes attributes = EditorColorsManager.getInstance().getGlobalScheme().getAttributes(EditorColors.SEARCH_RESULT_ATTRIBUTES); list.addListSelectionListener(new ListSelectionListener() { public void valueChanged(final ListSelectionEvent e) { final PsiMethod selectedMethod = (PsiMethod)list.getSelectedValue(); if (selectedMethod == null) return; dropHighlighters(highlighters); updateView(selectedMethod, editor, attributes, highlighters, superMethod); } }); updateView(validEnclosingMethods.get(0), editor, attributes, highlighters, superMethod); final JScrollPane scrollPane = ScrollPaneFactory.createScrollPane(list); scrollPane.setBorder(null); panel.add(scrollPane, BorderLayout.CENTER); final List<Pair<ActionListener, KeyStroke>> keyboardActions = Collections.singletonList(Pair.<ActionListener, KeyStroke>create(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { final PsiMethod methodToSearchIn = (PsiMethod)list.getSelectedValue(); if (myEnclosingMethodsPopup != null && myEnclosingMethodsPopup.isVisible()) { myEnclosingMethodsPopup.cancel(); } final PsiMethod methodToSearchFor = superMethod.isEnabled() && superMethod.isSelected() ? 
methodToSearchIn.findDeepestSuperMethod() : methodToSearchIn; consumer.consume(methodToSearchIn, methodToSearchFor); } }, KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, 0))); myEnclosingMethodsPopup = JBPopupFactory.getInstance().createComponentPopupBuilder(panel, list) .setTitle("Introduce parameter to method") .setMovable(false) .setResizable(false) .setRequestFocus(true) .setKeyboardActions(keyboardActions).addListener(new JBPopupAdapter() { @Override public void onClosed(LightweightWindowEvent event) { dropHighlighters(highlighters); } }).createPopup(); myEnclosingMethodsPopup.showInBestPositionFor(editor); } private static void updateView(PsiMethod selectedMethod, Editor editor, TextAttributes attributes, List<RangeHighlighter> highlighters, JCheckBox superMethod) { final MarkupModel markupModel = editor.getMarkupModel(); final PsiIdentifier nameIdentifier = selectedMethod.getNameIdentifier(); if (nameIdentifier != null) { final TextRange textRange = nameIdentifier.getTextRange(); final RangeHighlighter rangeHighlighter = markupModel.addRangeHighlighter( textRange.getStartOffset(), textRange.getEndOffset(), HighlighterLayer.SELECTION - 1, attributes, HighlighterTargetArea.EXACT_RANGE); highlighters.add(rangeHighlighter); } superMethod.setEnabled(selectedMethod.findDeepestSuperMethod() != null); } private static void dropHighlighters(List<RangeHighlighter> highlighters) { for (RangeHighlighter highlighter : highlighters) { highlighter.dispose(); } highlighters.clear(); } protected static NameSuggestionsGenerator createNameSuggestionGenerator(final PsiExpression expr, final String propName, final Project project, final String enteredName) { return new NameSuggestionsGenerator() { public SuggestedNameInfo getSuggestedNameInfo(PsiType type) { final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(project); SuggestedNameInfo info = codeStyleManager.suggestVariableName(VariableKind.PARAMETER, propName, expr != null && expr.isValid() ? 
expr : null, type); if (expr != null && expr.isValid()) { info = codeStyleManager.suggestUniqueVariableName(info, expr, true); } final String[] strings = AbstractJavaInplaceIntroducer.appendUnresolvedExprName(JavaCompletionUtil .completeVariableNameForRefactoring(codeStyleManager, type, VariableKind.LOCAL_VARIABLE, info), expr); return new SuggestedNameInfo.Delegate(enteredName != null ? ArrayUtil.mergeArrays(new String[]{enteredName}, strings): strings, info); } }; } private static void showErrorMessage(Project project, String message, Editor editor) { CommonRefactoringUtil.showErrorHint(project, editor, message, REFACTORING_NAME, HelpID.INTRODUCE_PARAMETER); } public void invoke(@NotNull Project project, @NotNull PsiElement[] elements, DataContext dataContext) { // Never called /* do nothing */ } public static List<PsiMethod> getEnclosingMethods(PsiMethod nearest) { List<PsiMethod> enclosingMethods = new ArrayList<>(); enclosingMethods.add(nearest); PsiMethod method = nearest; while(true) { method = PsiTreeUtil.getParentOfType(method, PsiMethod.class, true); if (method == null) break; enclosingMethods.add(method); } if (enclosingMethods.size() > 1) { List<PsiMethod> methodsNotImplementingLibraryInterfaces = new ArrayList<>(); for(PsiMethod enclosing: enclosingMethods) { PsiMethod[] superMethods = enclosing.findDeepestSuperMethods(); boolean libraryInterfaceMethod = false; for(PsiMethod superMethod: superMethods) { libraryInterfaceMethod |= isLibraryInterfaceMethod(superMethod); } if (!libraryInterfaceMethod) { methodsNotImplementingLibraryInterfaces.add(enclosing); } } if (methodsNotImplementingLibraryInterfaces.size() > 0) { return methodsNotImplementingLibraryInterfaces; } } return enclosingMethods; } @Nullable public static PsiMethod chooseEnclosingMethod(@NotNull PsiMethod method) { final List<PsiMethod> validEnclosingMethods = getEnclosingMethods(method); if (validEnclosingMethods.size() > 1 && !ApplicationManager.getApplication().isUnitTestMode()) { final 
EnclosingMethodSelectionDialog dialog = new EnclosingMethodSelectionDialog(method.getProject(), validEnclosingMethods); if (!dialog.showAndGet()) { return null; } method = dialog.getSelectedMethod(); } else if (validEnclosingMethods.size() == 1) { method = validEnclosingMethods.get(0); } return method; } private static boolean isLibraryInterfaceMethod(final PsiMethod method) { return method.hasModifierProperty(PsiModifier.ABSTRACT) && !method.getManager().isInProject(method); } private class Introducer { private final Project myProject; private PsiExpression myExpr; private PsiLocalVariable myLocalVar; private final Editor myEditor; public Introducer(Project project, PsiExpression expr, PsiLocalVariable localVar, Editor editor) { myProject = project; myExpr = expr; myLocalVar = localVar; myEditor = editor; } public void introduceParameter(PsiMethod method, PsiMethod methodToSearchFor) { PsiExpression[] occurences; if (myExpr != null) { occurences = new ExpressionOccurrenceManager(myExpr, method, null).findExpressionOccurrences(); } else { // local variable occurences = CodeInsightUtil.findReferenceExpressions(method, myLocalVar); } String enteredName = null; boolean replaceAllOccurrences = false; boolean delegate = false; PsiType initializerType = IntroduceParameterProcessor.getInitializerType(null, myExpr, myLocalVar); final AbstractInplaceIntroducer activeIntroducer = AbstractInplaceIntroducer.getActiveIntroducer(myEditor); if (activeIntroducer != null) { activeIntroducer.stopIntroduce(myEditor); myExpr = (PsiExpression)activeIntroducer.getExpr(); myLocalVar = (PsiLocalVariable)activeIntroducer.getLocalVariable(); occurences = (PsiExpression[])activeIntroducer.getOccurrences(); enteredName = activeIntroducer.getInputName(); replaceAllOccurrences = activeIntroducer.isReplaceAllOccurrences(); delegate = ((InplaceIntroduceParameterPopup)activeIntroducer).isGenerateDelegate(); initializerType = ((AbstractJavaInplaceIntroducer)activeIntroducer).getType(); } boolean 
mustBeFinal = false; if (myExpr != null) { final PsiElement parent = myExpr.getUserData(ElementToWorkOn.PARENT); mustBeFinal = parent != null && PsiTreeUtil.getParentOfType(parent, PsiClass.class, PsiMethod.class) != method; } for (PsiExpression occurrence : occurences) { if (PsiTreeUtil.getParentOfType(occurrence, PsiClass.class, PsiMethod.class) != method) { mustBeFinal = true; break; } } final String propName = myLocalVar != null ? JavaCodeStyleManager .getInstance(myProject).variableNameToPropertyName(myLocalVar.getName(), VariableKind.LOCAL_VARIABLE) : null; boolean isInplaceAvailableOnDataContext = myEditor != null && myEditor.getSettings().isVariableInplaceRenameEnabled(); if (myExpr != null) { isInplaceAvailableOnDataContext &= myExpr.isPhysical(); } if (isInplaceAvailableOnDataContext && activeIntroducer == null) { myInplaceIntroduceParameterPopup = new InplaceIntroduceParameterPopup(myProject, myEditor, createTypeSelectorManager(occurences, initializerType), myExpr, myLocalVar, method, methodToSearchFor, occurences, getParamsToRemove(method, occurences), mustBeFinal); if (myInplaceIntroduceParameterPopup.startInplaceIntroduceTemplate()) { return; } } if (ApplicationManager.getApplication().isUnitTestMode()) { @NonNls String parameterName = "anObject"; boolean replaceAllOccurences = true; boolean isDeleteLocalVariable = true; PsiExpression initializer = myLocalVar != null && myExpr == null ? 
myLocalVar.getInitializer() : myExpr; new IntroduceParameterProcessor(myProject, method, methodToSearchFor, initializer, myExpr, myLocalVar, isDeleteLocalVariable, parameterName, replaceAllOccurences, IntroduceParameterRefactoring.REPLACE_FIELDS_WITH_GETTERS_NONE, mustBeFinal, false, null, getParamsToRemove(method, occurences)).run(); } else { if (myEditor != null) { RefactoringUtil.highlightAllOccurrences(myProject, occurences, myEditor); } final List<UsageInfo> classMemberRefs = new ArrayList<>(); if (myExpr != null) { Util.analyzeExpression(myExpr, new ArrayList<>(), classMemberRefs, new ArrayList<>()); } final IntroduceParameterDialog dialog = new IntroduceParameterDialog(myProject, classMemberRefs, occurences, myLocalVar, myExpr, createNameSuggestionGenerator(myExpr, propName, myProject, enteredName), createTypeSelectorManager(occurences, initializerType), methodToSearchFor, method, getParamsToRemove(method, occurences), mustBeFinal); dialog.setReplaceAllOccurrences(replaceAllOccurrences); dialog.setGenerateDelegate(delegate); if (dialog.showAndGet()) { final Runnable cleanSelectionRunnable = () -> { if (myEditor != null && !myEditor.isDisposed()) { myEditor.getSelectionModel().removeSelection(); } }; SwingUtilities.invokeLater(cleanSelectionRunnable); } } } private TypeSelectorManagerImpl createTypeSelectorManager(PsiExpression[] occurences, PsiType initializerType) { return myExpr != null ? new TypeSelectorManagerImpl(myProject, initializerType, myExpr, occurences) : new TypeSelectorManagerImpl(myProject, initializerType, occurences); } private TIntArrayList getParamsToRemove(PsiMethod method, PsiExpression[] occurences) { PsiExpression expressionToRemoveParamFrom = myExpr; if (myExpr == null) { expressionToRemoveParamFrom = myLocalVar.getInitializer(); } return expressionToRemoveParamFrom == null ? 
new TIntArrayList() : Util .findParametersToRemove(method, expressionToRemoveParamFrom, occurences); } } @Override public AbstractInplaceIntroducer getInplaceIntroducer() { return myInplaceIntroduceParameterPopup; } @VisibleForTesting public boolean introduceStrategy(final Project project, final Editor editor, PsiFile file) { final SelectionModel selectionModel = editor.getSelectionModel(); if (selectionModel.hasSelection()) { final PsiElement[] elements = CodeInsightUtil.findStatementsInRange(file, selectionModel.getSelectionStart(), selectionModel.getSelectionEnd()); return introduceStrategy(project, editor, file, elements); } return false; } @VisibleForTesting public boolean introduceStrategy(final Project project, final Editor editor, PsiFile file, final PsiElement[] elements) { if (elements.length > 0) { final AbstractInplaceIntroducer inplaceIntroducer = AbstractInplaceIntroducer.getActiveIntroducer(editor); if (inplaceIntroducer instanceof InplaceIntroduceParameterPopup) { return false; } final List<PsiMethod> enclosingMethods = getEnclosingMethods(Util.getContainingMethod(elements[0])); if (enclosingMethods.isEmpty()) { return false; } final PsiElement[] elementsCopy; if (!elements[0].isPhysical()) { elementsCopy = elements; } else { final PsiFile copy = PsiFileFactory.getInstance(project) .createFileFromText(file.getName(), file.getFileType(), file.getText(), file.getModificationStamp(), false); final TextRange range = new TextRange(elements[0].getTextRange().getStartOffset(), elements[elements.length - 1].getTextRange().getEndOffset()); final PsiExpression exprInRange = CodeInsightUtil.findExpressionInRange(copy, range.getStartOffset(), range.getEndOffset()); elementsCopy = exprInRange != null ? 
new PsiElement[]{exprInRange} : CodeInsightUtil.findStatementsInRange(copy, range.getStartOffset(), range.getEndOffset()); } final List<PsiMethod> enclosingMethodsInCopy = getEnclosingMethods(Util.getContainingMethod(elementsCopy[0])); final MyExtractMethodProcessor processor = new MyExtractMethodProcessor(project, editor, elementsCopy, enclosingMethodsInCopy.get(enclosingMethodsInCopy.size() - 1)); try { if (!processor.prepare()) return false; processor.showDialog(); //provide context for generated method to check exceptions compatibility final PsiMethod emptyMethod = JavaPsiFacade.getElementFactory(project) .createMethodFromText(processor.generateEmptyMethod("name").getText(), elements[0]); final Collection<? extends PsiType> types = FunctionalInterfaceSuggester.suggestFunctionalInterfaces(emptyMethod); if (types.isEmpty()) { return false; } if (types.size() == 1 || ApplicationManager.getApplication().isUnitTestMode()) { final PsiType next = types.iterator().next(); functionalInterfaceSelected(next, enclosingMethods, project, editor, processor, elements); } else { final Map<PsiClass, PsiType> classes = new LinkedHashMap<>(); for (PsiType type : types) { classes.put(PsiUtil.resolveClassInType(type), type); } final PsiClass[] psiClasses = classes.keySet().toArray(new PsiClass[classes.size()]); final String methodSignature = PsiFormatUtil.formatMethod(emptyMethod, PsiSubstitutor.EMPTY, PsiFormatUtilBase.SHOW_PARAMETERS, PsiFormatUtilBase.SHOW_TYPE); final PsiType returnType = emptyMethod.getReturnType(); LOG.assertTrue(returnType != null); final String title = "Choose Applicable Functional Interface: " + methodSignature + " -> " + returnType.getPresentableText(); NavigationUtil.getPsiElementPopup(psiClasses, PsiClassListCellRenderer.INSTANCE, title, new PsiElementProcessor<PsiClass>() { @Override public boolean execute(@NotNull PsiClass psiClass) { functionalInterfaceSelected(classes.get(psiClass), enclosingMethods, project, editor, processor, elements); return 
true; } }).showInBestPositionFor(editor); return true; } return true; } catch (IncorrectOperationException ignore) {} catch (PrepareFailedException ignore) {} } return false; } private void functionalInterfaceSelected(final PsiType selectedType, final List<PsiMethod> enclosingMethods, final Project project, final Editor editor, final MyExtractMethodProcessor processor, final PsiElement[] elements) { final PairConsumer<PsiMethod, PsiMethod> consumer = (methodToIntroduceParameter, methodToSearchFor) -> introduceWrappedCodeBlockParameter(methodToIntroduceParameter, methodToSearchFor, editor, project, selectedType, processor, elements); chooseMethodToIntroduceParameter(editor, enclosingMethods, consumer); } private void introduceWrappedCodeBlockParameter(PsiMethod methodToIntroduceParameter, PsiMethod methodToSearchFor, Editor editor, final Project project, final PsiType selectedType, final MyExtractMethodProcessor processor, final PsiElement[] elements) { final PsiElement commonParent = findCommonParent(elements); if (commonParent == null) { LOG.error("Should have common parent:" + Arrays.toString(elements)); return; } final RangeMarker marker = editor.getDocument().createRangeMarker(commonParent.getTextRange()); final PsiElement[] copyElements = processor.getElements(); final PsiElement containerCopy = findCommonParent(copyElements); if (containerCopy == null) { LOG.error("Should have common parent:" + Arrays.toString(copyElements)); return; } final PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(selectedType); final PsiClass wrapperClass = resolveResult.getElement(); LOG.assertTrue(wrapperClass != null); final PsiElementFactory factory = JavaPsiFacade.getElementFactory(project); final Ref<String> suffixText = new Ref<>(); final Ref<String> prefixText = new Ref<>(); final Ref<String> methodText = new Ref<>(); WriteCommandAction.runWriteCommandAction(project, () -> { final PsiMethod method = 
LambdaUtil.getFunctionalInterfaceMethod(wrapperClass); LOG.assertTrue(method != null); final String interfaceMethodName = method.getName(); processor.setMethodName(interfaceMethodName); if (copyElements.length == 1 && copyElements[0].getUserData(ElementToWorkOn.PARENT) == null) { copyElements[0].putUserData(ElementToWorkOn.REPLACE_NON_PHYSICAL, true); } processor.doExtract(); final PsiMethod extractedMethod = processor.getExtractedMethod(); final PsiParameter[] parameters = extractedMethod.getParameterList().getParameters(); final PsiParameter[] interfaceParameters = method.getParameterList().getParameters(); final PsiSubstitutor substitutor = resolveResult.getSubstitutor(); for (int i = 0; i < interfaceParameters.length; i++) { final PsiTypeElement typeAfterInterface = factory.createTypeElement(substitutor.substitute(interfaceParameters[i].getType())); final PsiTypeElement typeElement = parameters[i].getTypeElement(); if (typeElement != null) { typeElement.replace(typeAfterInterface); } } methodText.set(extractedMethod.getText()); final PsiMethodCallExpression methodCall = processor.getMethodCall(); prefixText.set(containerCopy.getText().substring(0, methodCall.getTextRange().getStartOffset() - containerCopy.getTextRange().getStartOffset())); suffixText.set("." 
+ methodCall.getText() + containerCopy.getText().substring(methodCall.getTextRange().getEndOffset() - containerCopy.getTextRange().getStartOffset())); }); PsiExpression expression = factory .createExpressionFromText("new " + selectedType.getCanonicalText() + "() {" + methodText.get() + "}", elements[0]); expression = (PsiExpression)JavaCodeStyleManager.getInstance(project).shortenClassReferences(expression); expression.putUserData(ElementToWorkOn.PARENT, commonParent); expression.putUserData(ElementToWorkOn.PREFIX, prefixText.get()); expression.putUserData(ElementToWorkOn.SUFFIX, suffixText.get()); expression.putUserData(ElementToWorkOn.TEXT_RANGE, marker); expression.putUserData(ElementToWorkOn.EXPR_RANGE, elements.length == 1 ? elements[0].getTextRange() : null); new Introducer(project, expression, null, editor) .introduceParameter(methodToIntroduceParameter, methodToSearchFor); } @Nullable private static PsiElement findCommonParent(PsiElement[] copyElements) { if (copyElements.length > 1) { return PsiTreeUtil.findCommonParent(copyElements); } else { PsiElement parent = copyElements[0].getUserData(ElementToWorkOn.PARENT); if (parent == null) { parent = copyElements[0].getParent(); } return PsiTreeUtil.getParentOfType(parent, PsiCodeBlock.class, false); } } private static class MyExtractMethodProcessor extends ExtractMethodProcessor { private final PsiMethod myTopEnclosingMethod; public MyExtractMethodProcessor(Project project, Editor editor, PsiElement[] elements, PsiMethod topEnclosing) { super(project, editor, elements, null, REFACTORING_NAME, null, null); myTopEnclosingMethod = topEnclosing; } @Override protected AbstractExtractDialog createExtractMethodDialog(boolean direct) { return new MyAbstractExtractDialog(); } @Override protected boolean isNeedToChangeCallContext() { return false; } public void setMethodName(String methodName) { myMethodName = methodName; } @Override public Boolean hasDuplicates() { return false; } @Override public boolean isStatic() { 
return false; } @Override protected boolean isFoldingApplicable() { return false; } @Override protected PsiMethod addExtractedMethod(PsiMethod newMethod) { return newMethod; } @Override public boolean prepare(@Nullable Pass<ExtractMethodProcessor> pass) throws PrepareFailedException { final boolean prepare = super.prepare(pass); if (prepare) { if (myNotNullConditionalCheck || myNullConditionalCheck) { return false; } } return prepare; } private class MyAbstractExtractDialog implements AbstractExtractDialog { @Override public String getChosenMethodName() { return "name"; } @Override public VariableData[] getChosenParameters() { final InputVariables inputVariables = getInputVariables(); List<VariableData> datas = new ArrayList<>(); for (VariableData data : inputVariables.getInputVariables()) { final PsiVariable variable = data.variable; if (variable instanceof PsiParameter && myTopEnclosingMethod.equals(((PsiParameter)variable).getDeclarationScope())) { continue; } datas.add(data); } return datas.toArray(new VariableData[datas.size()]); } @NotNull @Override public String getVisibility() { return PsiModifier.PUBLIC; } @Override public boolean isMakeStatic() { return false; } @Override public boolean isChainedConstructor() { return false; } @Override public PsiType getReturnType() { return null; } @Override public void show() {} @Override public boolean isOK() { return true; } } } }
/* * Copyright 2014 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.net.statistic.impl; import com.google.common.base.MoreObjects; import com.google.common.base.Predicate; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableSet; import org.apache.felix.scr.annotations.Activate; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Deactivate; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.ReferenceCardinality; import org.apache.felix.scr.annotations.Service; import org.onosproject.core.ApplicationId; import org.onosproject.core.GroupId; import org.onosproject.net.ConnectPoint; import org.onosproject.net.Link; import org.onosproject.net.Path; import org.onosproject.net.flow.FlowEntry; import org.onosproject.net.flow.FlowRule; import org.onosproject.net.flow.FlowRuleEvent; import org.onosproject.net.flow.FlowRuleListener; import org.onosproject.net.flow.FlowRuleService; import org.onosproject.net.statistic.DefaultLoad; import org.onosproject.net.statistic.Load; import org.onosproject.net.statistic.StatisticService; import org.onosproject.net.statistic.StatisticStore; import org.slf4j.Logger; import java.util.Collections; import java.util.Objects; import java.util.Optional; import java.util.Set; import static com.google.common.base.Preconditions.checkNotNull; import static org.slf4j.LoggerFactory.getLogger; 
import static org.onosproject.security.AppGuard.checkPermission; import static org.onosproject.security.AppPermission.Type.*; /** * Provides an implementation of the Statistic Service. */ @Component(immediate = true) @Service public class StatisticManager implements StatisticService { private final Logger log = getLogger(getClass()); @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected FlowRuleService flowRuleService; @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY) protected StatisticStore statisticStore; private final InternalFlowRuleListener listener = new InternalFlowRuleListener(); @Activate public void activate() { flowRuleService.addListener(listener); log.info("Started"); } @Deactivate public void deactivate() { flowRuleService.removeListener(listener); log.info("Stopped"); } @Override public Load load(Link link) { checkPermission(STATISTIC_READ); return load(link.src()); } @Override public Load load(Link link, ApplicationId appId, Optional<GroupId> groupId) { checkPermission(STATISTIC_READ); Statistics stats = getStatistics(link.src()); if (!stats.isValid()) { return new DefaultLoad(); } ImmutableSet<FlowEntry> current = FluentIterable.from(stats.current()) .filter(hasApplicationId(appId)) .filter(hasGroupId(groupId)) .toSet(); ImmutableSet<FlowEntry> previous = FluentIterable.from(stats.previous()) .filter(hasApplicationId(appId)) .filter(hasGroupId(groupId)) .toSet(); return new DefaultLoad(aggregate(current), aggregate(previous)); } @Override public Load load(ConnectPoint connectPoint) { checkPermission(STATISTIC_READ); return loadInternal(connectPoint); } @Override public Link max(Path path) { checkPermission(STATISTIC_READ); if (path.links().isEmpty()) { return null; } Load maxLoad = new DefaultLoad(); Link maxLink = null; for (Link link : path.links()) { Load load = loadInternal(link.src()); if (load.rate() > maxLoad.rate()) { maxLoad = load; maxLink = link; } } return maxLink; } @Override public Link min(Path path) 
{ checkPermission(STATISTIC_READ); if (path.links().isEmpty()) { return null; } Load minLoad = new DefaultLoad(); Link minLink = null; for (Link link : path.links()) { Load load = loadInternal(link.src()); if (load.rate() < minLoad.rate()) { minLoad = load; minLink = link; } } return minLink; } @Override public FlowRule highestHitter(ConnectPoint connectPoint) { checkPermission(STATISTIC_READ); Set<FlowEntry> hitters = statisticStore.getCurrentStatistic(connectPoint); if (hitters.isEmpty()) { return null; } FlowEntry max = hitters.iterator().next(); for (FlowEntry entry : hitters) { if (entry.bytes() > max.bytes()) { max = entry; } } return max; } private Load loadInternal(ConnectPoint connectPoint) { Statistics stats = getStatistics(connectPoint); if (!stats.isValid()) { return new DefaultLoad(); } return new DefaultLoad(aggregate(stats.current), aggregate(stats.previous)); } /** * Returns statistics of the specified port. * * @param connectPoint port to query * @return statistics */ private Statistics getStatistics(ConnectPoint connectPoint) { Set<FlowEntry> current; Set<FlowEntry> previous; synchronized (statisticStore) { current = getCurrentStatistic(connectPoint); previous = getPreviousStatistic(connectPoint); } return new Statistics(current, previous); } /** * Returns the current statistic of the specified port. * @param connectPoint port to query * @return set of flow entries */ private Set<FlowEntry> getCurrentStatistic(ConnectPoint connectPoint) { Set<FlowEntry> stats = statisticStore.getCurrentStatistic(connectPoint); if (stats == null) { return Collections.emptySet(); } else { return stats; } } /** * Returns the previous statistic of the specified port. 
* * @param connectPoint port to query * @return set of flow entries */ private Set<FlowEntry> getPreviousStatistic(ConnectPoint connectPoint) { Set<FlowEntry> stats = statisticStore.getPreviousStatistic(connectPoint); if (stats == null) { return Collections.emptySet(); } else { return stats; } } // TODO: make aggregation function generic by passing a function // (applying Java 8 Stream API?) /** * Aggregates a set of values. * @param values the values to aggregate * @return a long value */ private long aggregate(Set<FlowEntry> values) { long sum = 0; for (FlowEntry f : values) { sum += f.bytes(); } return sum; } /** * Internal flow rule event listener. */ private class InternalFlowRuleListener implements FlowRuleListener { @Override public void event(FlowRuleEvent event) { FlowRule rule = event.subject(); switch (event.type()) { case RULE_ADDED: case RULE_UPDATED: if (rule instanceof FlowEntry) { statisticStore.addOrUpdateStatistic((FlowEntry) rule); } break; case RULE_ADD_REQUESTED: statisticStore.prepareForStatistics(rule); break; case RULE_REMOVE_REQUESTED: statisticStore.removeFromStatistics(rule); break; case RULE_REMOVED: break; default: log.warn("Unknown flow rule event {}", event); } } } /** * Internal data class holding two set of flow entries. */ private static class Statistics { private final ImmutableSet<FlowEntry> current; private final ImmutableSet<FlowEntry> previous; public Statistics(Set<FlowEntry> current, Set<FlowEntry> previous) { this.current = ImmutableSet.copyOf(checkNotNull(current)); this.previous = ImmutableSet.copyOf(checkNotNull(previous)); } /** * Returns flow entries as the current value. * * @return flow entries as the current value */ public ImmutableSet<FlowEntry> current() { return current; } /** * Returns flow entries as the previous value. * * @return flow entries as the previous value */ public ImmutableSet<FlowEntry> previous() { return previous; } /** * Validates values are not empty. 
* * @return false if either of the sets is empty. Otherwise, true. */ public boolean isValid() { return !(current.isEmpty() || previous.isEmpty()); } @Override public int hashCode() { return Objects.hash(current, previous); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof Statistics)) { return false; } final Statistics other = (Statistics) obj; return Objects.equals(this.current, other.current) && Objects.equals(this.previous, other.previous); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("current", current) .add("previous", previous) .toString(); } } /** * Creates a predicate that checks the application ID of a flow entry is the same as * the specified application ID. * * @param appId application ID to be checked * @return predicate */ private static Predicate<FlowEntry> hasApplicationId(ApplicationId appId) { return new Predicate<FlowEntry>() { @Override public boolean apply(FlowEntry flowEntry) { return flowEntry.appId() == appId.id(); } }; } /** * Create a predicate that checks the group ID of a flow entry is the same as * the specified group ID. * * @param groupId group ID to be checked * @return predicate */ private static Predicate<FlowEntry> hasGroupId(Optional<GroupId> groupId) { return new Predicate<FlowEntry>() { @Override public boolean apply(FlowEntry flowEntry) { if (!groupId.isPresent()) { return false; } // FIXME: The left hand type and right hand type don't match // FlowEntry.groupId() still returns a short value, not int. return flowEntry.groupId().equals(groupId.get()); } }; } }
/* * Copyright 2010 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.typography.font.sfntly.table.core; import com.google.typography.font.sfntly.data.ReadableFontData; import com.google.typography.font.sfntly.data.WritableFontData; import com.google.typography.font.sfntly.table.Header; import com.google.typography.font.sfntly.table.Table; import com.google.typography.font.sfntly.table.TableBasedTableBuilder; import com.google.typography.font.sfntly.table.truetype.LocaTable; import java.util.EnumSet; /** * A Font Header table. * * @author Stuart Gill */ public final class FontHeaderTable extends Table { /** * Checksum adjustment base value. To compute the checksum adjustment: * 1) set it to 0; 2) sum the entire font as ULONG, 3) then store 0xB1B0AFBA - sum. */ public static final long CHECKSUM_ADJUSTMENT_BASE = 0xB1B0AFBAL; /** * Magic number value stored in the magic number field. */ public static final long MAGIC_NUMBER = 0x5F0F3CF5L; /** * The ranges to use for checksum calculation. */ private static final int[] CHECKSUM_RANGES = new int[] {0, Offset.checkSumAdjustment.offset, Offset.magicNumber.offset}; /** * Offsets to specific elements in the underlying data. These offsets are relative to the * start of the table or the start of sub-blocks within the table. 
*/ private enum Offset { tableVersion(0), fontRevision(4), checkSumAdjustment(8), magicNumber(12), flags(16), unitsPerEm(18), created(20), modified(28), xMin(36), yMin(38), xMax(40), yMax(42), macStyle(44), lowestRecPPEM(46), fontDirectionHint(48), indexToLocFormat(50), glyphDataFormat(52); private final int offset; private Offset(int offset) { this.offset = offset; } } /** * Constructor. * * @param header the table header * @param data the readable data for the table */ private FontHeaderTable(Header header, ReadableFontData data) { super(header, data); data.setCheckSumRanges(0, Offset.checkSumAdjustment.offset, Offset.magicNumber.offset); } /** * Get the table version. * * @return the table version */ public int tableVersion() { return this.data.readFixed(Offset.tableVersion.offset); } /** * Get the font revision. * * @return the font revision */ public int fontRevision() { return this.data.readFixed(Offset.fontRevision.offset); } /** * Get the checksum adjustment. To compute: set it to 0, sum the entire font * as ULONG, then store 0xB1B0AFBA - sum. * * @return checksum adjustment */ public long checkSumAdjustment() { return this.data.readULong(Offset.checkSumAdjustment.offset); } /** * Get the magic number. Set to 0x5F0F3CF5. * * @return the magic number */ public long magicNumber() { return this.data.readULong(Offset.magicNumber.offset); } /** * Flag values in the font header table. 
* */ public enum Flags { BaselineAtY0, LeftSidebearingAtX0, InstructionsDependOnPointSize, ForcePPEMToInteger, InstructionsAlterAdvanceWidth, //Apple Flags Apple_Vertical, Apple_Zero, Apple_RequiresLayout, Apple_GXMetamorphosis, Apple_StrongRTL, Apple_IndicRearrangement, FontDataLossless, FontConverted, OptimizedForClearType, Reserved14, Reserved15; public int mask() { return 1 << this.ordinal(); } public static EnumSet<Flags> asSet(int value) { EnumSet<Flags> set = EnumSet.noneOf(Flags.class); for (Flags flag : Flags.values()) { if ((value & flag.mask()) == flag.mask()) { set.add(flag); } } return set; } static public int value(EnumSet<Flags> set) { int value = 0; for (Flags flag : set) { value |= flag.mask(); } return value; } static public int cleanValue(EnumSet<Flags> set) { EnumSet<Flags> clean = EnumSet.copyOf(set); clean.remove(Flags.Reserved14); clean.remove(Flags.Reserved15); return value(clean); } } /** * Get the flags as an int value. * * @return the flags */ public int flagsAsInt() { return this.data.readUShort(Offset.flags.offset); } /** * Get the flags as an enum set. * * @return the enum set of the flags */ public EnumSet<Flags> flags() { return Flags.asSet(this.flagsAsInt()); } /** * Get the units per em. * * @return the units per em */ public int unitsPerEm() { return this.data.readUShort(Offset.unitsPerEm.offset); } /** * Get the created date. Number of seconds since 12:00 midnight, January 1, * 1904. 64-bit integer. * * @return created date */ public long created() { return this.data.readDateTimeAsLong(Offset.created.offset); } /** * Get the modified date. Number of seconds since 12:00 midnight, January 1, * 1904. 64-bit integer. * * @return created date */ public long modified() { return this.data.readDateTimeAsLong(Offset.modified.offset); } /** * Get the x min. For all glyph bounding boxes. * * @return the x min */ public int xMin() { return this.data.readShort(Offset.xMin.offset); } /** * Get the y min. For all glyph bounding boxes. 
* * @return the y min */ public int yMin() { return this.data.readShort(Offset.yMin.offset); } /** * Get the x max. For all glyph bounding boxes. * * @return the xmax */ public int xMax() { return this.data.readShort(Offset.xMax.offset); } /** * Get the y max. For all glyph bounding boxes. * * @return the ymax */ public int yMax() { return this.data.readShort(Offset.yMax.offset); } /** * Mac style bits set in the font header table. * */ public enum MacStyle { Bold, Italic, Underline, Outline, Shadow, Condensed, Extended, Reserved7, Reserved8, Reserved9, Reserved10, Reserved11, Reserved12, Reserved13, Reserved14, Reserved15; public int mask() { return 1 << this.ordinal(); } public static EnumSet<MacStyle> asSet(int value) { EnumSet<MacStyle> set = EnumSet.noneOf(MacStyle.class); for (MacStyle style : MacStyle.values()) { if ((value & style.mask()) == style.mask()) { set.add(style); } } return set; } public static int value(EnumSet<MacStyle> set) { int value = 0; for (MacStyle style : set) { value |= style.mask(); } return value; } public static int cleanValue(EnumSet<MacStyle> set) { EnumSet<MacStyle> clean = EnumSet.copyOf(set); clean.removeAll(reserved); return value(clean); } private static final EnumSet<MacStyle> reserved = EnumSet.range(MacStyle.Reserved7, MacStyle.Reserved15); } /** * Get the Mac style bits as an int. * * @return the Mac style bits */ public int macStyleAsInt() { return this.data.readUShort(Offset.macStyle.offset); } /** * Get the Mac style bits as an enum set. * * @return the Mac style bits */ public EnumSet<MacStyle> macStyle() { return MacStyle.asSet(this.macStyleAsInt()); } public int lowestRecPPEM() { return this.data.readUShort(Offset.lowestRecPPEM.offset); } /** * Font direction hint values in the font header table. 
 *
 */
// Values mirror the OpenType 'head' table fontDirectionHint field:
// 0 = fully mixed, 1/-1 = only strongly LTR/RTL, 2/-2 = strong LTR/RTL plus neutrals.
public enum FontDirectionHint {
    FullyMixed(0), OnlyStrongLTR(1), StrongLTRAndNeutral(2), OnlyStrongRTL(-1), StrongRTLAndNeutral(-2);

    private final int value;

    private FontDirectionHint(int value) {
        this.value = value;
    }

    /** The raw value stored in the font for this hint. */
    public int value() {
        return this.value;
    }

    // NOTE: overloads (does not override) Object.equals(Object); compares the raw int value.
    public boolean equals(int value) {
        return value == this.value;
    }

    /** Linear scan over the constants; returns null for an unrecognized raw value. */
    public static FontDirectionHint valueOf(int value) {
        for (FontDirectionHint hint : FontDirectionHint.values()) {
            if (hint.equals(value)) {
                return hint;
            }
        }
        return null;
    }
}

/** The raw fontDirectionHint value as read from the table data. */
public int fontDirectionHintAsInt() {
    return this.data.readShort(Offset.fontDirectionHint.offset);
}

/** The fontDirectionHint as an enum, or null if the stored value is unrecognized. */
public FontDirectionHint fontDirectionHint() {
    return FontDirectionHint.valueOf(this.fontDirectionHintAsInt());
}

/**
 * The index to location format used in the LocaTable.
 *
 * @see LocaTable
 */
public enum IndexToLocFormat {
    shortOffset(0), longOffset(1);

    private final int value;

    private IndexToLocFormat(int value) {
        this.value = value;
    }

    /** The raw value stored in the font for this format. */
    public int value() {
        return this.value;
    }

    // NOTE: overloads (does not override) Object.equals(Object); compares the raw int value.
    public boolean equals(int value) {
        return value == this.value;
    }

    /** Linear scan over the constants; returns null for an unrecognized raw value. */
    public static IndexToLocFormat valueOf(int value) {
        for (IndexToLocFormat format : IndexToLocFormat.values()) {
            if (format.equals(value)) {
                return format;
            }
        }
        return null;
    }
}

/** The raw indexToLocFormat value as read from the table data. */
public int indexToLocFormatAsInt() {
    return this.data.readShort(Offset.indexToLocFormat.offset);
}

/** The indexToLocFormat as an enum, or null if the stored value is unrecognized. */
public IndexToLocFormat indexToLocFormat() {
    return IndexToLocFormat.valueOf(this.indexToLocFormatAsInt());
}

/** The raw glyphDataFormat value as read from the table data. */
public int glyphdataFormat() {
    return this.data.readShort(Offset.glyphDataFormat.offset);
}

/** Builder for the font header table. */
public static class Builder extends TableBasedTableBuilder<FontHeaderTable> {
    // Guards against overwriting a checksum supplied via setFontChecksum(long).
    private boolean fontChecksumSet = false;
    private long fontChecksum = 0;

    /**
     * Create a new builder using the header information and data provided.
     *
     * @param header the header information
     * @param data the data holding the table
     * @return a new builder
     */
    public static Builder createBuilder(Header header, WritableFontData data) {
        return new Builder(header, data);
    }

    protected Builder(Header header, WritableFontData data) {
        super(header, data);
        // Exclude the checkSumAdjustment field from checksum computation, per the
        // 'head' table checksumming rules.
        data.setCheckSumRanges(0, Offset.checkSumAdjustment.offset, Offset.magicNumber.offset);
    }

    protected Builder(Header header, ReadableFontData data) {
        super(header, data);
        data.setCheckSumRanges(FontHeaderTable.CHECKSUM_RANGES);
    }

    @Override
    protected boolean subReadyToSerialize() {
        if (this.dataChanged()) {
            // Re-establish the checksum exclusion ranges on the (possibly new) data.
            ReadableFontData data = this.internalReadData();
            data.setCheckSumRanges(FontHeaderTable.CHECKSUM_RANGES);
        }
        if (this.fontChecksumSet) {
            ReadableFontData data = this.internalReadData();
            data.setCheckSumRanges(FontHeaderTable.CHECKSUM_RANGES);
            // checkSumAdjustment = base constant - (whole-font checksum including this table).
            long checksumAdjustment =
                FontHeaderTable.CHECKSUM_ADJUSTMENT_BASE - (this.fontChecksum + data.checksum());
            this.setCheckSumAdjustment(checksumAdjustment);
        }
        return super.subReadyToSerialize();
    }

    @Override
    protected FontHeaderTable subBuildTable(ReadableFontData data) {
        return new FontHeaderTable(this.header(), data);
    }

    /**
     * Sets the font checksum to be used when calculating the checksum
     * adjustment for the header table during build time.
     *
     * The font checksum is the sum value of all tables but the font header
     * table. If the font checksum has been set then further setting will be
     * ignored until the font check sum has been cleared with
     * {@link #clearFontChecksum()}. Most users will never need to set this. It
     * is used when the font is being built. If set by a client it can interfere
     * with that process.
     *
     * @param checksum
     *            the font checksum
     */
    public void setFontChecksum(long checksum) {
        if (this.fontChecksumSet) {
            // First writer wins until clearFontChecksum() is called.
            return;
        }
        this.fontChecksumSet = true;
        this.fontChecksum = checksum;
    }

    /**
     * Clears the font checksum to be used when calculating the checksum
     * adjustment for the header table during build time.
     *
     * The font checksum is the sum value of all tables but the font header
     * table. If the font checksum has been set then further setting will be
     * ignored until the font check sum has been cleared.
     *
     */
    public void clearFontChecksum() {
        this.fontChecksumSet = false;
    }

    // The accessors below read through the built table; the mutators write
    // directly into the underlying writable data at the field's fixed offset.

    public int tableVersion() {
        return this.table().tableVersion();
    }

    public void setTableVersion(int version) {
        this.internalWriteData().writeFixed(Offset.tableVersion.offset, version);
    }

    public int fontRevision() {
        return this.table().fontRevision();
    }

    public void setFontRevision(int revision) {
        this.internalWriteData().writeFixed(Offset.fontRevision.offset, revision);
    }

    public long checkSumAdjustment() {
        return this.table().checkSumAdjustment();
    }

    public void setCheckSumAdjustment(long adjustment) {
        this.internalWriteData().writeULong(Offset.checkSumAdjustment.offset, adjustment);
    }

    public long magicNumber() {
        return this.table().magicNumber();
    }

    public void setMagicNumber(long magicNumber) {
        this.internalWriteData().writeULong(Offset.magicNumber.offset, magicNumber);
    }

    public int flagsAsInt() {
        return this.table().flagsAsInt();
    }

    public EnumSet<Flags> flags() {
        return this.table().flags();
    }

    public void setFlagsAsInt(int flags) {
        this.internalWriteData().writeUShort(Offset.flags.offset, flags);
    }

    public void setFlags(EnumSet<Flags> flags) {
        setFlagsAsInt(Flags.cleanValue(flags));
    }

    public int unitsPerEm() {
        return this.table().unitsPerEm();
    }

    public void setUnitsPerEm(int units) {
        this.internalWriteData().writeUShort(Offset.unitsPerEm.offset, units);
    }

    public long created() {
        return this.table().created();
    }

    public void setCreated(long date) {
        this.internalWriteData().writeDateTime(Offset.created.offset, date);
    }

    public long modified() {
        return this.table().modified();
    }

    public void setModified(long date) {
        this.internalWriteData().writeDateTime(Offset.modified.offset, date);
    }

    public int xMin() {
        return this.table().xMin();
    }

    public void setXMin(int xmin) {
        this.internalWriteData().writeShort(Offset.xMin.offset, xmin);
    }

    public int yMin() {
        return this.table().yMin();
    }

    public void setYMin(int ymin) {
        this.internalWriteData().writeShort(Offset.yMin.offset, ymin);
    }

    public int xMax() {
        return this.table().xMax();
    }

    public void setXMax(int xmax) {
        this.internalWriteData().writeShort(Offset.xMax.offset, xmax);
    }

    public int yMax() {
        return this.table().yMax();
    }

    public void setYMax(int ymax) {
        this.internalWriteData().writeShort(Offset.yMax.offset, ymax);
    }

    public int macStyleAsInt() {
        return this.table().macStyleAsInt();
    }

    public void setMacStyleAsInt(int style) {
        this.internalWriteData().writeUShort(Offset.macStyle.offset, style);
    }

    public EnumSet<MacStyle> macStyle() {
        return this.table().macStyle();
    }

    // NOTE(review): mutator is named macStyle(...) rather than setMacStyle(...),
    // inconsistent with the other setters; kept as-is since renaming would break callers.
    public void macStyle(EnumSet<MacStyle> style) {
        this.setMacStyleAsInt(MacStyle.cleanValue(style));
    }

    public int lowestRecPPEM() {
        return this.table().lowestRecPPEM();
    }

    public void setLowestRecPPEM(int size) {
        this.internalWriteData().writeUShort(Offset.lowestRecPPEM.offset, size);
    }

    public int fontDirectionHintAsInt() {
        return this.table().fontDirectionHintAsInt();
    }

    public void setFontDirectionHintAsInt(int hint) {
        this.internalWriteData().writeShort(Offset.fontDirectionHint.offset, hint);
    }

    public FontDirectionHint fontDirectionHint() {
        return this.table().fontDirectionHint();
    }

    public void setFontDirectionHint(FontDirectionHint hint) {
        this.setFontDirectionHintAsInt(hint.value());
    }

    public int indexToLocFormatAsInt() {
        return this.table().indexToLocFormatAsInt();
    }

    public void setIndexToLocFormatAsInt(int format) {
        this.internalWriteData().writeShort(Offset.indexToLocFormat.offset, format);
    }

    public IndexToLocFormat indexToLocFormat() {
        return this.table().indexToLocFormat();
    }

    public void setIndexToLocFormat(IndexToLocFormat format) {
        this.setIndexToLocFormatAsInt(format.value());
    }

    public int glyphdataFormat() {
        return this.table().glyphdataFormat();
    }

    public void setGlyphdataFormat(int format) {
        this.internalWriteData().writeShort(Offset.glyphDataFormat.offset, format);
    }
}
}
// ParserAdapter.java - adapt a SAX1 Parser to a SAX2 XMLReader. // http://www.saxproject.org // Written by David Megginson // NO WARRANTY! This class is in the public domain. // $Id: ParserAdapter.java,v 1.16 2004/04/26 17:34:35 dmegginson Exp $ package org.xml.sax.helpers; import java.io.IOException; import java.util.ArrayList; import java.util.Enumeration; import org.xml.sax.AttributeList; import org.xml.sax.Attributes; import org.xml.sax.ContentHandler; import org.xml.sax.DTDHandler; import org.xml.sax.DocumentHandler; import org.xml.sax.EntityResolver; import org.xml.sax.ErrorHandler; import org.xml.sax.InputSource; import org.xml.sax.Locator; import org.xml.sax.Parser; import org.xml.sax.SAXException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.SAXParseException; import org.xml.sax.XMLReader; /** * Adapt a SAX1 Parser as a SAX2 XMLReader. * * <blockquote> * <em>This module, both source code and documentation, is in the * Public Domain, and comes with <strong>NO WARRANTY</strong>.</em> * See <a href='http://www.saxproject.org'>http://www.saxproject.org</a> * for further information. * </blockquote> * * <p>This class wraps a SAX1 {@link org.xml.sax.Parser Parser} * and makes it act as a SAX2 {@link org.xml.sax.XMLReader XMLReader}, * with feature, property, and Namespace support. Note * that it is not possible to report {@link org.xml.sax.ContentHandler#skippedEntity * skippedEntity} events, since SAX1 does not make that information available.</p> * * <p>This adapter does not test for duplicate Namespace-qualified * attribute names.</p> * * @since SAX 2.0 * @author David Megginson * @version 2.0.1 (sax2r2) * @see org.xml.sax.helpers.XMLReaderAdapter * @see org.xml.sax.XMLReader * @see org.xml.sax.Parser */ public class ParserAdapter implements XMLReader, DocumentHandler { //////////////////////////////////////////////////////////////////// // Constructors. 
////////////////////////////////////////////////////////////////////


    /**
     * Construct a new parser adapter.
     *
     * <p>Use the "org.xml.sax.parser" property to locate the
     * embedded SAX1 driver.</p>
     *
     * @exception SAXException If the embedded driver
     *            cannot be instantiated or if the
     *            org.xml.sax.parser property is not specified.
     */
    public ParserAdapter ()
      throws SAXException
    {
        String driver = System.getProperty("org.xml.sax.parser");

        try {
            setup(ParserFactory.makeParser());
        } catch (ClassNotFoundException e1) {
            throw new
                SAXException("Cannot find SAX1 driver class " +
                             driver, e1);
        } catch (IllegalAccessException e2) {
            throw new
                SAXException("SAX1 driver class " +
                             driver +
                             " found but cannot be loaded", e2);
        } catch (InstantiationException e3) {
            throw new
                SAXException("SAX1 driver class " +
                             driver +
                             " loaded but cannot be instantiated", e3);
        } catch (ClassCastException e4) {
            throw new
                SAXException("SAX1 driver class " +
                             driver +
                             " does not implement org.xml.sax.Parser");
        } catch (NullPointerException e5) {
            throw new
                SAXException("System property org.xml.sax.parser not specified");
        }
    }


    /**
     * Construct a new parser adapter.
     *
     * <p>Note that the embedded parser cannot be changed once the
     * adapter is created; to embed a different parser, allocate
     * a new ParserAdapter.</p>
     *
     * @param parser The SAX1 parser to embed.
     * @exception java.lang.NullPointerException If the parser parameter
     *            is null.
     */
    public ParserAdapter (Parser parser)
    {
        setup(parser);
    }


    /**
     * Internal setup method.
     *
     * <p>Initializes the embedded parser reference and the adapter's
     * helper objects (attribute buffer, namespace context, SAX1
     * attribute-list wrapper).</p>
     *
     * @param parser The embedded parser.
     * @exception java.lang.NullPointerException If the parser parameter
     *            is null.
     */
    private void setup (Parser parser)
    {
        if (parser == null) {
            throw new
                NullPointerException("Parser argument must not be null");
        }
        this.parser = parser;
        atts = new AttributesImpl();
        nsSupport = new NamespaceSupport();
        attAdapter = new AttributeListAdapter();
    }



    ////////////////////////////////////////////////////////////////////
    // Implementation of org.xml.sax.XMLReader.
    ////////////////////////////////////////////////////////////////////


    //
    // Internal constants for the sake of convenience.
    //
    private static final String FEATURES = "http://xml.org/sax/features/";
    private static final String NAMESPACES = FEATURES + "namespaces";
    private static final String NAMESPACE_PREFIXES = FEATURES + "namespace-prefixes";
    private static final String XMLNS_URIs = FEATURES + "xmlns-uris";


    /**
     * Set a feature flag for the parser.
     *
     * <p>The only features recognized are namespaces and
     * namespace-prefixes.</p>
     *
     * @param name The feature name, as a complete URI.
     * @param value The requested feature value.
     * @exception SAXNotRecognizedException If the feature
     *            can't be assigned or retrieved.
     * @exception SAXNotSupportedException If the feature
     *            can't be assigned that value.
     * @see org.xml.sax.XMLReader#setFeature
     */
    public void setFeature (String name, boolean value)
        throws SAXNotRecognizedException, SAXNotSupportedException
    {
        if (name.equals(NAMESPACES)) {
            checkNotParsing("feature", name);
            namespaces = value;
            // Invariant: at least one of namespaces/prefixes must stay enabled.
            if (!namespaces && !prefixes) {
                prefixes = true;
            }
        } else if (name.equals(NAMESPACE_PREFIXES)) {
            checkNotParsing("feature", name);
            prefixes = value;
            if (!prefixes && !namespaces) {
                namespaces = true;
            }
        } else if (name.equals(XMLNS_URIs)) {
            checkNotParsing("feature", name);
            uris = value;
        } else {
            throw new SAXNotRecognizedException("Feature: " + name);
        }
    }


    /**
     * Check a parser feature flag.
     *
     * <p>The only features recognized are namespaces and
     * namespace-prefixes.</p>
     *
     * @param name The feature name, as a complete URI.
     * @return The current feature value.
     * @exception SAXNotRecognizedException If the feature
     *            value can't be assigned or retrieved.
     * @exception SAXNotSupportedException If the
     *            feature is not currently readable.
     * @see org.xml.sax.XMLReader#getFeature
     */
    public boolean getFeature (String name)
        throws SAXNotRecognizedException, SAXNotSupportedException
    {
        if (name.equals(NAMESPACES)) {
            return namespaces;
        } else if (name.equals(NAMESPACE_PREFIXES)) {
            return prefixes;
        } else if (name.equals(XMLNS_URIs)) {
            return uris;
        } else {
            throw new SAXNotRecognizedException("Feature: " + name);
        }
    }


    /**
     * Set a parser property.
     *
     * <p>No properties are currently recognized.</p>
     *
     * @param name The property name.
     * @param value The property value.
     * @exception SAXNotRecognizedException If the property
     *            value can't be assigned or retrieved.
     * @exception SAXNotSupportedException If the property
     *            can't be assigned that value.
     * @see org.xml.sax.XMLReader#setProperty
     */
    public void setProperty (String name, Object value)
        throws SAXNotRecognizedException, SAXNotSupportedException
    {
        throw new SAXNotRecognizedException("Property: " + name);
    }


    /**
     * Get a parser property.
     *
     * <p>No properties are currently recognized.</p>
     *
     * @param name The property name.
     * @return The property value.
     * @exception SAXNotRecognizedException If the property
     *            value can't be assigned or retrieved.
     * @exception SAXNotSupportedException If the property
     *            value is not currently readable.
     * @see org.xml.sax.XMLReader#getProperty
     */
    public Object getProperty (String name)
        throws SAXNotRecognizedException, SAXNotSupportedException
    {
        throw new SAXNotRecognizedException("Property: " + name);
    }


    /**
     * Set the entity resolver.
     *
     * @param resolver The new entity resolver.
     * @see org.xml.sax.XMLReader#setEntityResolver
     */
    public void setEntityResolver (EntityResolver resolver)
    {
        entityResolver = resolver;
    }


    /**
     * Return the current entity resolver.
     *
     * @return The current entity resolver, or null if none was supplied.
     * @see org.xml.sax.XMLReader#getEntityResolver
     */
    public EntityResolver getEntityResolver ()
    {
        return entityResolver;
    }


    /**
     * Set the DTD handler.
     *
     * @param handler the new DTD handler
     * @see org.xml.sax.XMLReader#setDTDHandler
     */
    public void setDTDHandler (DTDHandler handler)
    {
        dtdHandler = handler;
    }


    /**
     * Return the current DTD handler.
     *
     * @return the current DTD handler, or null if none was supplied
     * @see org.xml.sax.XMLReader#getDTDHandler
     */
    public DTDHandler getDTDHandler ()
    {
        return dtdHandler;
    }


    /**
     * Set the content handler.
     *
     * @param handler the new content handler
     * @see org.xml.sax.XMLReader#setContentHandler
     */
    public void setContentHandler (ContentHandler handler)
    {
        contentHandler = handler;
    }


    /**
     * Return the current content handler.
     *
     * @return The current content handler, or null if none was supplied.
     * @see org.xml.sax.XMLReader#getContentHandler
     */
    public ContentHandler getContentHandler ()
    {
        return contentHandler;
    }


    /**
     * Set the error handler.
     *
     * @param handler The new error handler.
     * @see org.xml.sax.XMLReader#setErrorHandler
     */
    public void setErrorHandler (ErrorHandler handler)
    {
        errorHandler = handler;
    }


    /**
     * Return the current error handler.
     *
     * @return The current error handler, or null if none was supplied.
     * @see org.xml.sax.XMLReader#getErrorHandler
     */
    public ErrorHandler getErrorHandler ()
    {
        return errorHandler;
    }


    /**
     * Parse an XML document.
     *
     * @param systemId The absolute URL of the document.
     * @exception java.io.IOException If there is a problem reading
     *            the raw content of the document.
     * @exception SAXException If there is a problem
     *            processing the document.
     * @see #parse(org.xml.sax.InputSource)
     * @see org.xml.sax.Parser#parse(java.lang.String)
     */
    public void parse (String systemId)
        throws IOException, SAXException
    {
        parse(new InputSource(systemId));
    }


    /**
     * Parse an XML document.
     *
     * @param input An input source for the document.
     * @exception java.io.IOException If there is a problem reading
     *            the raw content of the document.
     * @exception SAXException If there is a problem
     *            processing the document.
* @see #parse(java.lang.String) * @see org.xml.sax.Parser#parse(org.xml.sax.InputSource) */ public void parse (InputSource input) throws IOException, SAXException { if (parsing) { throw new SAXException("Parser is already in use"); } setupParser(); parsing = true; try { parser.parse(input); } finally { parsing = false; } parsing = false; } //////////////////////////////////////////////////////////////////// // Implementation of org.xml.sax.DocumentHandler. //////////////////////////////////////////////////////////////////// /** * Adapter implementation method; do not call. * Adapt a SAX1 document locator event. * * @param locator A document locator. * @see org.xml.sax.ContentHandler#setDocumentLocator */ public void setDocumentLocator (Locator locator) { this.locator = locator; if (contentHandler != null) { contentHandler.setDocumentLocator(locator); } } /** * Adapter implementation method; do not call. * Adapt a SAX1 start document event. * * @exception SAXException The client may raise a * processing exception. * @see org.xml.sax.DocumentHandler#startDocument */ public void startDocument () throws SAXException { if (contentHandler != null) { contentHandler.startDocument(); } } /** * Adapter implementation method; do not call. * Adapt a SAX1 end document event. * * @exception SAXException The client may raise a * processing exception. * @see org.xml.sax.DocumentHandler#endDocument */ public void endDocument () throws SAXException { if (contentHandler != null) { contentHandler.endDocument(); } } /** * Adapter implementation method; do not call. * Adapt a SAX1 startElement event. * * <p>If necessary, perform Namespace processing.</p> * * @param qName The qualified (prefixed) name. * @param qAtts The XML attribute list (with qnames). * @exception SAXException The client may raise a * processing exception. 
     */
    public void startElement (String qName, AttributeList qAtts)
        throws SAXException
    {
                                // These are exceptions from the
                                // first pass; they should be
                                // ignored if there's a second pass,
                                // but reported otherwise.
        ArrayList<SAXParseException> exceptions = null;

                                // If we're not doing Namespace
                                // processing, dispatch this quickly.
        if (!namespaces) {
            if (contentHandler != null) {
                attAdapter.setAttributeList(qAtts);
                contentHandler.startElement("", "", qName.intern(),
                                            attAdapter);
            }
            return;
        }


                                // OK, we're doing Namespace processing.
        nsSupport.pushContext();
        int length = qAtts.getLength();

                                // First pass:  handle NS decls
        for (int i = 0; i < length; i++) {
            String attQName = qAtts.getName(i);

            if (!attQName.startsWith("xmlns"))
                continue;
                                // Could be a declaration...
            String prefix;
            int n = attQName.indexOf(':');

                                // xmlns=...
            if (n == -1 && attQName.length () == 5) {
                prefix = "";
            } else if (n != 5) {
                // XML namespaces spec doesn't discuss "xmlnsf:oo"
                // (and similarly named) attributes ... at most, warn
                continue;
            } else              // xmlns:foo=...
                prefix = attQName.substring(n+1);

            String value = qAtts.getValue(i);
            if (!nsSupport.declarePrefix(prefix, value)) {
                reportError("Illegal Namespace prefix: " + prefix);
                continue;
            }
            if (contentHandler != null)
                contentHandler.startPrefixMapping(prefix, value);
        }

                                // Second pass: copy all relevant
                                // attributes into the SAX2 AttributeList
                                // using updated prefix bindings
        atts.clear();
        for (int i = 0; i < length; i++) {
            String attQName = qAtts.getName(i);
            String type = qAtts.getType(i);
            String value = qAtts.getValue(i);

                                // Declaration?
            if (attQName.startsWith("xmlns")) {
                String prefix;
                int n = attQName.indexOf(':');

                if (n == -1 && attQName.length () == 5) {
                    prefix = "";
                } else if (n != 5) {
                    // XML namespaces spec doesn't discuss "xmlnsf:oo"
                    // (and similarly named) attributes ... ignore
                    prefix = null;
                } else {
                    prefix = attQName.substring(6);
                }
                                // Yes, decl:  report or prune
                if (prefix != null) {
                    if (prefixes) {
                        if (uris)
                            // note funky case:  localname can be null
                            // when declaring the default prefix, and
                            // yet the uri isn't null.
                            atts.addAttribute (nsSupport.XMLNS, prefix,
                                    attQName.intern(), type, value);
                        else
                            atts.addAttribute ("", "",
                                    attQName.intern(), type, value);
                    }
                    continue;
                }
            }

                                // Not a declaration -- report
            try {
                String attName[] = processName(attQName, true, true);
                atts.addAttribute(attName[0], attName[1], attName[2],
                                  type, value);
            } catch (SAXException e) {
                // Defer the report so the element event itself is still
                // delivered with the remaining attributes.
                if (exceptions == null) {
                    exceptions = new ArrayList<SAXParseException>();
                }
                exceptions.add((SAXParseException) e);
                atts.addAttribute("", attQName, attQName, type, value);
            }
        }

        // now handle the deferred exception reports
        if (exceptions != null && errorHandler != null) {
            for (SAXParseException ex : exceptions) {
                errorHandler.error(ex);
            }
        }

                                // OK, finally report the event.
        if (contentHandler != null) {
            String name[] = processName(qName, false, false);
            contentHandler.startElement(name[0], name[1], name[2], atts);
        }
    }


    /**
     * Adapter implementation method; do not call.
     * Adapt a SAX1 end element event.
     *
     * @param qName The qualified (prefixed) name.
     * @exception SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.DocumentHandler#endElement
     */
    public void endElement (String qName)
        throws SAXException
    {
                                // If we're not doing Namespace
                                // processing, dispatch this quickly.
        if (!namespaces) {
            if (contentHandler != null) {
                contentHandler.endElement("", "", qName.intern());
            }
            return;
        }

                                // Split the name.
        String names[] = processName(qName, false, false);
        if (contentHandler != null) {
            contentHandler.endElement(names[0], names[1], names[2]);
            // Close every prefix mapping declared on this element before
            // popping the namespace context.
            Enumeration prefixes = nsSupport.getDeclaredPrefixes();
            while (prefixes.hasMoreElements()) {
                String prefix = (String)prefixes.nextElement();
                contentHandler.endPrefixMapping(prefix);
            }
        }
        nsSupport.popContext();
    }


    /**
     * Adapter implementation method; do not call.
     * Adapt a SAX1 characters event.
     *
     * @param ch An array of characters.
     * @param start The starting position in the array.
     * @param length The number of characters to use.
     * @exception SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.DocumentHandler#characters
     */
    public void characters (char ch[], int start, int length)
        throws SAXException
    {
        if (contentHandler != null) {
            contentHandler.characters(ch, start, length);
        }
    }


    /**
     * Adapter implementation method; do not call.
     * Adapt a SAX1 ignorable whitespace event.
     *
     * @param ch An array of characters.
     * @param start The starting position in the array.
     * @param length The number of characters to use.
     * @exception SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.DocumentHandler#ignorableWhitespace
     */
    public void ignorableWhitespace (char ch[], int start, int length)
        throws SAXException
    {
        if (contentHandler != null) {
            contentHandler.ignorableWhitespace(ch, start, length);
        }
    }


    /**
     * Adapter implementation method; do not call.
     * Adapt a SAX1 processing instruction event.
     *
     * @param target The processing instruction target.
     * @param data The remainder of the processing instruction
     * @exception SAXException The client may raise a
     *            processing exception.
     * @see org.xml.sax.DocumentHandler#processingInstruction
     */
    public void processingInstruction (String target, String data)
        throws SAXException
    {
        if (contentHandler != null) {
            contentHandler.processingInstruction(target, data);
        }
    }



    ////////////////////////////////////////////////////////////////////
    // Internal utility methods.
    ////////////////////////////////////////////////////////////////////


    /**
     * Initialize the parser before each run.
     */
    private void setupParser ()
    {
        // catch an illegal "nonsense" state.
        if (!prefixes && !namespaces)
            throw new IllegalStateException ();

        nsSupport.reset();
        if (uris)
            nsSupport.setNamespaceDeclUris (true);

        if (entityResolver != null) {
            parser.setEntityResolver(entityResolver);
        }
        if (dtdHandler != null) {
            parser.setDTDHandler(dtdHandler);
        }
        if (errorHandler != null) {
            parser.setErrorHandler(errorHandler);
        }
        parser.setDocumentHandler(this);
        locator = null;
    }


    /**
     * Process a qualified (prefixed) name.
     *
     * <p>If the name has an undeclared prefix, use only the qname
     * and make an ErrorHandler.error callback in case the app is
     * interested.</p>
     *
     * @param qName The qualified (prefixed) name.
     * @param isAttribute true if this is an attribute name.
     * @param useException true to throw on an undeclared prefix instead
     *            of reporting through the error handler.
     * @return The name split into three parts.
     * @exception SAXException The client may throw
     *            an exception if there is an error callback.
     */
    private String [] processName (String qName, boolean isAttribute,
                                   boolean useException)
        throws SAXException
    {
        String parts[] = nsSupport.processName(qName, nameParts,
                                               isAttribute);
        if (parts == null) {
            if (useException)
                throw makeException("Undeclared prefix: " + qName);
            reportError("Undeclared prefix: " + qName);
            // Fall back to a namespace-less name with the qname only.
            parts = new String[3];
            parts[0] = parts[1] = "";
            parts[2] = qName.intern();
        }
        return parts;
    }


    /**
     * Report a non-fatal error.
     *
     * @param message The error message.
     * @exception SAXException The client may throw
     *            an exception.
     */
    void reportError (String message)
        throws SAXException
    {
        if (errorHandler != null)
            errorHandler.error(makeException(message));
    }


    /**
     * Construct an exception for the current context.
     *
     * @param message The error message.
     */
    private SAXParseException makeException (String message)
    {
        // Attach location information when a Locator was supplied.
        if (locator != null) {
            return new SAXParseException(message, locator);
        } else {
            return new SAXParseException(message, null, null, -1, -1);
        }
    }


    /**
     * Throw an exception if we are parsing.
     *
     * <p>Use this method to detect illegal feature or
     * property changes.</p>
     *
     * @param type The type of thing (feature or property).
     * @param name The feature or property name.
     * @exception SAXNotSupportedException If a
     *            document is currently being parsed.
     */
    private void checkNotParsing (String type, String name)
        throws SAXNotSupportedException
    {
        if (parsing) {
            throw new SAXNotSupportedException("Cannot change " +
                                               type + ' ' +
                                               name + " while parsing");

        }
    }



    ////////////////////////////////////////////////////////////////////
    // Internal state.
    ////////////////////////////////////////////////////////////////////

    private NamespaceSupport nsSupport;
    private AttributeListAdapter attAdapter;

    // True only while parse() is active; guards feature/property changes.
    private boolean parsing = false;

    // Scratch buffer reused by processName() via NamespaceSupport.
    private String nameParts[] = new String[3];

    private Parser parser = null;

    // Reusable SAX2 attribute buffer populated by startElement().
    private AttributesImpl atts = null;

    // Features
    private boolean namespaces = true;
    private boolean prefixes = false;
    private boolean uris = false;

    // Properties

    // Handlers
    Locator locator;

    EntityResolver entityResolver = null;
    DTDHandler dtdHandler = null;
    ContentHandler contentHandler = null;
    ErrorHandler errorHandler = null;



    ////////////////////////////////////////////////////////////////////
    // Inner class to wrap an AttributeList when not doing NS proc.
    ////////////////////////////////////////////////////////////////////


    /**
     * Adapt a SAX1 AttributeList as a SAX2 Attributes object.
* * <p>This class is in the Public Domain, and comes with NO * WARRANTY of any kind.</p> * * <p>This wrapper class is used only when Namespace support * is disabled -- it provides pretty much a direct mapping * from SAX1 to SAX2, except that names and types are * interned whenever requested.</p> */ final class AttributeListAdapter implements Attributes { /** * Construct a new adapter. */ AttributeListAdapter () { } /** * Set the embedded AttributeList. * * <p>This method must be invoked before any of the others * can be used.</p> * * @param The SAX1 attribute list (with qnames). */ void setAttributeList (AttributeList qAtts) { this.qAtts = qAtts; } /** * Return the length of the attribute list. * * @return The number of attributes in the list. * @see org.xml.sax.Attributes#getLength */ public int getLength () { return qAtts.getLength(); } /** * Return the Namespace URI of the specified attribute. * * @param The attribute's index. * @return Always the empty string. * @see org.xml.sax.Attributes#getURI */ public String getURI (int i) { return ""; } /** * Return the local name of the specified attribute. * * @param The attribute's index. * @return Always the empty string. * @see org.xml.sax.Attributes#getLocalName */ public String getLocalName (int i) { return ""; } /** * Return the qualified (prefixed) name of the specified attribute. * * @param The attribute's index. * @return The attribute's qualified name, internalized. */ public String getQName (int i) { return qAtts.getName(i).intern(); } /** * Return the type of the specified attribute. * * @param The attribute's index. * @return The attribute's type as an internalized string. */ public String getType (int i) { return qAtts.getType(i).intern(); } /** * Return the value of the specified attribute. * * @param The attribute's index. * @return The attribute's value. */ public String getValue (int i) { return qAtts.getValue(i); } /** * Look up an attribute index by Namespace name. 
* * @param uri The Namespace URI or the empty string. * @param localName The local name. * @return The attributes index, or -1 if none was found. * @see org.xml.sax.Attributes#getIndex(java.lang.String,java.lang.String) */ public int getIndex (String uri, String localName) { return -1; } /** * Look up an attribute index by qualified (prefixed) name. * * @param qName The qualified name. * @return The attributes index, or -1 if none was found. * @see org.xml.sax.Attributes#getIndex(java.lang.String) */ public int getIndex (String qName) { int max = atts.getLength(); for (int i = 0; i < max; i++) { if (qAtts.getName(i).equals(qName)) { return i; } } return -1; } /** * Look up the type of an attribute by Namespace name. * * @param uri The Namespace URI * @param localName The local name. * @return The attribute's type as an internalized string. */ public String getType (String uri, String localName) { return null; } /** * Look up the type of an attribute by qualified (prefixed) name. * * @param qName The qualified name. * @return The attribute's type as an internalized string. */ public String getType (String qName) { return qAtts.getType(qName).intern(); } /** * Look up the value of an attribute by Namespace name. * * @param uri The Namespace URI * @param localName The local name. * @return The attribute's value. */ public String getValue (String uri, String localName) { return null; } /** * Look up the value of an attribute by qualified (prefixed) name. * * @param qName The qualified name. * @return The attribute's value. */ public String getValue (String qName) { return qAtts.getValue(qName); } private AttributeList qAtts; } } // end of ParserAdapter.java
package fr.free.nrw.commons.upload; import android.annotation.SuppressLint; import android.app.PendingIntent; import android.content.ContentResolver; import android.content.Intent; import android.graphics.BitmapFactory; import android.os.Binder; import android.os.Bundle; import android.os.IBinder; import androidx.core.app.NotificationCompat; import androidx.core.app.NotificationManagerCompat; import fr.free.nrw.commons.BuildConfig; import fr.free.nrw.commons.CommonsApplication; import fr.free.nrw.commons.Media; import fr.free.nrw.commons.R; import fr.free.nrw.commons.auth.SessionManager; import fr.free.nrw.commons.contributions.ChunkInfo; import fr.free.nrw.commons.contributions.Contribution; import fr.free.nrw.commons.contributions.ContributionDao; import fr.free.nrw.commons.contributions.MainActivity; import fr.free.nrw.commons.di.CommonsApplicationModule; import fr.free.nrw.commons.di.CommonsDaggerService; import fr.free.nrw.commons.kvstore.JsonKvStore; import fr.free.nrw.commons.media.MediaClient; import fr.free.nrw.commons.utils.ViewUtil; import fr.free.nrw.commons.wikidata.WikidataEditService; import io.reactivex.Completable; import io.reactivex.Observable; import io.reactivex.ObservableSource; import io.reactivex.Scheduler; import io.reactivex.disposables.CompositeDisposable; import io.reactivex.functions.Consumer; import io.reactivex.functions.Function; import io.reactivex.processors.PublishProcessor; import io.reactivex.schedulers.Schedulers; import java.io.IOException; import java.util.Arrays; import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import javax.inject.Inject; import javax.inject.Named; import timber.log.Timber; public class UploadService extends CommonsDaggerService { private static final String EXTRA_PREFIX = "fr.free.nrw.commons.upload"; private static final List<String> STASH_ERROR_CODES = Arrays .asList("uploadstash-file-not-found", 
            "stashfailed", "verification-error", "chunk-too-small");

    public static final String ACTION_START_SERVICE = EXTRA_PREFIX + ".upload";
    // NOTE(review): unlike the other action/extra keys this one has no "."
    // separator after EXTRA_PREFIX -- confirm intentional before changing,
    // since the literal value may be matched elsewhere.
    public static final String PROCESS_PENDING_LIMITED_CONNECTION_MODE_UPLOADS =
        EXTRA_PREFIX + "process_limited_connection_mode_uploads";
    public static final String EXTRA_FILES = EXTRA_PREFIX + ".files";

    // Collaborators injected by Dagger.
    @Inject
    WikidataEditService wikidataEditService;
    @Inject
    SessionManager sessionManager;
    @Inject
    ContributionDao contributionDao;
    @Inject
    UploadClient uploadClient;
    @Inject
    MediaClient mediaClient;
    @Inject
    @Named(CommonsApplicationModule.MAIN_THREAD)
    Scheduler mainThreadScheduler;
    @Inject
    @Named(CommonsApplicationModule.IO_THREAD)
    Scheduler ioThreadScheduler;
    @Inject
    @Named("default_preferences")
    public JsonKvStore defaultKvStore;

    private NotificationManagerCompat notificationManager;
    // Mutable builder reused across progress updates for the current upload.
    private NotificationCompat.Builder curNotification;
    private int toUpload;
    private CompositeDisposable compositeDisposable;

    /**
     * The filePath names of unfinished uploads, used to prevent overwriting
     */
    private Set<String> unfinishedUploads = new HashSet<>();

    // DO NOT HAVE NOTIFICATION ID OF 0 FOR ANYTHING
    // See http://stackoverflow.com/questions/8725909/startforeground-does-not-show-my-notification
    // Seriously, Android?
public static final int NOTIFICATION_UPLOAD_IN_PROGRESS = 1; public static final int NOTIFICATION_UPLOAD_FAILED = 3; public static final int NOTIFICATION_UPLOAD_PAUSED = 4; protected class NotificationUpdateProgressListener { String notificationTag; boolean notificationTitleChanged; Contribution contribution; String notificationProgressTitle; String notificationFinishingTitle; NotificationUpdateProgressListener(String notificationTag, String notificationProgressTitle, String notificationFinishingTitle, Contribution contribution) { this.notificationTag = notificationTag; this.notificationProgressTitle = notificationProgressTitle; this.notificationFinishingTitle = notificationFinishingTitle; this.contribution = contribution; } public void onProgress(long transferred, long total) { if (!notificationTitleChanged) { curNotification.setContentTitle(notificationProgressTitle); notificationTitleChanged = true; contribution.setState(Contribution.STATE_IN_PROGRESS); } if (transferred == total) { // Completed! 
curNotification.setContentTitle(notificationFinishingTitle) .setTicker(notificationFinishingTitle) .setProgress(0, 100, true); } else { curNotification .setProgress(100, (int) (((double) transferred / (double) total) * 100), false); } notificationManager .notify(notificationTag, NOTIFICATION_UPLOAD_IN_PROGRESS, curNotification.build()); contribution.setTransferred(transferred); compositeDisposable.add(contributionDao.update(contribution) .subscribeOn(ioThreadScheduler) .subscribe()); } public void onChunkUploaded(Contribution contribution, ChunkInfo chunkInfo) { contribution.setChunkInfo(chunkInfo); compositeDisposable.add(contributionDao.update(contribution) .subscribeOn(ioThreadScheduler) .subscribe()); } } /** * Sets contribution state to paused and disposes the active disposable * @param contribution */ public void pauseUpload(Contribution contribution) { uploadClient.pauseUpload(contribution.getPageId()); contribution.setState(Contribution.STATE_PAUSED); compositeDisposable.add(contributionDao.update(contribution) .subscribeOn(ioThreadScheduler) .subscribe()); } @Override public void onDestroy() { super.onDestroy(); compositeDisposable.dispose(); Timber.d("UploadService.onDestroy; %s are yet to be uploaded", unfinishedUploads); } public class UploadServiceLocalBinder extends Binder { public UploadService getService() { return UploadService.this; } } private final IBinder localBinder = new UploadServiceLocalBinder(); private PublishProcessor<Contribution> contributionsToUpload; @Override public IBinder onBind(Intent intent) { return localBinder; } @Override public void onCreate() { super.onCreate(); CommonsApplication.createNotificationChannel(getApplicationContext()); compositeDisposable = new CompositeDisposable(); notificationManager = NotificationManagerCompat.from(this); curNotification = getNotificationBuilder(CommonsApplication.NOTIFICATION_CHANNEL_ID_ALL); contributionsToUpload = PublishProcessor.create(); 
compositeDisposable.add(contributionsToUpload.subscribe(this::handleUpload)); } public void handleUpload(Contribution contribution) { contribution.setState(Contribution.STATE_QUEUED); contribution.setTransferred(0); toUpload++; if (curNotification != null && toUpload != 1) { curNotification.setContentText(getResources() .getQuantityString(R.plurals.uploads_pending_notification_indicator, toUpload, toUpload)); Timber.d("%d uploads left", toUpload); notificationManager .notify(contribution.getLocalUri().toString(), NOTIFICATION_UPLOAD_IN_PROGRESS, curNotification.build()); } compositeDisposable.add(contributionDao .save(contribution) .subscribeOn(ioThreadScheduler) .subscribe(() -> uploadContribution(contribution))); } private boolean freshStart = true; public void queue(Contribution contribution) { if (defaultKvStore .getBoolean(CommonsApplication.IS_LIMITED_CONNECTION_MODE_ENABLED, false)) { contribution.setState(Contribution.STATE_QUEUED_LIMITED_CONNECTION_MODE); contributionDao.save(contribution) .subscribeOn(ioThreadScheduler) .subscribe(); return; } contributionsToUpload.offer(contribution); } @Override public int onStartCommand(Intent intent, int flags, int startId) { showUploadNotification(); if (ACTION_START_SERVICE.equals(intent.getAction()) && freshStart) { compositeDisposable.add(contributionDao.updateStates(Contribution.STATE_FAILED, new int[]{Contribution.STATE_QUEUED, Contribution.STATE_IN_PROGRESS}) .observeOn(mainThreadScheduler) .subscribeOn(ioThreadScheduler) .subscribe()); freshStart = false; } else if (PROCESS_PENDING_LIMITED_CONNECTION_MODE_UPLOADS.equals(intent.getAction())) { contributionDao.getContribution(Contribution.STATE_QUEUED_LIMITED_CONNECTION_MODE) .flatMapObservable( (Function<List<Contribution>, ObservableSource<Contribution>>) contributions -> Observable .fromIterable(contributions)) .concatMapCompletable(contribution -> Completable.fromAction(() -> queue(contribution))) .subscribeOn(ioThreadScheduler) .subscribe(); } return 
START_REDELIVER_INTENT; } private void showUploadNotification() { compositeDisposable.add(contributionDao .getPendingUploads(new int[]{Contribution.STATE_IN_PROGRESS, Contribution.STATE_QUEUED}) .subscribe(count -> { if (count > 0) { startForeground(NOTIFICATION_UPLOAD_IN_PROGRESS, curNotification.setContentText(getText(R.string.starting_uploads)).build()); } })); } @SuppressLint("StringFormatInvalid") private NotificationCompat.Builder getNotificationBuilder(String channelId) { return new NotificationCompat.Builder(this, channelId) .setAutoCancel(true) .setSmallIcon(R.drawable.ic_launcher) .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher)) .setAutoCancel(true) .setOnlyAlertOnce(true) .setProgress(100, 0, true) .setOngoing(true) .setContentIntent( PendingIntent.getActivity(this, 0, new Intent(this, MainActivity.class), 0)); } @SuppressLint("CheckResult") private void uploadContribution(Contribution contribution) { if (contribution.getLocalUri() == null || contribution.getLocalUri().getPath() == null) { Timber.d("localUri/path is null"); return; } String notificationTag = contribution.getLocalUri().toString(); Timber.d("Before execution!"); final Media media = contribution.getMedia(); final String displayTitle = media.getDisplayTitle(); curNotification.setContentTitle(getString(R.string.upload_progress_notification_title_start, displayTitle)) .setContentText(getResources() .getQuantityString(R.plurals.uploads_pending_notification_indicator, toUpload, toUpload)) .setTicker(getString(R.string.upload_progress_notification_title_in_progress, displayTitle)) .setOngoing(true); notificationManager .notify(notificationTag, NOTIFICATION_UPLOAD_IN_PROGRESS, curNotification.build()); String filename = media.getFilename(); NotificationUpdateProgressListener notificationUpdater = new NotificationUpdateProgressListener( notificationTag, getString(R.string.upload_progress_notification_title_in_progress, displayTitle), 
getString(R.string.upload_progress_notification_title_finishing, displayTitle), contribution ); Observable.fromCallable(() -> "Temp_" + contribution.hashCode() + filename) .flatMap(stashFilename -> uploadClient .uploadFileToStash(getApplicationContext(), stashFilename, contribution, notificationUpdater)) .subscribeOn(Schedulers.io()) .observeOn(Schedulers.io()) .doFinally(() -> { if (filename != null) { unfinishedUploads.remove(filename); } toUpload--; if (toUpload == 0) { // Sync modifications right after all uploads are processed ContentResolver .requestSync(sessionManager.getCurrentAccount(), BuildConfig.MODIFICATION_AUTHORITY, new Bundle()); stopForeground(true); } }) .flatMap(uploadStash -> { Timber.d("Upload stash result %s", uploadStash.toString()); notificationManager.cancel(notificationTag, NOTIFICATION_UPLOAD_IN_PROGRESS); if (uploadStash.getState() == StashUploadState.SUCCESS) { Timber.d("making sure of uniqueness of name: %s", filename); String uniqueFilename = findUniqueFilename(filename); unfinishedUploads.add(uniqueFilename); return uploadClient.uploadFileFromStash( getApplicationContext(), contribution, uniqueFilename, uploadStash.getFileKey()).doOnError(new Consumer<Throwable>() { @Override public void accept(Throwable throwable) throws Exception { Timber.e(throwable, "Error occurred in uploading file from stash"); if (STASH_ERROR_CODES.contains(throwable.getMessage())) { clearChunks(contribution); } } }); } else if (uploadStash.getState() == StashUploadState.PAUSED) { showPausedNotification(contribution); return Observable.never(); } else { Timber.d("Contribution upload failed. 
Wikidata entity won't be edited"); showFailedNotification(contribution); return Observable.never(); } }) .subscribe( uploadResult -> onUpload(contribution, notificationTag, uploadResult), throwable -> { Timber.w(throwable, "Exception during upload"); notificationManager.cancel(notificationTag, NOTIFICATION_UPLOAD_IN_PROGRESS); showFailedNotification(contribution); }); } private void clearChunks(Contribution contribution) { contribution.setChunkInfo(null); compositeDisposable.add(contributionDao.update(contribution) .subscribeOn(ioThreadScheduler) .subscribe()); } private void onUpload(Contribution contribution, String notificationTag, UploadResult uploadResult) { notificationManager.cancel(notificationTag, NOTIFICATION_UPLOAD_IN_PROGRESS); if (uploadResult.isSuccessful()) { onSuccessfulUpload(contribution, uploadResult); } else { Timber.d("Contribution upload failed. Wikidata entity won't be edited"); showFailedNotification(contribution); } } private void onSuccessfulUpload(Contribution contribution, UploadResult uploadResult) { compositeDisposable .add(wikidataEditService.addDepictionsAndCaptions(uploadResult, contribution)); WikidataPlace wikidataPlace = contribution.getWikidataPlace(); if (wikidataPlace != null && wikidataPlace.getImageValue() == null) { if (!contribution.hasInvalidLocation()) { wikidataEditService.createClaim(wikidataPlace, uploadResult.getFilename(), contribution.getMedia().getCaptions()); } else { ViewUtil.showShortToast(this, getString(R.string.wikidata_edit_failure)); Timber .d("Image location and nearby place location mismatched, so Wikidata item won't be edited"); } } saveCompletedContribution(contribution, uploadResult); } private void saveCompletedContribution(Contribution contribution, UploadResult uploadResult) { compositeDisposable.add(mediaClient.getMedia("File:" + uploadResult.getFilename()) .map(contribution::completeWith) .flatMapCompletable( newContribution -> { newContribution.setDateModified(new Date()); return 
contributionDao.saveAndDelete(contribution, newContribution); }) .subscribe()); } @SuppressLint("StringFormatInvalid") @SuppressWarnings("deprecation") private void showFailedNotification(final Contribution contribution) { final String displayTitle = contribution.getMedia().getDisplayTitle(); curNotification.setTicker(getString(R.string.upload_failed_notification_title, displayTitle)) .setContentTitle(getString(R.string.upload_failed_notification_title, displayTitle)) .setContentText(getString(R.string.upload_failed_notification_subtitle)) .setProgress(0, 0, false) .setOngoing(false); notificationManager.notify(contribution.getLocalUri().toString(), NOTIFICATION_UPLOAD_FAILED, curNotification.build()); contribution.setState(Contribution.STATE_FAILED); contribution.setChunkInfo(null); compositeDisposable.add(contributionDao .update(contribution) .subscribeOn(ioThreadScheduler) .subscribe()); } private void showPausedNotification(final Contribution contribution) { final String displayTitle = contribution.getMedia().getDisplayTitle(); curNotification.setTicker(getString(R.string.upload_paused_notification_title, displayTitle)) .setContentTitle(getString(R.string.upload_paused_notification_title, displayTitle)) .setContentText(getString(R.string.upload_paused_notification_subtitle)) .setProgress(0, 0, false) .setOngoing(false); notificationManager.notify(contribution.getLocalUri().toString(), NOTIFICATION_UPLOAD_PAUSED, curNotification.build()); contribution.setState(Contribution.STATE_PAUSED); compositeDisposable.add(contributionDao .update(contribution) .subscribeOn(ioThreadScheduler) .subscribe()); } private String findUniqueFilename(String fileName) throws IOException { String sequenceFileName; for (int sequenceNumber = 1; true; sequenceNumber++) { if (sequenceNumber == 1) { sequenceFileName = fileName; } else { if (fileName.indexOf('.') == -1) { // We really should have appended a filePath type suffix already. // But... we might not. 
sequenceFileName = fileName + " " + sequenceNumber; } else { Pattern regex = Pattern.compile("^(.*)(\\..+?)$"); Matcher regexMatcher = regex.matcher(fileName); sequenceFileName = regexMatcher.replaceAll("$1 " + sequenceNumber + "$2"); } } if (!mediaClient.checkPageExistsUsingTitle(String.format("File:%s", sequenceFileName)) .blockingGet() && !unfinishedUploads.contains(sequenceFileName)) { break; } } return sequenceFileName; } }
/*
 * Copyright 2010 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.core.base.evaluators;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

import org.drools.core.base.BaseEvaluator;
import org.drools.core.base.ValueType;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.ReteEvaluator;
import org.drools.core.rule.VariableRestriction.ObjectVariableContextEntry;
import org.drools.core.rule.VariableRestriction.VariableContextEntry;
import org.drools.core.spi.Evaluator;
import org.drools.core.spi.FieldValue;
import org.drools.core.spi.InternalReadAccessor;

/**
 * This class defines the matches evaluator
 */
public class MatchesEvaluatorsDefinition implements EvaluatorDefinition {

    protected static final String matchesOp = "matches";

    // Populated lazily by init(); null until an instance (of this class or of
    // one of the evaluator inner classes) is created.
    public static Operator MATCHES;
    public static Operator NOT_MATCHES;

    private static String[] SUPPORTED_IDS;

    // Instance initializer: guarantees the operators are registered before
    // the evaluators cache below is built.
    { init(); }

    // Idempotent registration of "matches" / "not matches" with the global
    // operator registry; SUPPORTED_IDS doubles as the "already done" flag.
    static void init() {
        if ( SUPPORTED_IDS == null ) {
            MATCHES = Operator.addOperatorToRegistry( matchesOp, false );
            NOT_MATCHES = Operator.addOperatorToRegistry( matchesOp, true );
            SUPPORTED_IDS = new String[] { matchesOp };
        }
    }

    // Maps (value type, operator) -> evaluator; both evaluators also accept
    // OBJECT_TYPE so untyped fields can be matched as strings.
    private EvaluatorCache evaluators = new EvaluatorCache() {
        private static final long serialVersionUID = 510l;
        {
            addEvaluator( ValueType.STRING_TYPE, MATCHES, StringMatchesEvaluator.INSTANCE );
            addEvaluator( ValueType.OBJECT_TYPE, MATCHES, StringMatchesEvaluator.INSTANCE );
            addEvaluator( ValueType.STRING_TYPE, NOT_MATCHES, StringNotMatchesEvaluator.INSTANCE );
            addEvaluator( ValueType.OBJECT_TYPE, NOT_MATCHES, StringNotMatchesEvaluator.INSTANCE );
        }
    };

    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        evaluators = (EvaluatorCache)in.readObject();
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject(evaluators);
    }

    /**
     * @inheritDoc
     */
    public Evaluator getEvaluator(ValueType type, Operator operator) {
        return this.evaluators.getEvaluator( type, operator );
    }

    /**
     * @inheritDoc
     */
    // parameterText is ignored: the matches evaluator takes no parameters.
    public Evaluator getEvaluator(ValueType type, Operator operator, String parameterText) {
        return this.evaluators.getEvaluator( type, operator );
    }

    public Evaluator getEvaluator(final ValueType type, final String operatorId,
        final boolean isNegated, final String parameterText) {
        return this.getEvaluator( type, operatorId, isNegated, parameterText, Target.FACT, Target.FACT );
    }

    /**
     * @inheritDoc
     */
    // left/right targets are ignored: this definition only targets facts.
    public Evaluator getEvaluator(final ValueType type, final String operatorId,
        final boolean isNegated, final String parameterText, final Target left, final Target right ) {
        return this.evaluators.getEvaluator( type, Operator.determineOperator( operatorId, isNegated ) );
    }

    public String[] getEvaluatorIds() {
        return SUPPORTED_IDS;
    }

    public boolean isNegatable() {
        return true;
    }

    public Target getTarget() {
        return Target.FACT;
    }

    public boolean supportsType(ValueType type) {
        return this.evaluators.supportsType( type );
    }

    /*  *********************************************************
     *           Evaluator Implementations
     *  *********************************************************  */

    /**
     * Evaluator for the positive "matches" operator: true when the left
     * string is non-null and String.matches(rightRegex) holds.
     */
    public static class StringMatchesEvaluator extends BaseEvaluator {

        private static final long serialVersionUID = 400L;
        public final static Evaluator INSTANCE = new StringMatchesEvaluator();

        // Ensure operator registration even when this evaluator is
        // instantiated (e.g. deserialized) before the enclosing definition.
        { MatchesEvaluatorsDefinition.init(); }

        public StringMatchesEvaluator() {
            super( ValueType.STRING_TYPE, MATCHES );
        }

        // fact field (left) vs. literal regex (right); null field -> false.
        public boolean evaluate(ReteEvaluator reteEvaluator, final InternalReadAccessor extractor,
            final InternalFactHandle handle1, final FieldValue fieldValue) {
            final String value1 = (String) extractor.getValue( reteEvaluator, handle1.getObject() );
            final String value2 = (String) fieldValue.getValue();
            if ( value1 == null ) {
                return false;
            }
            return value1.matches( value2 );
        }

        // Cached right value vs. regex extracted from the left fact.
        public boolean evaluateCachedRight(ReteEvaluator reteEvaluator,
            final VariableContextEntry context, final InternalFactHandle left) {
            final String value = (String) ((ObjectVariableContextEntry) context).right;
            if ( value == null ) {
                return false;
            }
            return value.matches( (String) context.declaration.getExtractor().getValue( reteEvaluator, left.getObject() ) );
        }

        // Value extracted from the right fact vs. cached left regex.
        public boolean evaluateCachedLeft(ReteEvaluator reteEvaluator,
            final VariableContextEntry context, final InternalFactHandle right) {
            final String value = (String) context.extractor.getValue( reteEvaluator, right.getObject() );
            if ( value == null ) {
                return false;
            }
            return value.matches( (String) ((ObjectVariableContextEntry) context).left );
        }

        // Fact-to-fact comparison: first fact's value vs. second fact's regex.
        public boolean evaluate(ReteEvaluator reteEvaluator,
            final InternalReadAccessor extractor1, final InternalFactHandle handle1,
            final InternalReadAccessor extractor2, final InternalFactHandle handle2) {
            final Object value1 = extractor1.getValue( reteEvaluator, handle1.getObject() );
            final Object value2 = extractor2.getValue( reteEvaluator, handle2.getObject() );
            if ( value1 == null ) {
                return false;
            }
            return ((String) value1).matches( (String) value2 );
        }

        public String toString() {
            return "String matches";
        }
    }

    /**
     * Evaluator for the negated "not matches" operator. Mirrors
     * {@link StringMatchesEvaluator} with the result inverted — except that a
     * null left value still yields false (not true), matching the positive
     * evaluator's null handling.
     */
    public static class StringNotMatchesEvaluator extends BaseEvaluator {

        private static final long serialVersionUID = 400L;
        public final static Evaluator INSTANCE = new StringNotMatchesEvaluator();

        // Same registration guard as in StringMatchesEvaluator.
        { MatchesEvaluatorsDefinition.init(); }

        public StringNotMatchesEvaluator() {
            super( ValueType.STRING_TYPE, NOT_MATCHES );
        }

        public boolean evaluate(ReteEvaluator reteEvaluator, final InternalReadAccessor extractor,
            final InternalFactHandle handle1, final FieldValue fieldValue) {
            final String value1 = (String) extractor.getValue( reteEvaluator, handle1.getObject() );
            final String value2 = (String) fieldValue.getValue();
            if ( value1 == null ) {
                return false;
            }
            return ! value1.matches( value2 );
        }

        public boolean evaluateCachedRight(ReteEvaluator reteEvaluator,
            final VariableContextEntry context, final InternalFactHandle left) {
            final String value = (String) ((ObjectVariableContextEntry) context).right;
            if ( value == null ) {
                return false;
            }
            return ! value.matches( (String) context.declaration.getExtractor().getValue( reteEvaluator, left.getObject() ) );
        }

        public boolean evaluateCachedLeft(ReteEvaluator reteEvaluator,
            final VariableContextEntry context, final InternalFactHandle right) {
            final String value = (String) context.extractor.getValue( reteEvaluator, right.getObject() );
            if ( value == null ) {
                return false;
            }
            return ! value.matches( (String) ((ObjectVariableContextEntry) context).left );
        }

        public boolean evaluate(ReteEvaluator reteEvaluator,
            final InternalReadAccessor extractor1, final InternalFactHandle handle1,
            final InternalReadAccessor extractor2, final InternalFactHandle handle2) {
            final Object value1 = extractor1.getValue( reteEvaluator, handle1.getObject() );
            final Object value2 = extractor2.getValue( reteEvaluator, handle2.getObject() );
            if ( value1 == null ) {
                return false;
            }
            return ! ((String) value1).matches( (String) value2 );
        }

        public String toString() {
            return "String not matches";
        }
    }
}
package com.jetbrains.edu;

import com.intellij.ide.SaveAndSyncHandler;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.jetbrains.edu.courseFormat.AnswerPlaceholder;
import com.jetbrains.edu.courseFormat.StudyItem;
import com.jetbrains.edu.courseFormat.Task;
import com.jetbrains.edu.courseFormat.TaskFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collection;
import java.util.Comparator;
import java.util.Map;

/**
 * Static helpers for the educational plugin: answer-placeholder ("window")
 * bookkeeping, student-file generation from answer files, and small
 * action/index utilities. Not instantiable.
 */
public class EduUtils {
  private EduUtils() {
  }

  private static final Logger LOG = Logger.getInstance(EduUtils.class.getName());

  // Orders study items by their explicit index field, ascending.
  public static Comparator<StudyItem> INDEX_COMPARATOR = new Comparator<StudyItem>() {
    @Override
    public int compare(StudyItem o1, StudyItem o2) {
      return o1.getIndex() - o2.getIndex();
    }
  };

  /**
   * Makes an action's presentation both visible and enabled (or neither).
   *
   * @param event    the action event whose presentation is toggled
   * @param isEnable true to show and enable, false to hide and disable
   */
  public static void enableAction(@NotNull final AnActionEvent event, boolean isEnable) {
    final Presentation presentation = event.getPresentation();
    presentation.setVisible(isEnable);
    presentation.setEnabled(isEnable);
  }

  /**
   * Gets the zero-based number index from directory names like "task1", "lesson2"
   * (i.e. "task1" yields 0).
   *
   * @param fullName    full name of directory
   * @param logicalName part of name without index
   * @return zero-based index of object, or -1 if the name does not start with
   * {@code logicalName} or the suffix is not a number
   */
  public static int getIndex(@NotNull final String fullName, @NotNull final String logicalName) {
    if (!fullName.startsWith(logicalName)) {
      return -1;
    }
    try {
      return Integer.parseInt(fullName.substring(logicalName.length())) - 1;
    }
    catch (NumberFormatException e) {
      return -1;
    }
  }

  /** Returns true when {@code index} is a valid position within {@code collection}. */
  public static boolean indexIsValid(int index, Collection collection) {
    int size = collection.size();
    return index >= 0 && index < size;
  }

  /**
   * Writes a sibling "_windows" file next to {@code file} listing the current
   * text of every answer placeholder in {@code taskFile}, one
   * "#educational_plugin_window = ..." line per placeholder (empty text for
   * invalid placeholders). Any pre-existing windows file is deleted first,
   * and the document is saved afterwards.
   *
   * @param taskFile  placeholder metadata for {@code file}
   * @param file      the task file whose placeholders are flushed
   * @param useLength true to flush each placeholder's current length,
   *                  false to flush its possible-answer length
   * @return the created windows file, or null when the document is unavailable
   */
  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
  @Nullable
  public static VirtualFile flushWindows(@NotNull final TaskFile taskFile,
                                         @NotNull final VirtualFile file,
                                         boolean useLength) {
    final VirtualFile taskDir = file.getParent();
    VirtualFile fileWindows = null;
    final Document document = FileDocumentManager.getInstance().getDocument(file);
    if (document == null) {
      LOG.debug("Couldn't flush windows");
      return null;
    }
    if (taskDir != null) {
      final String name = file.getNameWithoutExtension() + EduNames.WINDOWS_POSTFIX;
      deleteWindowsFile(taskDir, name);
      PrintWriter printWriter = null;
      try {
        fileWindows = taskDir.createChildData(taskFile, name);
        // NOTE(review): writes through FileOutputStream (platform charset),
        // bypassing the VFS content of the just-created child — confirm
        // intentional.
        printWriter = new PrintWriter(new FileOutputStream(fileWindows.getPath()));
        for (AnswerPlaceholder answerPlaceholder : taskFile.getAnswerPlaceholders()) {
          if (!answerPlaceholder.isValid(document)) {
            printWriter.println("#educational_plugin_window = ");
            continue;
          }
          int start = answerPlaceholder.getRealStartOffset(document);
          int length = useLength ? answerPlaceholder.getLength() : answerPlaceholder.getPossibleAnswerLength();
          final String windowDescription = document.getText(new TextRange(start, start + length));
          printWriter.println("#educational_plugin_window = " + windowDescription);
        }
        // Document writes require a write action on the EDT-managed model.
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
          @Override
          public void run() {
            FileDocumentManager.getInstance().saveDocument(document);
          }
        });
      }
      catch (IOException e) {
        LOG.error(e);
      }
      finally {
        if (printWriter != null) {
          printWriter.close();
        }
        synchronize();
      }
    }
    return fileWindows;
  }

  /** Saves all documents and forces a VFS refresh so on-disk changes are picked up. */
  public static void synchronize() {
    FileDocumentManager.getInstance().saveAllDocuments();
    SaveAndSyncHandler.getInstance().refreshOpenFiles();
    VirtualFileManager.getInstance().refreshWithoutFileWatcher(true);
  }

  /**
   * (Re)creates a student task file from its ".answer" counterpart: copies the
   * answer document's text into a fresh student file, then replaces every
   * answer placeholder region with its task text (iterating placeholders in
   * reverse index order so earlier offsets stay valid while later regions are
   * replaced). All document mutations run inside write actions wrapped in
   * commands, and an EduDocumentListener keeps placeholder offsets in sync
   * during the replacements.
   *
   * @param project       current project
   * @param userFileDir   directory receiving the student file
   * @param answerFileDir directory containing the ".answer" file
   * @param taskFileEntry (file name, task file metadata) pair to materialize
   */
  public static void createStudentFileFromAnswer(@NotNull final Project project,
                                                 @NotNull final VirtualFile userFileDir,
                                                 @NotNull final VirtualFile answerFileDir,
                                                 @NotNull final Map.Entry<String, TaskFile> taskFileEntry) {
    final String name = taskFileEntry.getKey();
    final TaskFile taskFile = taskFileEntry.getValue();
    VirtualFile file = userFileDir.findChild(name);
    // Start from a clean slate: drop any existing student file.
    if (file != null) {
      try {
        file.delete(project);
      }
      catch (IOException e) {
        LOG.error(e);
      }
    }
    try {
      userFileDir.createChildData(project, name);
    }
    catch (IOException e) {
      LOG.error(e);
    }

    file = userFileDir.findChild(name);
    assert file != null;
    // Answer file naming convention: "<base>.answer.<ext>".
    String answerFileName = file.getNameWithoutExtension() + ".answer." + file.getExtension();
    VirtualFile answerFile = answerFileDir.findChild(answerFileName);
    if (answerFile == null) {
      return;
    }
    final Document answerDocument = FileDocumentManager.getInstance().getDocument(answerFile);
    if (answerDocument == null) {
      return;
    }
    final Document document = FileDocumentManager.getInstance().getDocument(file);
    if (document == null) return;

    CommandProcessor.getInstance().executeCommand(project, new Runnable() {
      @Override
      public void run() {
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
          @Override
          public void run() {
            document.replaceString(0, document.getTextLength(), answerDocument.getCharsSequence());
          }
        });
      }
    }, "Create Student File", "Create Student File");
    EduDocumentListener listener = new EduDocumentListener(taskFile, false);
    document.addDocumentListener(listener);
    taskFile.sortAnswerPlaceholders();
    // Replace placeholders back-to-front so offsets of the ones not yet
    // processed are unaffected by earlier (later-in-document) replacements.
    for (int i = taskFile.getAnswerPlaceholders().size() - 1; i >= 0; i--) {
      final AnswerPlaceholder answerPlaceholder = taskFile.getAnswerPlaceholders().get(i);
      if (answerPlaceholder.getRealStartOffset(document) > document.getTextLength()
          || answerPlaceholder.getRealStartOffset(document) + answerPlaceholder.getPossibleAnswerLength() > document.getTextLength()) {
        LOG.error("Wrong startOffset: " + answerPlaceholder.getRealStartOffset(document) + "; document: " + file.getPath());
        return;
      }
      replaceAnswerPlaceholder(project, document, answerPlaceholder);
    }
    CommandProcessor.getInstance().executeCommand(project, new Runnable() {
      @Override
      public void run() {
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
          @Override
          public void run() {
            FileDocumentManager.getInstance().saveDocument(document);
          }
        });
      }
    }, "Create Student File", "Create Student File");
    document.removeDocumentListener(listener);
  }

  /**
   * Replaces one placeholder's possible-answer region with its task text and
   * saves the document, inside a command + write action.
   */
  private static void replaceAnswerPlaceholder(@NotNull final Project project,
                                               @NotNull final Document document,
                                               @NotNull final AnswerPlaceholder answerPlaceholder) {
    final String taskText = answerPlaceholder.getTaskText();
    final int offset = answerPlaceholder.getRealStartOffset(document);
    CommandProcessor.getInstance().executeCommand(project, new Runnable() {
      @Override
      public void run() {
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
          @Override
          public void run() {
            document.replaceString(offset, offset + answerPlaceholder.getPossibleAnswerLength(), taskText);
            FileDocumentManager.getInstance().saveDocument(document);
          }
        });
      }
    }, "Replace Answer Placeholders", "Replace Answer Placeholders");
  }

  /**
   * Deletes the "_windows" companion file of every task file in
   * {@code taskDir} (see {@link #flushWindows}).
   */
  public static void deleteWindowDescriptions(@NotNull final Task task, @NotNull final VirtualFile taskDir) {
    for (Map.Entry<String, TaskFile> entry : task.getTaskFiles().entrySet()) {
      String name = entry.getKey();
      VirtualFile virtualFile = taskDir.findChild(name);
      if (virtualFile == null) {
        continue;
      }
      String windowsFileName = virtualFile.getNameWithoutExtension() + EduNames.WINDOWS_POSTFIX;
      deleteWindowsFile(taskDir, windowsFileName);
    }
  }

  /** Deletes {@code name} under {@code taskDir} inside a write action; missing files are tolerated. */
  private static void deleteWindowsFile(@NotNull final VirtualFile taskDir, @NotNull final String name) {
    final VirtualFile fileWindows = taskDir.findChild(name);
    if (fileWindows != null && fileWindows.exists()) {
      ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
          try {
            fileWindows.delete(taskDir);
          }
          catch (IOException e) {
            LOG.warn("Tried to delete non existed _windows file");
          }
        }
      });
    }
  }
}
/**
*============================================================================
*  The Ohio State University Research Foundation, Emory University,
*  the University of Minnesota Supercomputing Institute
*
*  Distributed under the OSI-approved BSD 3-Clause License.
*  See http://ncip.github.com/cagrid-grid-incubation/LICENSE.txt for details.
*============================================================================
**/
/**
*============================================================================
*============================================================================
**/
package org.cagrid.gaards.ui.csm.groups;

import gov.nih.nci.cagrid.common.Runner;
import gov.nih.nci.cagrid.common.Utils;

import java.awt.CardLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.util.List;

import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JSplitPane;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;

import org.cagrid.gaards.csm.bean.GroupSearchCriteria;
import org.cagrid.gaards.csm.client.Application;
import org.cagrid.gaards.csm.client.Group;
import org.cagrid.gaards.csm.client.LocalGroup;
import org.cagrid.gaards.csm.client.RemoteGroup;
import org.cagrid.gaards.ui.common.ProgressPanel;
import org.cagrid.grape.GridApplication;
import org.cagrid.grape.LookAndFeel;
import org.cagrid.grape.utils.ErrorDialog;


/**
 * Panel for browsing and administering CSM groups for a single application.
 *
 * Left side: a name search box, the matching groups, and group-level actions
 * (create / remove / link / unlink).  Right side: details of the selected
 * group (a CardLayout switching between local- and remote-group views) plus
 * the group's member list with add/remove actions.
 *
 * Long-running CSM calls are pushed off the EDT via
 * {@link GridApplication#getContext()}'s background executor; progress is
 * reported through the shared {@link ProgressPanel}.
 */
public class GroupsPanel extends JPanel {

    /** CardLayout key for the local-group detail card. */
    private static final String LOCAL_GROUP = "LocalGroup";

    /** CardLayout key for the remote (linked) group detail card. */
    private static final String REMOTE_GROUP = "RemoteGroup";

    private static final long serialVersionUID = 1L;

    private JSplitPane jSplitPane = null;
    private JPanel leftPanel = null;
    private JPanel rightPanel = null;
    private JPanel searchPanel = null;
    private JLabel jLabel = null;
    private JTextField groupName = null;
    private JButton groupSearch = null;
    private JScrollPane jScrollPane = null;
    private JPanel groupsPanel = null;
    private GroupsList groups = null;
    private Application application;
    private ProgressPanel progress;
    private JPanel groupPanel = null;
    private LocalGroupPanel localGroup = null;
    private RemoteGroupPanel remoteGroup = null;
    private CardLayout groupContext;
    private JPanel membersPanel = null;
    private JScrollPane memberPane = null;
    private MembersTable members = null;
    private JPanel groupActionPanel = null;
    private JButton addGroup = null;
    private JButton linkRemoteGroup = null;
    private JButton removeGroup = null;
    private JButton unlinkRemoteGroup = null;
    private Group currentGroup;  //  @jve:decl-index=0:
    // Set once a search has completed, so create/link actions know a result
    // list exists that is worth refreshing.
    private boolean successfulSearch = false;
    private JPanel membersActionPanel = null;
    private JButton addMember = null;
    private JButton removeMember = null;


    /**
     * Builds the panel.
     *
     * @param application the CSM application whose groups are managed
     * @param progress    shared progress widget used to report long-running work
     */
    public GroupsPanel(Application application, ProgressPanel progress) {
        super();
        this.application = application;
        this.progress = progress;
        initialize();
        enableGroupActions();
    }


    /**
     * Runs a task on the application's background executor, surfacing any
     * scheduling failure to the user.  (Previously each call site swallowed
     * the exception — {@code t.getMessage()} was computed and discarded — so
     * a task could silently never run.)
     */
    private void executeInBackground(Runner runner) {
        try {
            GridApplication.getContext().executeInBackground(runner);
        } catch (Exception t) {
            ErrorDialog.showError(t);
        }
    }


    /** Disables searching, the results list, and all group/member actions. */
    private void disableAll() {
        getGroupSearch().setEnabled(false);
        getGroups().setEnabled(false);
        disableAllGroupActions();
    }


    /** Disables every group- and member-level action button. */
    private void disableAllGroupActions() {
        getAddGroup().setEnabled(false);
        getRemoveGroup().setEnabled(false);
        getLinkRemoteGroup().setEnabled(false);
        getUnlinkRemoteGroup().setEnabled(false);
        getAddMember().setEnabled(false);
        getRemoveMember().setEnabled(false);
    }


    /**
     * Enables exactly the actions valid for the current selection: a local
     * group can be removed and have its membership edited; a remote (linked)
     * group can only be unlinked; with no selection only create/link apply.
     */
    private void enableGroupActions() {
        getAddGroup().setEnabled(true);
        getLinkRemoteGroup().setEnabled(true);
        if (currentGroup != null) {
            if (currentGroup instanceof LocalGroup) {
                getUnlinkRemoteGroup().setEnabled(false);
                getRemoveGroup().setEnabled(true);
                getAddMember().setEnabled(true);
                getRemoveMember().setEnabled(true);
            } else if (currentGroup instanceof RemoteGroup) {
                getUnlinkRemoteGroup().setEnabled(true);
                getRemoveGroup().setEnabled(false);
                getAddMember().setEnabled(false);
                getRemoveMember().setEnabled(false);
            }
        } else {
            getUnlinkRemoteGroup().setEnabled(false);
            getRemoveGroup().setEnabled(false);
            getAddMember().setEnabled(false);
            getRemoveMember().setEnabled(false);
        }
    }


    /** Re-enables searching, the results list, and the selection-dependent actions. */
    private void enableAll() {
        getGroupSearch().setEnabled(true);
        getGroups().setEnabled(true);
        enableGroupActions();
    }


    /**
     * Clears the search results on the EDT; optionally also clears the
     * currently selected group's detail and member views.
     */
    private void clearAllGroups(final boolean clearSelectedGroup) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run() {
                getGroups().clearGroups();
                if (clearSelectedGroup) {
                    currentGroup = null;
                    getLocalGroup().clearAll();
                    getRemoteGroup().clearAll();
                    getMembers().clearTable();
                }
            }
        });
    }


    /**
     * Shows the given group in the detail area, flipping the CardLayout to
     * the local or remote card as appropriate, and loads its members.
     */
    public void setGroup(GroupEntry grp) {
        disableAll();
        getLocalGroup().clearAll();
        getRemoteGroup().clearAll();
        currentGroup = grp.getGroup();
        if (currentGroup instanceof LocalGroup) {
            getLocalGroup().setGroup((LocalGroup) currentGroup);
            this.groupContext.show(getGroupPanel(), LOCAL_GROUP);
            memberSearch(currentGroup);
        } else if (currentGroup instanceof RemoteGroup) {
            getRemoteGroup().setGroup((RemoteGroup) currentGroup);
            this.groupContext.show(getGroupPanel(), REMOTE_GROUP);
            memberSearch(currentGroup);
        }
        enableAll();
    }


    /**
     * Reloads the member table for the given group, reporting progress and
     * showing an error dialog on failure.
     */
    private synchronized void memberSearch(Group grp) {
        this.progress.showProgress("Searching for members...");
        getMembers().clearTable();
        try {
            List<String> list = grp.getMembers();
            for (int i = 0; i < list.size(); i++) {
                getMembers().addMember(list.get(i));
            }
            this.progress.stopProgress(list.size() + " member(s) found.");
        } catch (Exception e) {
            ErrorDialog.showError(e);
            this.progress.stopProgress("Error");
        }
    }


    /** Searches for groups, clearing the current selection first. */
    private synchronized void groupSearch() {
        groupSearch(true);
    }


    /**
     * Searches this application's groups by the (optional) name filter and
     * populates the results list.
     *
     * @param clearLocalGroup whether to also clear the selected group's
     *                        detail/member views before searching
     */
    private synchronized void groupSearch(boolean clearLocalGroup) {
        this.progress.showProgress("Searching...");
        clearAllGroups(clearLocalGroup);
        try {
            GroupSearchCriteria search = new GroupSearchCriteria();
            search.setApplicationId(this.application.getId());
            search.setName(Utils.clean(getGroupName().getText()));
            final List<Group> list = this.application.getGroups(search);
            getGroups().setGroups(list);
            successfulSearch = true;
            this.progress.stopProgress(list.size() + " groups(s) found.");
        } catch (Exception e) {
            ErrorDialog.showError(e);
            this.progress.stopProgress("Error");
        } finally {
            enableAll();
        }
    }


    /**
     * This method initializes this
     *
     * @return void
     */
    private void initialize() {
        GridBagConstraints gridBagConstraints = new GridBagConstraints();
        gridBagConstraints.fill = GridBagConstraints.BOTH;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.weightx = 1.0;
        this.setSize(700, 500);
        this.setLayout(new GridBagLayout());
        this.add(getJSplitPane(), gridBagConstraints);
    }


    /**
     * This method initializes jSplitPane
     *
     * @return javax.swing.JSplitPane
     */
    private JSplitPane getJSplitPane() {
        if (jSplitPane == null) {
            jSplitPane = new JSplitPane();
            jSplitPane.setLeftComponent(getLeftPanel());
            jSplitPane.setRightComponent(getRightPanel());
            jSplitPane.setDividerLocation(300);
        }
        return jSplitPane;
    }


    /**
     * This method initializes leftPanel (search box, results, group actions)
     *
     * @return javax.swing.JPanel
     */
    private JPanel getLeftPanel() {
        if (leftPanel == null) {
            GridBagConstraints gridBagConstraints9 = new GridBagConstraints();
            gridBagConstraints9.gridx = 0;
            gridBagConstraints9.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints9.fill = GridBagConstraints.HORIZONTAL;
            gridBagConstraints9.weightx = 1.0D;
            gridBagConstraints9.gridy = 2;
            GridBagConstraints gridBagConstraints5 = new GridBagConstraints();
            gridBagConstraints5.gridx = 0;
            gridBagConstraints5.fill = GridBagConstraints.BOTH;
            gridBagConstraints5.weightx = 1.0D;
            gridBagConstraints5.weighty = 1.0D;
            gridBagConstraints5.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints5.gridy = 1;
            GridBagConstraints gridBagConstraints1 = new GridBagConstraints();
            gridBagConstraints1.gridx = 0;
            gridBagConstraints1.anchor = GridBagConstraints.WEST;
            gridBagConstraints1.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints1.fill = GridBagConstraints.HORIZONTAL;
            gridBagConstraints1.weightx = 1.0D;
            gridBagConstraints1.gridy = 0;
            leftPanel = new JPanel();
            leftPanel.setLayout(new GridBagLayout());
            leftPanel.add(getSearchPanel(), gridBagConstraints1);
            leftPanel.add(getGroupsPanel(), gridBagConstraints5);
            leftPanel.add(getGroupActionPanel(), gridBagConstraints9);
        }
        return leftPanel;
    }


    /**
     * This method initializes rightPanel (group detail + member list)
     *
     * @return javax.swing.JPanel
     */
    private JPanel getRightPanel() {
        if (rightPanel == null) {
            GridBagConstraints gridBagConstraints7 = new GridBagConstraints();
            gridBagConstraints7.gridx = 0;
            gridBagConstraints7.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints7.weightx = 1.0D;
            gridBagConstraints7.weighty = 1.0D;
            gridBagConstraints7.fill = GridBagConstraints.BOTH;
            gridBagConstraints7.gridy = 1;
            GridBagConstraints gridBagConstraints6 = new GridBagConstraints();
            gridBagConstraints6.gridx = 0;
            gridBagConstraints6.fill = GridBagConstraints.HORIZONTAL;
            gridBagConstraints6.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints6.weightx = 1.0D;
            gridBagConstraints6.gridy = 0;
            rightPanel = new JPanel();
            rightPanel.setLayout(new GridBagLayout());
            rightPanel.add(getGroupPanel(), gridBagConstraints6);
            rightPanel.add(getMembersPanel(), gridBagConstraints7);
        }
        return rightPanel;
    }


    /**
     * This method initializes searchPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getSearchPanel() {
        if (searchPanel == null) {
            GridBagConstraints gridBagConstraints3 = new GridBagConstraints();
            gridBagConstraints3.gridx = 0;
            gridBagConstraints3.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints3.gridwidth = 2;
            gridBagConstraints3.gridy = 1;
            GridBagConstraints gridBagConstraints2 = new GridBagConstraints();
            gridBagConstraints2.fill = GridBagConstraints.HORIZONTAL;
            gridBagConstraints2.gridx = 1;
            gridBagConstraints2.gridy = 0;
            gridBagConstraints2.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints2.weightx = 1.0;
            jLabel = new JLabel();
            jLabel.setText("Name");
            searchPanel = new JPanel();
            searchPanel.setLayout(new GridBagLayout());
            searchPanel.add(jLabel, new GridBagConstraints());
            searchPanel.add(getGroupName(), gridBagConstraints2);
            searchPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Group Search",
                javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
                javax.swing.border.TitledBorder.DEFAULT_POSITION, null, LookAndFeel.getPanelLabelColor()));
            searchPanel.add(getGroupSearch(), gridBagConstraints3);
        }
        return searchPanel;
    }


    /**
     * This method initializes groupName
     *
     * @return javax.swing.JTextField
     */
    private JTextField getGroupName() {
        if (groupName == null) {
            groupName = new JTextField();
        }
        return groupName;
    }


    /**
     * This method initializes groupSearch
     *
     * @return javax.swing.JButton
     */
    private JButton getGroupSearch() {
        if (groupSearch == null) {
            groupSearch = new JButton();
            groupSearch.setText("Search");
            groupSearch.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    disableAll();
                    executeInBackground(new Runner() {
                        public void execute() {
                            groupSearch();
                        }
                    });
                }
            });
        }
        return groupSearch;
    }


    /**
     * This method initializes jScrollPane
     *
     * @return javax.swing.JScrollPane
     */
    private JScrollPane getJScrollPane() {
        if (jScrollPane == null) {
            jScrollPane = new JScrollPane();
            jScrollPane.setViewportView(getGroups());
        }
        return jScrollPane;
    }


    /**
     * This method initializes groupsPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getGroupsPanel() {
        if (groupsPanel == null) {
            GridBagConstraints gridBagConstraints4 = new GridBagConstraints();
            gridBagConstraints4.fill = GridBagConstraints.BOTH;
            gridBagConstraints4.gridx = 0;
            gridBagConstraints4.gridy = 0;
            gridBagConstraints4.weightx = 1.0;
            gridBagConstraints4.weighty = 1.0;
            gridBagConstraints4.insets = new Insets(2, 2, 2, 2);
            groupsPanel = new JPanel();
            groupsPanel.setLayout(new GridBagLayout());
            groupsPanel.add(getJScrollPane(), gridBagConstraints4);
            groupsPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Groups",
                javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
                javax.swing.border.TitledBorder.DEFAULT_POSITION, null, LookAndFeel.getPanelLabelColor()));
        }
        return groupsPanel;
    }


    /**
     * This method initializes groups
     *
     * @return javax.swing.JList
     */
    private GroupsList getGroups() {
        if (groups == null) {
            groups = new GroupsList();
            groups.addListSelectionListener(new GroupListener(this));
        }
        return groups;
    }


    /**
     * This method initializes groupPanel — the CardLayout host that flips
     * between the local- and remote-group detail views.
     *
     * @return javax.swing.JPanel
     */
    private JPanel getGroupPanel() {
        if (groupPanel == null) {
            groupPanel = new JPanel();
            groupContext = new CardLayout();
            groupPanel.setLayout(groupContext);
            groupPanel.add(getLocalGroup(), getLocalGroup().getName());
            groupPanel.add(getRemoteGroup(), getRemoteGroup().getName());
            groupContext.show(groupPanel, LOCAL_GROUP);
        }
        return groupPanel;
    }


    /**
     * This method initializes localGroup
     *
     * @return javax.swing.JPanel
     */
    private LocalGroupPanel getLocalGroup() {
        if (localGroup == null) {
            localGroup = new LocalGroupPanel(this);
            localGroup.setName(LOCAL_GROUP);
        }
        return localGroup;
    }


    /**
     * This method initializes remoteGroup
     *
     * @return javax.swing.JPanel
     */
    private RemoteGroupPanel getRemoteGroup() {
        if (remoteGroup == null) {
            remoteGroup = new RemoteGroupPanel();
            remoteGroup.setName(REMOTE_GROUP);
        }
        return remoteGroup;
    }


    /**
     * This method initializes membersPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getMembersPanel() {
        if (membersPanel == null) {
            GridBagConstraints gridBagConstraints14 = new GridBagConstraints();
            gridBagConstraints14.gridx = 0;
            gridBagConstraints14.fill = GridBagConstraints.HORIZONTAL;
            gridBagConstraints14.weightx = 1.0D;
            gridBagConstraints14.gridy = 1;
            GridBagConstraints gridBagConstraints8 = new GridBagConstraints();
            gridBagConstraints8.fill = GridBagConstraints.BOTH;
            gridBagConstraints8.weighty = 1.0;
            gridBagConstraints8.gridx = 0;
            gridBagConstraints8.gridy = 0;
            gridBagConstraints8.weightx = 1.0;
            membersPanel = new JPanel();
            membersPanel.setLayout(new GridBagLayout());
            membersPanel.setBorder(javax.swing.BorderFactory.createTitledBorder(null, "Members",
                javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
                javax.swing.border.TitledBorder.DEFAULT_POSITION, null, LookAndFeel.getPanelLabelColor()));
            membersPanel.add(getMemberPane(), gridBagConstraints8);
            membersPanel.add(getMembersActionPanel(), gridBagConstraints14);
        }
        return membersPanel;
    }


    /**
     * This method initializes memberPane
     *
     * @return javax.swing.JScrollPane
     */
    private JScrollPane getMemberPane() {
        if (memberPane == null) {
            memberPane = new JScrollPane();
            memberPane.setViewportView(getMembers());
        }
        return memberPane;
    }


    /**
     * This method initializes members
     *
     * @return javax.swing.JTable
     */
    private MembersTable getMembers() {
        if (members == null) {
            members = new MembersTable();
        }
        return members;
    }


    /**
     * This method initializes groupActionPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getGroupActionPanel() {
        if (groupActionPanel == null) {
            GridBagConstraints gridBagConstraints13 = new GridBagConstraints();
            gridBagConstraints13.gridx = 1;
            gridBagConstraints13.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints13.gridy = 1;
            GridBagConstraints gridBagConstraints12 = new GridBagConstraints();
            gridBagConstraints12.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints12.gridx = 0;
            gridBagConstraints12.gridy = 1;
            gridBagConstraints12.gridwidth = 1;
            GridBagConstraints gridBagConstraints11 = new GridBagConstraints();
            gridBagConstraints11.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints11.gridy = 0;
            gridBagConstraints11.gridx = 1;
            GridBagConstraints gridBagConstraints10 = new GridBagConstraints();
            gridBagConstraints10.insets = new Insets(2, 2, 2, 2);
            gridBagConstraints10.gridy = 0;
            gridBagConstraints10.fill = GridBagConstraints.HORIZONTAL;
            gridBagConstraints10.gridx = 0;
            groupActionPanel = new JPanel();
            groupActionPanel.setLayout(new GridBagLayout());
            groupActionPanel.add(getAddGroup(), gridBagConstraints10);
            groupActionPanel.add(getRemoveGroup(), gridBagConstraints11);
            groupActionPanel.add(getLinkRemoteGroup(), gridBagConstraints12);
            groupActionPanel.add(getUnlinkRemoteGroup(), gridBagConstraints13);
        }
        return groupActionPanel;
    }


    /**
     * This method initializes addGroup
     *
     * @return javax.swing.JButton
     */
    private JButton getAddGroup() {
        if (addGroup == null) {
            addGroup = new JButton();
            addGroup.setText("Create Group");
            addGroup.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    CreateGroupWindow window = new CreateGroupWindow(application);
                    window.setModal(true);
                    GridApplication.getContext().showDialog(window);
                    // Refresh the result list only if a search has populated it.
                    if (window.wasGroupCreated() && (successfulSearch)) {
                        disableAll();
                        executeInBackground(new Runner() {
                            public void execute() {
                                groupSearch();
                            }
                        });
                    }
                }
            });
        }
        return addGroup;
    }


    /**
     * This method initializes linkRemoteGroup
     *
     * @return javax.swing.JButton
     */
    private JButton getLinkRemoteGroup() {
        if (linkRemoteGroup == null) {
            linkRemoteGroup = new JButton();
            linkRemoteGroup.setText("Link Group");
            linkRemoteGroup.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    LinkRemoteGroupWindow window = new LinkRemoteGroupWindow(application);
                    window.setModal(true);
                    GridApplication.getContext().showDialog(window);
                    if (window.wasRemoteGroupLinked() && (successfulSearch)) {
                        disableAll();
                        executeInBackground(new Runner() {
                            public void execute() {
                                groupSearch();
                            }
                        });
                    }
                }
            });
        }
        return linkRemoteGroup;
    }


    /**
     * This method initializes removeGroup
     *
     * @return javax.swing.JButton
     */
    private JButton getRemoveGroup() {
        if (removeGroup == null) {
            removeGroup = new JButton();
            removeGroup.setText("Remove Group");
            removeGroup.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    disableAll();
                    executeInBackground(new Runner() {
                        public void execute() {
                            removeGroup();
                        }
                    });
                }
            });
        }
        return removeGroup;
    }


    /**
     * Removes the selected group if it is local; remote groups cannot be
     * removed (only unlinked).  Refreshes the search results on success.
     */
    private void removeGroup() {
        this.progress.showProgress("Removing group...");
        try {
            GroupEntry entry = (GroupEntry) this.getGroups().getSelectedValue();
            if (entry.getGroup() instanceof LocalGroup) {
                application.removeGroup((LocalGroup) entry.getGroup());
                groupSearch();
                this.progress.stopProgress("Successfully removed the group.");
            } else {
                this.progress.stopProgress("Cannot remove a remote group.");
            }
        } catch (Exception e) {
            ErrorDialog.showError(e);
            this.progress.stopProgress("Error");
        } finally {
            enableAll();
        }
    }


    /**
     * Unlinks the selected group if it is remote; local groups cannot be
     * unlinked.  Refreshes the search results on success.
     */
    private void unlinkRemoteGroup() {
        this.progress.showProgress("Unlinking group...");
        try {
            GroupEntry entry = (GroupEntry) this.getGroups().getSelectedValue();
            if (entry.getGroup() instanceof RemoteGroup) {
                application.unlinkRemoteGroup((RemoteGroup) entry.getGroup());
                groupSearch();
                this.progress.stopProgress("Successfully unlinked the remote group.");
            } else {
                this.progress.stopProgress("Cannot unlink a local group.");
            }
        } catch (Exception e) {
            ErrorDialog.showError(e);
            this.progress.stopProgress("Error");
        } finally {
            enableAll();
        }
    }


    /**
     * This method initializes unlinkRemoteGroup
     *
     * @return javax.swing.JButton
     */
    private JButton getUnlinkRemoteGroup() {
        if (unlinkRemoteGroup == null) {
            unlinkRemoteGroup = new JButton();
            unlinkRemoteGroup.setText("Unlink Group");
            unlinkRemoteGroup.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    disableAll();
                    executeInBackground(new Runner() {
                        public void execute() {
                            unlinkRemoteGroup();
                        }
                    });
                }
            });
        }
        return unlinkRemoteGroup;
    }


    /**
     * Persists changes made to a local group and refreshes the results list
     * (without clearing the current selection).  Called by the detail panel.
     */
    protected void modifyGroup(LocalGroup grp) {
        disableAll();
        this.progress.showProgress("Modifying Group...");
        try {
            grp.modify();
            groupSearch(false);
            this.progress.stopProgress("Successfully modified group.");
        } catch (Exception e) {
            ErrorDialog.showError(e);
            this.progress.stopProgress("Error");
        } finally {
            enableAll();
        }
    }


    /**
     * This method initializes membersActionPanel
     *
     * @return javax.swing.JPanel
     */
    private JPanel getMembersActionPanel() {
        if (membersActionPanel == null) {
            membersActionPanel = new JPanel();
            membersActionPanel.setLayout(new GridBagLayout());
            membersActionPanel.add(getAddMember(), new GridBagConstraints());
            membersActionPanel.add(getRemoveMember(), new GridBagConstraints());
        }
        return membersActionPanel;
    }


    /**
     * This method initializes addMember
     *
     * @return javax.swing.JButton
     */
    private JButton getAddMember() {
        if (addMember == null) {
            addMember = new JButton();
            addMember.setText("Add");
            addMember.setEnabled(false);
            addMember.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    // Membership can only be edited on local groups.
                    if (currentGroup instanceof LocalGroup) {
                        disableAll();
                        AddMemberWindow window = new AddMemberWindow((LocalGroup) currentGroup);
                        window.setModal(true);
                        GridApplication.getContext().showDialog(window);
                        if (window.wasMemberAdded()) {
                            executeInBackground(new Runner() {
                                public void execute() {
                                    memberSearch(currentGroup);
                                    enableAll();
                                    progress.stopProgress("Successfully added member to the group.");
                                }
                            });
                        }
                    }
                }
            });
        }
        return addMember;
    }


    /**
     * This method initializes removeMember
     *
     * @return javax.swing.JButton
     */
    private JButton getRemoveMember() {
        if (removeMember == null) {
            removeMember = new JButton();
            removeMember.setText("Remove");
            removeMember.setEnabled(false);
            removeMember.addActionListener(new java.awt.event.ActionListener() {
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    if (currentGroup instanceof LocalGroup) {
                        disableAll();
                        executeInBackground(new Runner() {
                            public void execute() {
                                progress.showProgress("Removing member from group...");
                                try {
                                    LocalGroup local = (LocalGroup) currentGroup;
                                    local.removeMember(getMembers().getSelectedMember());
                                    memberSearch(currentGroup);
                                    progress.stopProgress("Successfully removed a member from the group.");
                                } catch (Exception ex) {
                                    ErrorDialog.showError(ex);
                                    progress.stopProgress("Error");
                                } finally {
                                    // finally alone is sufficient; the original also
                                    // called enableAll() redundantly inside the try.
                                    enableAll();
                                }
                            }
                        });
                    }
                }
            });
        }
        return removeMember;
    }

}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.components.impl.stores;

import com.intellij.notification.Notification;
import com.intellij.notification.NotificationListener;
import com.intellij.notification.NotificationType;
import com.intellij.notification.NotificationsManager;
import com.intellij.openapi.application.*;
import com.intellij.openapi.components.*;
import com.intellij.openapi.components.store.ReadOnlyModificationException;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.impl.LoadTextUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.project.ex.ProjectEx;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.JDOMUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtil;
import com.intellij.util.LineSeparator;
import com.intellij.util.SmartList;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import org.jdom.Element;
import org.jdom.Parent;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;

import javax.swing.event.HyperlinkEvent;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.LinkedHashSet;
import java.util.List;

/**
 * Static helpers used by the settings-storage layer: serializing component
 * state (JDOM elements) to VFS files, deleting storage files, detecting and
 * preserving line separators, and notifying the user about unresolved path
 * macros in project files.
 */
public class StorageUtil {
  static final Logger LOG = Logger.getInstance(StorageUtil.class);

  // In unit-test mode, captures what would have been written when a
  // "content equals" situation is detected (see writeFile).
  @TestOnly
  public static String DEBUG_LOG = null;

  private static final byte[] XML_PROLOG = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>".getBytes(CharsetToolkit.UTF8_CHARSET);

  // Shared result for loadFile() when the file does not exist:
  // null content + the platform line separator.
  private static final Pair<byte[], String> NON_EXISTENT_FILE_DATA = Pair.create(null, SystemProperties.getLineSeparator());

  private StorageUtil() {
  }

  /**
   * After the project is initialized, checks its path-macro substitutor for
   * unknown macros and notifies the user.  Skipped in headless/unit-test mode.
   */
  public static void checkUnknownMacros(@NotNull final ComponentManager componentManager, @NotNull final Project project) {
    Application application = ApplicationManager.getApplication();
    if (!application.isHeadlessEnvironment() && !application.isUnitTestMode()) {
      // should be invoked last
      StartupManager.getInstance(project).runWhenProjectIsInitialized(new Runnable() {
        @Override
        public void run() {
          TrackingPathMacroSubstitutor substitutor =
            ComponentsPackage.getStateStore(componentManager).getStateStorageManager().getMacroSubstitutor();
          if (substitutor != null) {
            notifyUnknownMacros(substitutor, project, null);
          }
        }
      });
    }
  }

  /**
   * Shows a sticky error notification listing path macros that could not be
   * substituted, skipping macros the user has already been notified about.
   * The "Fix it" link opens the unknown-macro resolution UI.
   *
   * @param componentName component the macros belong to, or null for all
   */
  public static void notifyUnknownMacros(@NotNull TrackingPathMacroSubstitutor substitutor,
                                         @NotNull final Project project,
                                         @Nullable final String componentName) {
    final LinkedHashSet<String> macros = new LinkedHashSet<String>(substitutor.getUnknownMacros(componentName));
    if (macros.isEmpty()) {
      return;
    }

    UIUtil.invokeLaterIfNeeded(new Runnable() {
      @Override
      public void run() {
        // Collect macros already shown in existing notifications so we do not
        // report the same macro twice.
        List<String> notified = null;
        NotificationsManager manager = NotificationsManager.getNotificationsManager();
        for (UnknownMacroNotification notification : manager.getNotificationsOfType(UnknownMacroNotification.class, project)) {
          if (notified == null) {
            notified = new SmartList<String>();
          }
          notified.addAll(notification.getMacros());
        }
        if (!ContainerUtil.isEmpty(notified)) {
          macros.removeAll(notified);
        }

        if (!macros.isEmpty()) {
          LOG.debug("Reporting unknown path macros " + macros + " in component " + componentName);
          String format = "<p><i>%s</i> %s undefined. <a href=\"define\">Fix it</a></p>";
          String productName = ApplicationNamesInfo.getInstance().getProductName();
          String content = String.format(format, StringUtil.join(macros, ", "), macros.size() == 1 ? "is" : "are") +
                           "<br>Path variables are used to substitute absolute paths " +
                           "in " + productName + " project files " +
                           "and allow project file sharing in version control systems.<br>" +
                           "Some of the files describing the current project settings contain unknown path variables " +
                           "and " + productName + " cannot restore those paths.";
          new UnknownMacroNotification("Load Error", "Load error: undefined path variables", content, NotificationType.ERROR,
                                       new NotificationListener() {
                                         @Override
                                         public void hyperlinkUpdate(@NotNull Notification notification,
                                                                     @NotNull HyperlinkEvent event) {
                                           ((ProjectEx)project).checkUnknownMacros(true);
                                         }
                                       }, macros).notify(project);
        }
      }
    });
  }

  /**
   * Writes the serialized element to the given virtual file (creating it from
   * {@code file} if the virtual file is missing or invalid).
   *
   * In debug/unit-test mode, additionally verifies that the new content
   * actually differs from the file — equal content is a caller bug and
   * triggers an IllegalStateException.
   *
   * @return the virtual file that was written
   */
  @NotNull
  public static VirtualFile writeFile(@Nullable File file,
                                      @NotNull Object requestor,
                                      @Nullable VirtualFile virtualFile,
                                      @NotNull Element element,
                                      @NotNull LineSeparator lineSeparator,
                                      boolean prependXmlProlog) throws IOException {
    final VirtualFile result;
    if (file != null && (virtualFile == null || !virtualFile.isValid())) {
      result = getOrCreateVirtualFile(requestor, file);
    }
    else {
      result = virtualFile;
      assert result != null;
    }

    if (LOG.isDebugEnabled() || ApplicationManager.getApplication().isUnitTestMode()) {
      BufferExposingByteArrayOutputStream content = writeToBytes(element, lineSeparator.getSeparatorString());
      if (isEqualContent(result, lineSeparator, content)) {
        throw new IllegalStateException("Content equals, but it must be handled not on this level: " + result.getName());
      }
      else if (DEBUG_LOG != null && ApplicationManager.getApplication().isUnitTestMode()) {
        DEBUG_LOG = result.getPath() + ":\n" + content + "\nOld Content:\n" + LoadTextUtil.loadText(result) + "\n---------";
      }
    }

    doWrite(requestor, result, element, lineSeparator, prependXmlProlog);
    return result;
  }

  /**
   * Performs the actual write under a write action.  {@code content} is either
   * a JDOM {@link Element} or an already-serialized byte buffer.  If the file
   * turns out to be read-only (FileNotFoundException from the VFS), wraps the
   * failure in a {@link ReadOnlyModificationException} carrying a retry
   * session that re-attempts the same write.
   */
  private static void doWrite(@NotNull final Object requestor,
                              @NotNull final VirtualFile file,
                              @NotNull Object content,
                              @NotNull final LineSeparator lineSeparator,
                              final boolean prependXmlProlog) throws IOException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Save " + file.getPresentableUrl());
    }

    AccessToken token = WriteAction.start();
    try {
      OutputStream out = file.getOutputStream(requestor);
      try {
        if (prependXmlProlog) {
          out.write(XML_PROLOG);
          out.write(lineSeparator.getSeparatorBytes());
        }
        if (content instanceof Element) {
          JDOMUtil.writeParent((Element)content, out, lineSeparator.getSeparatorString());
        }
        else {
          ((BufferExposingByteArrayOutputStream)content).writeTo(out);
        }
      }
      finally {
        out.close();
      }
    }
    catch (FileNotFoundException e) {
      // may be element is not long-lived, so, we must write it to byte array
      final BufferExposingByteArrayOutputStream byteArray =
        content instanceof Element ? writeToBytes((Element)content, lineSeparator.getSeparatorString())
                                   : ((BufferExposingByteArrayOutputStream)content);
      throw new ReadOnlyModificationException(file, e, new StateStorage.SaveSession() {
        @Override
        public void save() throws IOException {
          doWrite(requestor, file, byteArray, lineSeparator, prependXmlProlog);
        }
      });
    }
    finally {
      token.finish();
    }
  }

  /**
   * @return true if the file's current bytes equal the optional XML prolog
   *         header followed by {@code content}
   */
  private static boolean isEqualContent(@NotNull VirtualFile result,
                                        @Nullable LineSeparator lineSeparatorIfPrependXmlProlog,
                                        @NotNull BufferExposingByteArrayOutputStream content) throws IOException {
    int headerLength = lineSeparatorIfPrependXmlProlog == null
                       ? 0
                       : XML_PROLOG.length + lineSeparatorIfPrependXmlProlog.getSeparatorBytes().length;
    if (result.getLength() != (headerLength + content.size())) {
      return false;
    }

    byte[] oldContent = result.contentsToByteArray();

    if (lineSeparatorIfPrependXmlProlog != null &&
        (!ArrayUtil.startsWith(oldContent, XML_PROLOG) ||
         !ArrayUtil.startsWith(oldContent, XML_PROLOG.length, lineSeparatorIfPrependXmlProlog.getSeparatorBytes()))) {
      return false;
    }

    for (int i = headerLength; i < oldContent.length; i++) {
      if (oldContent[i] != content.getInternalBuffer()[i - headerLength]) {
        return false;
      }
    }
    return true;
  }

  /**
   * Deletes a storage file.  If no virtual file is known, falls back to
   * deleting the IO file directly; a read-only virtual file yields a
   * {@link ReadOnlyModificationException} with a retry session.
   */
  public static void deleteFile(@NotNull File file,
                                @NotNull final Object requestor,
                                @Nullable final VirtualFile virtualFile) throws IOException {
    if (virtualFile == null) {
      // Merged the original's two consecutive identical null checks.
      LOG.warn("Cannot find virtual file " + file.getAbsolutePath());
      if (file.exists()) {
        FileUtil.delete(file);
      }
    }
    else if (virtualFile.exists()) {
      try {
        deleteFile(requestor, virtualFile);
      }
      catch (FileNotFoundException e) {
        throw new ReadOnlyModificationException(virtualFile, e, new StateStorage.SaveSession() {
          @Override
          public void save() throws IOException {
            deleteFile(requestor, virtualFile);
          }
        });
      }
    }
  }

  /** Deletes the virtual file under a write action. */
  public static void deleteFile(@NotNull Object requestor, @NotNull VirtualFile virtualFile) throws IOException {
    AccessToken token = WriteAction.start();
    try {
      virtualFile.delete(requestor);
    }
    finally {
      token.finish();
    }
  }

  /** Serializes a JDOM tree to bytes using the given line separator. */
  @NotNull
  public static BufferExposingByteArrayOutputStream writeToBytes(@NotNull Parent element,
                                                                 @NotNull String lineSeparator) throws IOException {
    BufferExposingByteArrayOutputStream out = new BufferExposingByteArrayOutputStream(512);
    JDOMUtil.writeParent(element, out, lineSeparator);
    return out;
  }

  /**
   * Finds the virtual file for {@code file}, creating it (and its parent
   * directories) if necessary.  Acquires a write action when not already
   * inside one.
   *
   * @throws IOException if the parent directory cannot be found in the VFS
   */
  @NotNull
  public static VirtualFile getOrCreateVirtualFile(@Nullable final Object requestor, @NotNull final File file) throws IOException {
    VirtualFile virtualFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file);
    if (virtualFile != null) {
      return virtualFile;
    }

    File absoluteFile = file.getAbsoluteFile();
    FileUtil.createParentDirs(absoluteFile);

    File parentFile = absoluteFile.getParentFile();
    // need refresh if the directory has just been created
    final VirtualFile parentVirtualFile =
      StringUtil.isEmpty(parentFile.getPath()) ? null : LocalFileSystem.getInstance().refreshAndFindFileByIoFile(parentFile);
    if (parentVirtualFile == null) {
      throw new IOException(ProjectBundle.message("project.configuration.save.file.not.found", parentFile));
    }

    if (ApplicationManager.getApplication().isWriteAccessAllowed()) {
      return parentVirtualFile.createChildData(requestor, file.getName());
    }
    return ApplicationManager.getApplication().runWriteAction(new ThrowableComputable<VirtualFile, IOException>() {
      @Override
      public VirtualFile compute() throws IOException {
        return parentVirtualFile.createChildData(requestor, file.getName());
      }
    });
  }

  /**
   * @return pair.first - file contents (null if file does not exist), pair.second - file line separators
   */
  @NotNull
  public static Pair<byte[], String> loadFile(@Nullable final VirtualFile file) throws IOException {
    if (file == null || !file.exists()) {
      return NON_EXISTENT_FILE_DATA;
    }

    byte[] bytes = file.contentsToByteArray();
    String lineSeparator = file.getDetectedLineSeparator();
    if (lineSeparator == null) {
      // Fall back to scanning the decoded text for the first separator.
      lineSeparator = detectLineSeparators(CharsetToolkit.UTF8_CHARSET.decode(ByteBuffer.wrap(bytes)), null).getSeparatorString();
    }
    return Pair.create(bytes, lineSeparator);
  }

  /**
   * Scans for the first '\r' or '\n' to classify the text's line separator.
   *
   * @param defaultSeparator value to return when no separator occurs;
   *                         null means "use the system separator"
   */
  @NotNull
  public static LineSeparator detectLineSeparators(@NotNull CharSequence chars, @Nullable LineSeparator defaultSeparator) {
    for (int i = 0, n = chars.length(); i < n; i++) {
      char c = chars.charAt(i);
      if (c == '\r') {
        return LineSeparator.CRLF;
      }
      else if (c == '\n') {
        // if we are here, there was no \r before
        return LineSeparator.LF;
      }
    }
    return defaultSeparator == null ? LineSeparator.getSystemLineSeparator() : defaultSeparator;
  }

  /** Deletes the spec from the stream provider if the provider handles it. */
  public static void delete(@NotNull StreamProvider provider, @NotNull String fileSpec, @NotNull RoamingType type) {
    if (provider.isApplicable(fileSpec, type)) {
      provider.delete(fileSpec, type);
    }
  }

  /**
   * You must call {@link StreamProvider#isApplicable(String, com.intellij.openapi.components.RoamingType)} before
   */
  public static void sendContent(@NotNull StreamProvider provider,
                                 @NotNull String fileSpec,
                                 @NotNull Element element,
                                 @NotNull RoamingType type) throws IOException {
    // we should use standard line-separator (\n) - stream provider can share file content on any OS
    BufferExposingByteArrayOutputStream content = writeToBytes(element, "\n");
    provider.saveContent(fileSpec, content.getInternalBuffer(), content.size(), type);
  }

  /** @return true if the spec denotes a project (.ipr/.idea) or module file */
  public static boolean isProjectOrModuleFile(@NotNull String fileSpec) {
    return StoragePathMacros.PROJECT_FILE.equals(fileSpec) ||
           fileSpec.startsWith(StoragePathMacros.PROJECT_CONFIG_DIR) ||
           fileSpec.equals(StoragePathMacros.MODULE_FILE);
  }

  /**
   * Returns the named child of {@code parent}, creating it under a write
   * action when it does not yet exist.
   */
  @NotNull
  public static VirtualFile getFile(@NotNull String fileName, @NotNull VirtualFile parent, @NotNull Object requestor)
    throws IOException {
    VirtualFile file = parent.findChild(fileName);
    if (file != null) {
      return file;
    }

    AccessToken token = WriteAction.start();
    try {
      return parent.createChildData(requestor, fileName);
    }
    finally {
      token.finish();
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.openstack.swift; import static org.testng.Assert.assertFalse; import static org.testng.Assert.assertTrue; import javax.ws.rs.core.MediaType; import org.jclouds.http.HttpRequest; import org.jclouds.http.HttpResponse; import org.jclouds.http.HttpResponseException; import org.jclouds.openstack.swift.internal.BaseSwiftExpectTest; import org.jclouds.openstack.swift.reference.SwiftHeaders; import org.testng.annotations.Test; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @Test(testName = "SwiftClientExpectTest") public class SwiftClientExpectTest extends BaseSwiftExpectTest<SwiftClient> { @Test public void testContainerExistsWhenResponseIs2xxReturnsTrue() { HttpRequest headContainer = HttpRequest.builder() .method("HEAD") .endpoint(swiftEndpointWithHostReplaced + "/foo") .addHeader("Accept", MediaType.WILDCARD) .addHeader("X-Auth-Token", authToken).build(); HttpResponse headContainerResponse = HttpResponse.builder().statusCode(200).build(); SwiftClient clientWhenContainerExists = requestsSendResponses(authRequest, authResponse, headContainer, headContainerResponse); assertTrue(clientWhenContainerExists.containerExists("foo")); } @Test 
public void testCreateContainerReturnStatus() { String containerName = "foo"; HttpRequest createContainerRequest = HttpRequest.builder() .method("PUT") .endpoint(swiftEndpointWithHostReplaced + "/" + containerName) .addHeader("X-Auth-Token", authToken).build(); HttpResponse createContainerResponse = HttpResponse.builder() .statusCode(201) .build(); SwiftClient clientWhenNonExistingContainer = requestsSendResponses( authRequest, authResponse, createContainerRequest, createContainerResponse); assertTrue(clientWhenNonExistingContainer.createContainer( containerName)); // Try creating the same container again. This should return a status // code of 202 as per the following: // http://docs.openstack.org/api/openstack-object-storage/1.0/content/create-container.html // http://docs.rackspace.com/files/api/v1/cf-devguide/content/Create_Container-d1e1694.html createContainerResponse = HttpResponse.builder().statusCode(202) .build(); SwiftClient clientWhenExistingContainer = requestsSendResponses( authRequest, authResponse, createContainerRequest, createContainerResponse); assertFalse(clientWhenExistingContainer.createContainer(containerName)); } @Test public void testContainerExistsWhenResponseIs404ReturnsFalse() { HttpRequest headContainer = HttpRequest.builder() .method("HEAD") .endpoint(swiftEndpointWithHostReplaced + "/foo") .addHeader("Accept", MediaType.WILDCARD) .addHeader("X-Auth-Token", authToken).build(); HttpResponse headContainerResponse = HttpResponse.builder().statusCode(404).build(); SwiftClient clientWhenContainerDoesntExist = requestsSendResponses(authRequest, authResponse, headContainer, headContainerResponse); assertFalse(clientWhenContainerDoesntExist.containerExists("foo")); } @Test public void testSetContainerMetadataWhenResponseIs2xxReturnsTrue() { HttpRequest setContainerMetadataRequest = HttpRequest.builder() .method("POST") .endpoint(swiftEndpointWithHostReplaced + "/foo") .addHeader(SwiftHeaders.CONTAINER_METADATA_PREFIX + "key", "value") 
.addHeader("X-Auth-Token", authToken).build(); HttpResponse setContainerMetadataResponse = HttpResponse.builder().statusCode(204).build(); SwiftClient clientSetContainerMetadata = requestsSendResponses(authRequest, authResponse, setContainerMetadataRequest, setContainerMetadataResponse); assertTrue(clientSetContainerMetadata.setContainerMetadata("foo", ImmutableMap.<String, String> of("key", "value"))); } @Test(expectedExceptions = HttpResponseException.class) public void testSetContainerMetadataWhenResponseIs400ThrowsException() { HttpRequest setContainerMetadataRequest = HttpRequest.builder() .method("POST") .endpoint(swiftEndpointWithHostReplaced + "/foo") .addHeader(SwiftHeaders.CONTAINER_METADATA_PREFIX, "value") .addHeader("X-Auth-Token", authToken).build(); HttpResponse setContainerMetadataResponse = HttpResponse.builder() .statusCode(400) .message("Metadata name cannot be empty").build(); SwiftClient clientSetContainerMetadata = requestsSendResponses(authRequest, authResponse, setContainerMetadataRequest, setContainerMetadataResponse); clientSetContainerMetadata.setContainerMetadata("foo", ImmutableMap.<String, String> of("", "value")); } @Test public void testSetContainerMetadataWhenResponseIs404ReturnsFalse() { HttpRequest setContainerMetadataRequest = HttpRequest.builder() .method("POST") .endpoint(swiftEndpointWithHostReplaced + "/foo") .addHeader(SwiftHeaders.CONTAINER_METADATA_PREFIX + "key", "value") .addHeader("X-Auth-Token", authToken).build(); HttpResponse setContainerMetadataResponse = HttpResponse.builder() .statusCode(404).build(); SwiftClient clientSetContainerMetadata = requestsSendResponses(authRequest, authResponse, setContainerMetadataRequest, setContainerMetadataResponse); assertFalse(clientSetContainerMetadata.setContainerMetadata("foo", ImmutableMap.<String, String> of("key", "value"))); } @Test public void testDeleteContainerMetadataWhenResponseIs2xxReturnsTrue() { HttpRequest deleteContainerMetadataRequest = HttpRequest.builder() 
.method("POST") .endpoint(swiftEndpointWithHostReplaced + "/foo") .addHeader(SwiftHeaders.CONTAINER_DELETE_METADATA_PREFIX + "bar", "") .addHeader("X-Auth-Token", authToken).build(); HttpResponse deleteContainerMetadataResponse = HttpResponse.builder().statusCode(204).build(); SwiftClient clientDeleteContainerMetadata = requestsSendResponses(authRequest, authResponse, deleteContainerMetadataRequest, deleteContainerMetadataResponse); assertTrue(clientDeleteContainerMetadata.deleteContainerMetadata("foo", ImmutableList.<String> of("bar"))); } @Test public void testDeleteContainerMetadataEmptyWhenResponseIs2xxReturnsTrue() { HttpRequest deleteContainerMetadataRequest = HttpRequest.builder() .method("POST") .endpoint(swiftEndpointWithHostReplaced + "/foo") .addHeader(SwiftHeaders.CONTAINER_DELETE_METADATA_PREFIX, "") .addHeader("X-Auth-Token", authToken).build(); HttpResponse deleteContainerMetadataResponse = HttpResponse.builder().statusCode(204).build(); SwiftClient clientDeleteContainerMetadata = requestsSendResponses(authRequest, authResponse, deleteContainerMetadataRequest, deleteContainerMetadataResponse); assertTrue(clientDeleteContainerMetadata.deleteContainerMetadata("foo", ImmutableList.<String> of(""))); } @Test public void testDeleteContainerMetadataWhenResponseIs404ReturnsFalse() { HttpRequest deleteContainerMetadataRequest = HttpRequest.builder() .method("POST") .endpoint(swiftEndpointWithHostReplaced + "/foo") .addHeader(SwiftHeaders.CONTAINER_DELETE_METADATA_PREFIX + "bar", "") .addHeader("X-Auth-Token", authToken).build(); HttpResponse deleteContainerMetadataResponse = HttpResponse.builder().statusCode(404).build(); SwiftClient clientDeleteContainerMetadata = requestsSendResponses(authRequest, authResponse, deleteContainerMetadataRequest, deleteContainerMetadataResponse); assertFalse(clientDeleteContainerMetadata.deleteContainerMetadata("foo", ImmutableList.<String> of("bar"))); } @Test public void testCopyObjectWhenResponseIs2xxReturnsTrue() { String 
sourceContainer = "bar"; String sourceObject = "foo.txt"; String sourcePath = "/" + sourceContainer + "/" + sourceObject; String destinationContainer = "foo"; String destinationObject = "bar.txt"; String destinationPath = "/" + destinationContainer + "/" + destinationObject; HttpRequest copyObjectRequest = HttpRequest.builder() .method("PUT") .endpoint(swiftEndpointWithHostReplaced + destinationPath) .addHeader(SwiftHeaders.OBJECT_COPY_FROM, sourcePath) .addHeader("X-Auth-Token", authToken).build(); HttpResponse copyObjectResponse = HttpResponse.builder().statusCode(201).build(); SwiftClient clientCopyObject = requestsSendResponses(authRequest, authResponse, copyObjectRequest, copyObjectResponse); assertTrue(clientCopyObject.copyObject(sourceContainer, sourceObject, destinationContainer, destinationObject)); } @Test(expectedExceptions = CopyObjectException.class) public void testCopyObjectWhenResponseIs404ThrowsException() { String sourceContainer = "bar"; String sourceObject = "foo.txt"; String sourcePath = "/" + sourceContainer + "/" + sourceObject; String destinationContainer = "foo"; String destinationObject = "bar.txt"; String destinationPath = "/" + destinationContainer + "/" + destinationObject; HttpRequest copyObjectRequest = HttpRequest.builder() .method("PUT") .endpoint(swiftEndpointWithHostReplaced + destinationPath) .addHeader(SwiftHeaders.OBJECT_COPY_FROM, sourcePath) .addHeader("X-Auth-Token", authToken).build(); HttpResponse copyObjectResponse = HttpResponse.builder().statusCode(404).build(); SwiftClient clientCopyObject = requestsSendResponses(authRequest, authResponse, copyObjectRequest, copyObjectResponse); assertTrue(clientCopyObject.copyObject(sourceContainer, sourceObject, destinationContainer, destinationObject)); } }
package net.sf.jabref.gui.entryeditor; import java.awt.AWTKeyStroke; import java.awt.BorderLayout; import java.awt.Component; import java.awt.Font; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Insets; import java.awt.KeyboardFocusManager; import java.awt.RenderingHints; import java.awt.event.ActionEvent; import java.awt.event.FocusAdapter; import java.awt.event.FocusEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.io.IOException; import java.io.StringReader; import java.io.StringWriter; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map.Entry; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.swing.AbstractAction; import javax.swing.Action; import javax.swing.ActionMap; import javax.swing.InputMap; import javax.swing.JButton; import javax.swing.JComponent; import javax.swing.JLabel; import javax.swing.JMenuItem; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JPopupMenu; import javax.swing.JScrollPane; import javax.swing.JTabbedPane; import javax.swing.JTextArea; import javax.swing.JToolBar; import javax.swing.KeyStroke; import javax.swing.ScrollPaneConstants; import javax.swing.SwingConstants; import javax.swing.SwingUtilities; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import javax.swing.text.JTextComponent; import javafx.application.Platform; import javafx.embed.swing.JFXPanel; import javafx.scene.Scene; import javafx.scene.layout.StackPane; import net.sf.jabref.Globals; import net.sf.jabref.gui.BasePanel; import net.sf.jabref.gui.EntryContainer; import net.sf.jabref.gui.GUIGlobals; import net.sf.jabref.gui.IconTheme; import net.sf.jabref.gui.JabRefFrame; import net.sf.jabref.gui.OSXCompatibleToolbar; import 
net.sf.jabref.gui.actions.Actions; import net.sf.jabref.gui.contentselector.FieldContentSelector; import net.sf.jabref.gui.externalfiles.WriteXMPEntryEditorAction; import net.sf.jabref.gui.fieldeditors.FieldEditor; import net.sf.jabref.gui.fieldeditors.FieldEditorFocusListener; import net.sf.jabref.gui.fieldeditors.FileListEditor; import net.sf.jabref.gui.fieldeditors.JTextAreaWithHighlighting; import net.sf.jabref.gui.fieldeditors.TextField; import net.sf.jabref.gui.help.HelpAction; import net.sf.jabref.gui.importer.fetcher.EntryFetchers; import net.sf.jabref.gui.keyboard.KeyBinding; import net.sf.jabref.gui.menus.ChangeEntryTypeMenu; import net.sf.jabref.gui.mergeentries.EntryFetchAndMergeWorker; import net.sf.jabref.gui.specialfields.SpecialFieldUpdateListener; import net.sf.jabref.gui.undo.NamedCompound; import net.sf.jabref.gui.undo.UndoableChangeType; import net.sf.jabref.gui.undo.UndoableFieldChange; import net.sf.jabref.gui.undo.UndoableKeyChange; import net.sf.jabref.gui.undo.UndoableRemoveEntry; import net.sf.jabref.gui.util.component.CheckBoxMessage; import net.sf.jabref.gui.util.component.VerticalLabelUI; import net.sf.jabref.logic.TypedBibEntry; import net.sf.jabref.logic.autocompleter.AutoCompleter; import net.sf.jabref.logic.bibtex.BibEntryWriter; import net.sf.jabref.logic.bibtex.LatexFieldFormatter; import net.sf.jabref.logic.bibtexkeypattern.BibtexKeyPatternUtil; import net.sf.jabref.logic.help.HelpFile; import net.sf.jabref.logic.importer.EntryBasedFetcher; import net.sf.jabref.logic.importer.ParserResult; import net.sf.jabref.logic.importer.fileformat.BibtexParser; import net.sf.jabref.logic.l10n.Localization; import net.sf.jabref.logic.search.SearchQueryHighlightListener; import net.sf.jabref.logic.util.UpdateField; import net.sf.jabref.model.EntryTypes; import net.sf.jabref.model.FieldChange; import net.sf.jabref.model.database.BibDatabase; import net.sf.jabref.model.database.BibDatabaseMode; import net.sf.jabref.model.entry.BibEntry; import 
net.sf.jabref.model.entry.EntryConverter; import net.sf.jabref.model.entry.EntryType; import net.sf.jabref.model.entry.FieldName; import net.sf.jabref.model.entry.FieldProperty; import net.sf.jabref.model.entry.InternalBibtexFields; import net.sf.jabref.model.entry.MathSciNetId; import net.sf.jabref.model.entry.event.FieldChangedEvent; import net.sf.jabref.preferences.JabRefPreferences; import com.google.common.eventbus.Subscribe; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * GUI component that allows editing of the fields of a BibEntry (i.e. the * one that shows up, when you double click on an entry in the table) * <p> * It hosts the tabs (required, general, optional) and the buttons to the left. * <p> * EntryEditor also registers itself to the event bus, receiving * events whenever a field of the entry changes, enabling the text fields to * update themselves if the change is made from somewhere else. */ public class EntryEditor extends JPanel implements EntryContainer { private static final Log LOGGER = LogFactory.getLog(EntryEditor.class); /** A reference to the entry this object works on. */ private final BibEntry entry; /** The currently displayed type */ private final String displayedBibEntryType; /** The action concerned with closing the window. */ private final CloseAction closeAction = new CloseAction(); /** The action that deletes the current entry, and closes the editor. */ private final DeleteAction deleteAction = new DeleteAction(); /** The action for switching to the next entry. */ private final AbstractAction nextEntryAction = new NextEntryAction(); /** The action for switching to the previous entry. */ private final AbstractAction prevEntryAction = new PrevEntryAction(); /** The action concerned with storing a field value. 
*/ private final StoreFieldAction storeFieldAction = new StoreFieldAction(); /** The action for switching to the next tab */ private final SwitchLeftAction switchLeftAction = new SwitchLeftAction(); /** The action for switching to the previous tab */ private final SwitchRightAction switchRightAction = new SwitchRightAction(); /** The action which generates a BibTeX key for this entry. */ private final GenerateKeyAction generateKeyAction = new GenerateKeyAction(); // UGLY HACK to have a pointer to the fileListEditor to call autoSetLinks() private FileListEditor fileListEditor; private final AutoLinkAction autoLinkAction = new AutoLinkAction(); private final AbstractAction writeXmp; private final SaveDatabaseAction saveDatabaseAction = new SaveDatabaseAction(); private final JPanel srcPanel = new JPanel(); private final JPanel relatedArticlePanel = new JPanel(); private EntryEditorTabRelatedArticles relatedArticlesTab; private JTextArea source; private final JTabbedPane tabbed = new JTabbedPane(); private final JabRefFrame frame; private final BasePanel panel; private final Set<FieldContentSelector> contentSelectors = new HashSet<>(); /** * This can be set to false to stop the source text area from getting updated. This is used in cases where the * source couldn't be parsed, and the user is given the option to edit it. */ private boolean updateSource = true; /** Indicates that we are about to go to the next or previous entry */ private boolean movingToDifferentEntry; private boolean validEntry = true; private final List<Object> tabs = new ArrayList<>(); private boolean lastFieldAccepted = true; /** * This indicates whether the last attempt at parsing the source was successful. It is used to determine whether * the dialog should close; it should stay open if the user received an error message about the source, * whatever he or she chose to do about it. */ private boolean lastSourceAccepted = true; /** This is used to prevent double updates after editing source. 
*/ private String lastSourceStringAccepted; /** The index the source panel has in tabbed. */ private int sourceIndex = -1; private final HelpAction helpAction = new HelpAction(HelpFile.ENTRY_EDITOR, IconTheme.JabRefIcon.HELP.getIcon()); private final UndoAction undoAction = new UndoAction(); private final RedoAction redoAction = new RedoAction(); private final TabListener tabListener = new TabListener(); private final List<SearchQueryHighlightListener> searchListeners = new ArrayList<>(); public EntryEditor(JabRefFrame frame, BasePanel panel, BibEntry entry) { this.frame = frame; this.panel = panel; this.entry = entry; entry.registerListener(this); entry.registerListener(SpecialFieldUpdateListener.getInstance()); displayedBibEntryType = entry.getType(); writeXmp = new WriteXMPEntryEditorAction(panel, this); BorderLayout borderLayout = new BorderLayout(); setLayout(borderLayout); setupToolBar(); setupFieldPanels(); setupSourcePanel(); add(tabbed, BorderLayout.CENTER); tabbed.addChangeListener(tabListener); if (Globals.prefs.getBoolean(JabRefPreferences.DEFAULT_SHOW_SOURCE)) { tabbed.setSelectedIndex(sourceIndex); } updateAllFields(); if (this.fileListEditor != null){ this.fileListEditor.adjustColumnWidth(); } } private void setupFieldPanels() { tabbed.removeAll(); tabs.clear(); EntryType type = EntryTypes.getTypeOrDefault(entry.getType(), this.frame.getCurrentBasePanel().getBibDatabaseContext().getMode()); // required fields addRequiredTab(type); // optional fields Set<String> deprecatedFields = new HashSet<>(EntryConverter.FIELD_ALIASES_TEX_TO_LTX.keySet()); Set<String> usedOptionalFieldsDeprecated = new HashSet<>(deprecatedFields); if ((type.getOptionalFields() != null) && !type.getOptionalFields().isEmpty()) { if (!frame.getCurrentBasePanel().getBibDatabaseContext().isBiblatexMode()) { addOptionalTab(type); } else { addOptionalTab(type); deprecatedFields.add(FieldName.YEAR); deprecatedFields.add(FieldName.MONTH); List<String> secondaryOptionalFields = 
type.getSecondaryOptionalFields(); List<String> optionalFieldsNotPrimaryOrDeprecated = new ArrayList<>(secondaryOptionalFields); optionalFieldsNotPrimaryOrDeprecated.removeAll(deprecatedFields); // Get list of all optional fields of this entry and their aliases Set<String> optionalFieldsAndAliases = new HashSet<>(); for (String field : type.getOptionalFields()) { optionalFieldsAndAliases.add(field); if (EntryConverter.FIELD_ALIASES_LTX_TO_TEX.containsKey(field)) { optionalFieldsAndAliases.add(EntryConverter.FIELD_ALIASES_LTX_TO_TEX.get(field)); } } // Get all optional fields which are deprecated usedOptionalFieldsDeprecated.retainAll(optionalFieldsAndAliases); // Get other deprecated fields usedOptionalFieldsDeprecated.add(FieldName.MONTH); // Add tabs EntryEditorTab optPan2 = new EntryEditorTab(frame, panel, optionalFieldsNotPrimaryOrDeprecated, this, false, true, Localization.lang("Optional fields 2")); if (optPan2.fileListEditor != null) { fileListEditor = optPan2.fileListEditor; } tabbed.addTab(Localization.lang("Optional fields 2"), IconTheme.JabRefIcon.OPTIONAL.getSmallIcon(), optPan2.getPane(), Localization.lang("Show optional fields")); tabs.add(optPan2); if (!usedOptionalFieldsDeprecated.isEmpty()) { EntryEditorTab optPan3; optPan3 = new EntryEditorTab(frame, panel, new ArrayList<>(usedOptionalFieldsDeprecated), this, false, true, Localization.lang("Deprecated fields")); if (optPan3.fileListEditor != null) { fileListEditor = optPan3.fileListEditor; } tabbed.addTab(Localization.lang("Deprecated fields"), IconTheme.JabRefIcon.OPTIONAL.getSmallIcon(), optPan3.getPane(), Localization.lang("Show deprecated BibTeX fields")); tabs.add(optPan3); } } } // other fields List<String> displayedFields = type.getAllFields().stream().map(String::toLowerCase) .collect(Collectors.toList()); List<String> otherFields = entry.getFieldNames().stream().map(String::toLowerCase) .filter(f -> !displayedFields.contains(f)).collect(Collectors.toList()); if 
(!usedOptionalFieldsDeprecated.isEmpty()) { otherFields.removeAll(usedOptionalFieldsDeprecated); } otherFields.remove(BibEntry.KEY_FIELD); otherFields.removeAll(Globals.prefs.getCustomTabFieldNames()); if (!otherFields.isEmpty()) { addOtherTab(otherFields); } // general fields from preferences addGeneralTabs(); // special tabs (like MathSciNet Reviews) addSpecialTabs(); // source tab addSourceTab(); //related articles addRelatedArticlesTab(); } private void addGeneralTabs() { EntryEditorTabList tabList = Globals.prefs.getEntryEditorTabList(); for (int i = 0; i < tabList.getTabCount(); i++) { EntryEditorTab newTab = new EntryEditorTab(frame, panel, tabList.getTabFields(i), this, false, false, tabList.getTabName(i)); if (newTab.fileListEditor != null) { fileListEditor = newTab.fileListEditor; } tabbed.addTab(tabList.getTabName(i), newTab.getPane()); tabs.add(newTab); } } private void addSpecialTabs() { // MathSciNet Review entry.getField(FieldName.MR_NUMBER).ifPresent(mrNumberRaw -> { MathSciNetId mrNumber = MathSciNetId.fromString(mrNumberRaw); JFXPanel reviewPane = new JFXPanel(); tabbed.addTab(Localization.lang("MathSciNet Review"), reviewPane); tabs.add(reviewPane); // Execute on JavaFX Application Thread Platform.runLater(() -> { StackPane root = new MathSciNetPaneView(mrNumber).getPane(); reviewPane.setScene(new Scene(root)); }); }); } private void addSourceTab() { String panelName = Localization.lang("%0 source", panel.getBibDatabaseContext().getMode().getFormattedName()); String toolTip = Localization.lang("Show/edit %0 source", panel.getBibDatabaseContext().getMode().getFormattedName()); srcPanel.setName(panelName); tabbed.addTab(panelName, IconTheme.JabRefIcon.SOURCE.getSmallIcon(), srcPanel, toolTip); tabs.add(srcPanel); sourceIndex = tabs.size() - 1; srcPanel.setFocusCycleRoot(true); } private void addOtherTab(List<String> otherFields) { EntryEditorTab otherPanel = new EntryEditorTab(frame, panel, otherFields, this, false, false, Localization.lang("Other 
fields")); if (otherPanel.fileListEditor != null) { fileListEditor = otherPanel.fileListEditor; } tabbed.addTab(Localization.lang("Other fields"), IconTheme.JabRefIcon.OPTIONAL.getSmallIcon(), otherPanel .getPane(), Localization.lang("Show remaining fields")); tabs.add(otherPanel); } private List<String> addRequiredTab(EntryType type) { List<String> requiredFields = type.getRequiredFieldsFlat(); EntryEditorTab requiredPanel = new EntryEditorTab(frame, panel, requiredFields, this, true, false, Localization.lang("Required fields")); if (requiredPanel.fileListEditor != null) { fileListEditor = requiredPanel.fileListEditor; } tabbed.addTab(Localization.lang("Required fields"), IconTheme.JabRefIcon.REQUIRED.getSmallIcon(), requiredPanel .getPane(), Localization.lang("Show required fields")); tabs.add(requiredPanel); return requiredFields; } /** * Creates the related Article Tab */ private void addRelatedArticlesTab() { relatedArticlePanel.setName(Localization.lang("Related articles")); relatedArticlePanel.setLayout(new BorderLayout()); relatedArticlesTab = new EntryEditorTabRelatedArticles(entry); JScrollPane relatedArticleScrollPane = new JScrollPane(relatedArticlesTab, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); relatedArticlePanel.add(relatedArticleScrollPane, BorderLayout.CENTER); tabbed.addTab(Localization.lang("Related articles"), IconTheme.getImage("mdl"), relatedArticlePanel, Localization.lang("Related articles")); tabs.add(relatedArticlePanel); relatedArticlePanel.setFocusCycleRoot(true); } private void addOptionalTab(EntryType type) { EntryEditorTab optionalPanel = new EntryEditorTab(frame, panel, type.getPrimaryOptionalFields(), this, false, true, Localization.lang("Optional fields")); if (optionalPanel.fileListEditor != null) { fileListEditor = optionalPanel.fileListEditor; } tabbed.addTab(Localization.lang("Optional fields"), IconTheme.JabRefIcon.OPTIONAL.getSmallIcon(), optionalPanel .getPane(), 
Localization.lang("Show optional fields")); tabs.add(optionalPanel); } public String getDisplayedBibEntryType() { return displayedBibEntryType; } /** * @return reference to the currently edited entry */ @Override public BibEntry getEntry() { return entry; } public BibDatabase getDatabase() { return panel.getDatabase(); } private void setupToolBar() { JPanel leftPan = new JPanel(); leftPan.setLayout(new BorderLayout()); JToolBar toolBar = new OSXCompatibleToolbar(SwingConstants.VERTICAL); toolBar.setBorder(null); toolBar.setRollover(true); toolBar.setMargin(new Insets(0, 0, 0, 2)); // The toolbar carries all the key bindings that are valid for the whole window. ActionMap actionMap = toolBar.getActionMap(); InputMap inputMap = toolBar.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW); inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.CLOSE_ENTRY_EDITOR), "close"); actionMap.put("close", closeAction); inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_STORE_FIELD), "store"); actionMap.put("store", getStoreFieldAction()); inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.AUTOGENERATE_BIBTEX_KEYS), "generateKey"); actionMap.put("generateKey", getGenerateKeyAction()); inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.AUTOMATICALLY_LINK_FILES), "autoLink"); actionMap.put("autoLink", autoLinkAction); inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_PREVIOUS_ENTRY), "prev"); actionMap.put("prev", getPrevEntryAction()); inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_NEXT_ENTRY), "next"); actionMap.put("next", getNextEntryAction()); inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.UNDO), "undo"); actionMap.put("undo", undoAction); inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.REDO), "redo"); actionMap.put("redo", redoAction); inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.HELP), "help"); actionMap.put("help", getHelpAction()); toolBar.setFloatable(false); // Add actions (and thus buttons) JButton 
closeBut = new JButton(closeAction); closeBut.setText(null); closeBut.setBorder(null); closeBut.setMargin(new Insets(8, 0, 8, 0)); leftPan.add(closeBut, BorderLayout.NORTH); // Create type-label TypedBibEntry typedEntry = new TypedBibEntry(entry, panel.getBibDatabaseContext().getMode()); leftPan.add(new TypeLabel(typedEntry.getTypeForDisplay()), BorderLayout.CENTER); TypeButton typeButton = new TypeButton(); toolBar.add(typeButton); toolBar.add(getGenerateKeyAction()); toolBar.add(autoLinkAction); toolBar.add(writeXmp); JPopupMenu fetcherPopup = new JPopupMenu(); for(EntryBasedFetcher fetcher : EntryFetchers.getEntryBasedFetchers(Globals.prefs.getImportFormatPreferences())) { fetcherPopup.add(new JMenuItem(new AbstractAction(fetcher.getName()) { @Override public void actionPerformed(ActionEvent e) { new EntryFetchAndMergeWorker(panel, getEntry(), fetcher).execute(); } })); } JButton fetcherButton = new JButton(IconTheme.JabRefIcon.REFRESH.getIcon()); fetcherButton.setToolTipText(Localization.lang("Update with bibliographic information from the web")); fetcherButton.addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { fetcherPopup.show(e.getComponent(), e.getX(), e.getY()); } }); toolBar.add(fetcherButton); toolBar.addSeparator(); toolBar.add(deleteAction); toolBar.add(getPrevEntryAction()); toolBar.add(getNextEntryAction()); toolBar.addSeparator(); toolBar.add(getHelpAction()); Component[] comps = toolBar.getComponents(); for (Component comp : comps) { ((JComponent) comp).setOpaque(false); } leftPan.add(toolBar, BorderLayout.SOUTH); add(leftPan, BorderLayout.WEST); } /** * Rebuild the field tabs. This is called e.g. when a new content selector * has been added. 
*/ public void rebuildPanels() { // Remove change listener, because the rebuilding causes meaningless // events and trouble: tabbed.removeChangeListener(tabListener); setupFieldPanels(); // Add the change listener again: tabbed.addChangeListener(tabListener); revalidate(); repaint(); } /** * getExtra checks the field name against InternalBibtexFields.getFieldExtras(name). * If the name has an entry, the proper component to be shown is created and * returned. Otherwise, null is returned. In addition, e.g. listeners can be * added to the field editor, even if no component is returned. * * @param editor Field editor * @return Component to show, or null if none. */ public Optional<JComponent> getExtra(final FieldEditor editor) { final String fieldName = editor.getFieldName(); final Set<FieldProperty> fieldExtras = InternalBibtexFields.getFieldProperties(fieldName); // timestamp or a other field with datepicker command if (Globals.prefs.get(JabRefPreferences.TIME_STAMP_FIELD).equals(fieldName) || fieldExtras.contains(FieldProperty.DATE)) { // double click AND datefield => insert the current date (today) return FieldExtraComponents.getDateTimeExtraComponent(editor, fieldExtras.contains(FieldProperty.DATE), fieldExtras.contains(FieldProperty.ISO_DATE)); } else if (fieldExtras.contains(FieldProperty.EXTERNAL)) { return FieldExtraComponents.getExternalExtraComponent(panel, editor); } else if (fieldExtras.contains(FieldProperty.JOURNAL_NAME)) { // Add controls for switching between abbreviated and full journal names. // If this field also has a FieldContentSelector, we need to combine these. 
return FieldExtraComponents.getJournalExtraComponent(frame, panel, editor, entry, contentSelectors, getStoreFieldAction()); } else if (!panel.getBibDatabaseContext().getMetaData().getContentSelectorValuesForField(fieldName).isEmpty()) { return FieldExtraComponents.getSelectorExtraComponent(frame, panel, editor, contentSelectors, getStoreFieldAction()); } else if (fieldExtras.contains(FieldProperty.DOI)) { return FieldExtraComponents.getDoiExtraComponent(panel, this, editor); } else if (fieldExtras.contains(FieldProperty.EPRINT)) { return FieldExtraComponents.getEprintExtraComponent(panel, this, editor); } else if (fieldExtras.contains(FieldProperty.ISBN)) { return FieldExtraComponents.getIsbnExtraComponent(panel, this, editor); } else if (fieldExtras.contains(FieldProperty.OWNER)) { return FieldExtraComponents.getSetOwnerExtraComponent(editor, getStoreFieldAction()); } else if (fieldExtras.contains(FieldProperty.YES_NO)) { return FieldExtraComponents.getYesNoExtraComponent(editor, this); } else if (fieldExtras.contains(FieldProperty.MONTH)) { return FieldExtraComponents.getMonthExtraComponent(editor, this, frame.getCurrentBasePanel().getBibDatabaseContext().getMode()); } else if (fieldExtras.contains(FieldProperty.GENDER)) { return FieldExtraComponents.getGenderExtraComponent(editor, this); } else if (fieldExtras.contains(FieldProperty.EDITOR_TYPE)) { return FieldExtraComponents.getEditorTypeExtraComponent(editor, this); } else if (fieldExtras.contains(FieldProperty.PAGINATION)) { return FieldExtraComponents.getPaginationExtraComponent(editor, this); } else if (fieldExtras.contains(FieldProperty.TYPE)) { return FieldExtraComponents.getTypeExtraComponent(editor, this, "patent".equalsIgnoreCase(entry.getType())); } return Optional.empty(); } private void setupSourcePanel() { source = new JTextAreaWithHighlighting(); addSearchListener((SearchQueryHighlightListener) source); source.setEditable(true); source.setLineWrap(true); source.addFocusListener(new 
FieldEditorFocusListener()); // Add the global focus listener, so a menu item can see if this field was focused when an action was called. source.addFocusListener(Globals.getFocusListener()); source.setFont(new Font("Monospaced", Font.PLAIN, Globals.prefs.getInt(JabRefPreferences.FONT_SIZE))); setupJTextComponent(source); updateSource(); JScrollPane scrollPane = new JScrollPane(source, ScrollPaneConstants.VERTICAL_SCROLLBAR_AS_NEEDED, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); srcPanel.setLayout(new BorderLayout()); srcPanel.add(scrollPane, BorderLayout.CENTER); } void addSearchListener(SearchQueryHighlightListener listener) { searchListeners.add(listener); panel.frame().getGlobalSearchBar().getSearchQueryHighlightObservable().addSearchListener(listener); } private void removeSearchListeners() { for (SearchQueryHighlightListener listener : searchListeners) { panel.frame().getGlobalSearchBar().getSearchQueryHighlightObservable().removeSearchListener(listener); } } public void updateSource() { if (updateSource) { try { String srcString = getSourceString(entry, panel.getBibDatabaseContext().getMode()); source.setText(srcString); lastSourceStringAccepted = srcString; // Set the current Entry to be selected. // Fixes the bug of losing selection after, e.g. an autogeneration of a BibTeX key. 
panel.highlightEntry(entry);
        } catch (IOException ex) {
            // The entry cannot be serialized; show the error in the (now read-only) source area instead.
            source.setText(ex.getMessage() + "\n\n" +
                    Localization.lang("Correct the entry, and reopen editor to display/edit source."));
            source.setEditable(false);
            LOGGER.debug("Incorrect entry", ex);
        }
    }
}

/**
 * Serializes the entry to its BibTeX/biblatex source representation.
 */
private static String getSourceString(BibEntry entry, BibDatabaseMode type) throws IOException {
    StringWriter stringWriter = new StringWriter(200);
    LatexFieldFormatter formatter = LatexFieldFormatter
            .buildIgnoreHashes(Globals.prefs.getLatexFieldFormatterPreferences());
    new BibEntryWriter(formatter, false).writeWithoutPrependedNewlines(entry, stringWriter, type);
    return stringWriter.getBuffer().toString();
}

/**
 * NOTE: This method is only used for the source panel, not for the
 * other tabs. Look at EntryEditorTab for the setup of text components
 * in the other tabs.
 */
private void setupJTextComponent(JTextComponent textComponent) {
    // Set up key bindings and focus listener for the FieldEditor.
    InputMap inputMap = textComponent.getInputMap(JComponent.WHEN_FOCUSED);
    ActionMap actionMap = textComponent.getActionMap();

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_STORE_FIELD), "store");
    actionMap.put("store", getStoreFieldAction());

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_NEXT_PANEL), "right");
    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_NEXT_PANEL_2), "right");
    actionMap.put("right", getSwitchRightAction());

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_PREVIOUS_PANEL), "left");
    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.ENTRY_EDITOR_PREVIOUS_PANEL_2), "left");
    actionMap.put("left", getSwitchLeftAction());

    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.HELP), "help");
    actionMap.put("help", getHelpAction());
    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.NEXT_TAB), "nexttab");
    actionMap.put("nexttab", frame.nextTab);
    inputMap.put(Globals.getKeyPrefs().getKey(KeyBinding.PREVIOUS_TAB), "prevtab");
    actionMap.put("prevtab", frame.prevTab);

    // Replace the default focus-traversal keys so plain TAB / Shift-TAB move focus
    // out of the text component instead of inserting a tab character.
    Set<AWTKeyStroke> keys = new HashSet<>(
            textComponent.getFocusTraversalKeys(KeyboardFocusManager.FORWARD_TRAVERSAL_KEYS));
    keys.clear();
    keys.add(AWTKeyStroke.getAWTKeyStroke("pressed TAB"));
    textComponent.setFocusTraversalKeys(KeyboardFocusManager.FORWARD_TRAVERSAL_KEYS, keys);
    keys = new HashSet<>(textComponent
            .getFocusTraversalKeys(KeyboardFocusManager.BACKWARD_TRAVERSAL_KEYS));
    keys.clear();
    keys.add(KeyStroke.getKeyStroke("shift pressed TAB"));
    textComponent.setFocusTraversalKeys(KeyboardFocusManager.BACKWARD_TRAVERSAL_KEYS, keys);

    // Store the edited value whenever the component loses focus.
    textComponent.addFocusListener(new FieldListener());
}

@Override
public void requestFocus() {
    activateVisible();
}

// Gives focus to whatever tab is currently selected (field tab or source panel).
private void activateVisible() {
    Object activeTab = tabs.get(tabbed.getSelectedIndex());

    if (activeTab instanceof EntryEditorTab) {
        ((EntryEditorTab) activeTab).focus();
    } else {
        source.requestFocus();
    }
}

/**
 * Reports the enabled status of the editor, as set by setEnabled()
 */
@Override
public boolean isEnabled() {
    return source.isEnabled();
}

/**
 * Sets the enabled status of all text fields of the entry editor.
 */
@Override
public void setEnabled(boolean enabled) {
    for (Object tab : tabs) {
        if (tab instanceof EntryEditorTab) {
            ((EntryEditorTab) tab).setEnabled(enabled);
        }
    }
    source.setEnabled(enabled);
}

/**
 * Makes sure the current edit is stored.
 */
public void storeCurrentEdit() {
    Component comp = Globals.getFocusListener().getFocused();
    // Only store when the focused component is the source area or a field editor inside this editor.
    if (Objects.equals(comp, source) || ((comp instanceof FieldEditor) && this.isAncestorOf(comp))) {
        if (comp instanceof FieldEditor) {
            ((FieldEditor) comp).clearAutoCompleteSuggestion();
        }
        getStoreFieldAction().actionPerformed(new ActionEvent(comp, 0, ""));
    }
}

/**
 * Returns the name of the currently selected component.
 */
public String getVisiblePanelName() {
    return tabbed.getSelectedComponent().getName();
}

// Selects the tab with the given name; falls back to the first tab when no name matches.
public void setVisiblePanel(String name) {
    for (int i = 0; i < tabbed.getTabCount(); ++i) {
        if ((tabbed.getComponent(i).getName() != null) && tabbed.getComponent(i).getName().equals(name)) {
            tabbed.setSelectedIndex(i);
            return;
        }
    }
    if (tabbed.getTabCount() > 0) {
        tabbed.setSelectedIndex(0);
    }
}

// Switches to the tab containing the given field and focuses that field's editor.
public void setFocusToField(String fieldName) {
    for (Object tab : tabs) {
        if ((tab instanceof EntryEditorTab) && ((EntryEditorTab) tab).getFields().contains(fieldName)) {
            EntryEditorTab entryEditorTab = (EntryEditorTab) tab;
            setVisiblePanel(entryEditorTab.getTabTitle());
            entryEditorTab.setActive(fieldName);
            entryEditorTab.focus();
        }
    }
}

/**
 * Parses the hand-edited source text and applies the differences to the current
 * entry (cite key, removed fields, changed fields, entry type), recording every
 * change in one undoable compound edit.
 *
 * @return true when the source was parsed and stored (or nothing changed),
 *         false when parsing failed and the user was asked to edit or revert.
 */
private boolean storeSource() {
    BibtexParser bibtexParser = new BibtexParser(Globals.prefs.getImportFormatPreferences());

    try {
        ParserResult parserResult = bibtexParser.parse(new StringReader(source.getText()));
        BibDatabase database = parserResult.getDatabase();

        if (database.getEntryCount() > 1) {
            throw new IllegalStateException("More than one entry found.");
        }

        if (!database.hasEntries()) {
            if (parserResult.hasWarnings()) {
                // put the warning into as exception text -> it will be displayed to the user
                throw new IllegalStateException(parserResult.warnings().get(0));
            } else {
                throw new IllegalStateException("No entries found.");
            }
        }

        NamedCompound compound = new NamedCompound(Localization.lang("source edit"));
        BibEntry newEntry = database.getEntries().get(0);
        String newKey = newEntry.getCiteKeyOptional().orElse(null);
        boolean entryChanged = false;
        boolean emptyWarning = (newKey == null) || newKey.isEmpty();

        if (newKey != null) {
            entry.setCiteKey(newKey);
        } else {
            entry.clearCiteKey();
        }

        // First, remove fields that the user has removed.
        for (Entry<String, String> field : entry.getFieldMap().entrySet()) {
            String fieldName = field.getKey();
            String fieldValue = field.getValue();

            if (InternalBibtexFields.isDisplayableField(fieldName) && !newEntry.hasField(fieldName)) {
                compound.addEdit(
                        new UndoableFieldChange(entry, fieldName, fieldValue, null));
                entry.clearField(fieldName);
                entryChanged = true;
            }
        }

        // Then set all fields that have been set by the user.
        for (Entry<String, String> field : newEntry.getFieldMap().entrySet()) {
            String fieldName = field.getKey();
            String oldValue = entry.getField(fieldName).orElse(null);
            String newValue = field.getValue();
            if (!Objects.equals(oldValue, newValue)) {
                // Test if the field is legally set.
                new LatexFieldFormatter(Globals.prefs.getLatexFieldFormatterPreferences())
                        .format(newValue, fieldName);

                compound.addEdit(new UndoableFieldChange(entry, fieldName, oldValue, newValue));
                entry.setField(fieldName, newValue);
                entryChanged = true;
            }
        }

        // See if the user has changed the entry type:
        if (!Objects.equals(newEntry.getType(), entry.getType())) {
            compound.addEdit(new UndoableChangeType(entry, entry.getType(), newEntry.getType()));
            entry.setType(newEntry.getType());
            entryChanged = true;
        }
        compound.end();

        if (!entryChanged) {
            return true;
        }

        panel.getUndoManager().addEdit(compound);

        if (panel.getDatabase().getDuplicationChecker().isDuplicateCiteKeyExisting(entry)) {
            warnDuplicateBibtexkey();
        } else if (emptyWarning) {
            warnEmptyBibtexkey();
        } else {
            panel.output(Localization.lang("Stored entry") + '.');
        }

        lastSourceStringAccepted = source.getText();

        // Update UI
        // TODO: we need to repaint the entryeditor if fields that are not displayed have been added
        panel.updateEntryEditorIfShowing();
        lastSourceAccepted = true;
        updateSource = true;
        // TODO: does updating work properly after source stored?
        panel.markBaseChanged();
        panel.highlightEntry(entry);

        return true;
    } catch (IllegalStateException | IOException ex) {
        // The source couldn't be parsed, so the user is given an
        // error message, and the choice to keep or revert the contents
        // of the source text field.
        updateSource = false;
        lastSourceAccepted = false;
        tabbed.setSelectedComponent(srcPanel);

        Object[] options = {Localization.lang("Edit"),
                Localization.lang("Revert to original source")};

        // NOTE(review): the dialog is only shown when NOT on the EDT — presumably to keep
        // automated/EDT-driven flows from blocking; confirm this is intentional.
        if (!SwingUtilities.isEventDispatchThread()) {
            int answer = JOptionPane.showOptionDialog(frame,
                    Localization.lang("Error") + ": " + ex.getMessage(),
                    Localization.lang("Problem with parsing entry"), JOptionPane.YES_NO_OPTION,
                    JOptionPane.ERROR_MESSAGE, null, options, options[0]);

            if (answer != 0) {
                updateSource = true;
                lastSourceAccepted = true;
                updateSource();
            }
        }

        LOGGER.debug("Incorrect source", ex);

        return false;
    }
}

// Pushes a new field value into every editor tab that displays this field.
private void setField(String fieldName, String newFieldData) {
    for (Object tab : tabs) {
        if (tab instanceof EntryEditorTab) {
            ((EntryEditorTab) tab).updateField(fieldName, newFieldData);
        }
    }
}

/**
 * Sets all the text areas according to the shown entry.
 */
public void updateAllFields() {
    for (Object tab : tabs) {
        if (tab instanceof EntryEditorTab) {
            ((EntryEditorTab) tab).setEntry(entry);
        }
    }
}

// Rebuilds the combo boxes of every content selector shown in this editor.
public void updateAllContentSelectors() {
    if (!contentSelectors.isEmpty()) {
        for (FieldContentSelector contentSelector : contentSelectors) {
            contentSelector.rebuildComboBox();
        }
    }
}

/**
 * Update the JTextArea when a field has changed.
 */
@Subscribe
@SuppressWarnings("unused")
public void listen(FieldChangedEvent fieldChangedEvent) {
    String newValue = fieldChangedEvent.getNewValue() == null ?
"" : fieldChangedEvent.getNewValue();
    // Field updates must happen on the EDT; hop over if the event arrived on another thread.
    if (SwingUtilities.isEventDispatchThread()) {
        setField(fieldChangedEvent.getFieldName(), newValue);
    } else {
        SwingUtilities.invokeLater(() -> setField(fieldChangedEvent.getFieldName(), newValue));
    }
}

// Stores the value of the given editor component via the StoreFieldAction.
public void updateField(final Object sourceObject) {
    getStoreFieldAction().actionPerformed(new ActionEvent(sourceObject, 0, ""));
}

// Marks that the editor is about to show a different entry and detaches listeners for the current one.
public void setMovingToDifferentEntry() {
    movingToDifferentEntry = true;
    unregisterListeners();
}

private void unregisterListeners() {
    entry.unregisterListener(this);
    removeSearchListeners();
}

/** Toolbar button that opens the change-entry-type popup menu. */
private class TypeButton extends JButton {

    public TypeButton() {
        super(IconTheme.JabRefIcon.EDIT.getIcon());
        setToolTipText(Localization.lang("Change entry type"));
        addActionListener(e -> showChangeEntryTypePopupMenu());
    }
}

private void showChangeEntryTypePopupMenu() {
    JPopupMenu typeMenu = new ChangeEntryTypeMenu().getChangeentryTypePopupMenu(panel);
    typeMenu.show(this, 0, 0);
}

/** Vertically rendered label showing the entry type; right-click opens the type-change menu. */
private class TypeLabel extends JLabel {

    public TypeLabel(String type) {
        super(type);
        setUI(new VerticalLabelUI(false));
        setForeground(GUIGlobals.ENTRY_EDITOR_LABEL_COLOR);
        setHorizontalAlignment(SwingConstants.RIGHT);
        setFont(new Font("dialog", Font.ITALIC + Font.BOLD, 18));

        // Add a mouse listener so the user can right-click the type label to change the entry type:
        addMouseListener(new MouseAdapter() {

            @Override
            public void mouseReleased(MouseEvent e) {
                if (e.isPopupTrigger() || (e.getButton() == MouseEvent.BUTTON3)) {
                    handleTypeChange();
                }
            }

            @Override
            public void mouseClicked(MouseEvent e) {
                if (e.isPopupTrigger() || (e.getButton() == MouseEvent.BUTTON3)) {
                    handleTypeChange();
                }
            }

            private void handleTypeChange() {
                showChangeEntryTypePopupMenu();
            }
        });
    }

    @Override
    public void paintComponent(Graphics g) {
        Graphics2D g2 = (Graphics2D) g;
        // Antialias the rotated text for readability.
        g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        super.paintComponent(g2);
    }
}

/**
 * Focus listener that fires the storeFieldAction when a TextArea loses focus.
 */
private class FieldListener extends FocusAdapter {

    @Override
    public void focusLost(FocusEvent event) {
        if (!event.isTemporary()) {
            updateField(event.getSource());
        }
    }
}

/** Reacts to tab switches: refreshes the newly shown tab and moves focus into it. */
private class TabListener implements ChangeListener {

    @Override
    public void stateChanged(ChangeEvent event) {
        // We tell the editor tab to update all its fields. This makes sure they are updated even if the tab we
        // just left contained one or more of the same fields as this one:
        SwingUtilities.invokeLater(() -> {
            Object activeTab = tabs.get(tabbed.getSelectedIndex());
            if (activeTab instanceof EntryEditorTab) {
                ((EntryEditorTab) activeTab).updateAll();
                activateVisible();
            }

            // When the tab "Related articles" gets selected, the request to get the recommendations is started.
            if (((JTabbedPane) event.getSource()).getSelectedComponent().getName()
                    .equals(Localization.lang("Related articles"))) {
                relatedArticlesTab.focus();
            }
        });
    }
}

/** Deletes the shown entry (after optional confirmation) and records an undo edit. */
class DeleteAction extends AbstractAction {

    public DeleteAction() {
        super(Localization.lang("Delete"), IconTheme.JabRefIcon.DELETE_ENTRY.getIcon());
        putValue(Action.SHORT_DESCRIPTION, Localization.lang("Delete entry"));
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        // Show confirmation dialog if not disabled:
        boolean goOn = panel.showDeleteConfirmationDialog(1);

        if (!goOn) {
            return;
        }

        panel.entryEditorClosing(EntryEditor.this);
        panel.getDatabase().removeEntry(entry);
        panel.markBaseChanged();
        panel.getUndoManager().addEdit(new UndoableRemoveEntry(panel.getDatabase(), entry, panel));
        panel.output(Localization.lang("Deleted entry"));
    }
}

/**
 * Closes the editor, first storing the pending edit of whichever panel
 * (source or field tab) is showing; a rejected edit triggers a save instead.
 */
public void close() {
    if (tabbed.getSelectedComponent() == srcPanel) {
        updateField(source);
        if (lastSourceAccepted) {
            panel.entryEditorClosing(EntryEditor.this);
        } else {
            panel.runCommand(Actions.SAVE);
            lastSourceAccepted = true;
        }
    } else {
        if (lastFieldAccepted) {
            panel.entryEditorClosing(EntryEditor.this);
        } else {
            panel.runCommand(Actions.SAVE);
            lastFieldAccepted = true;
        }
    }
}

class CloseAction extends AbstractAction {

    public
CloseAction() {
        super(Localization.lang("Close window"), IconTheme.JabRefIcon.CLOSE.getSmallIcon());
        putValue(Action.SHORT_DESCRIPTION, Localization.lang("Close window"));
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        close();
    }
}

/**
 * Stores the value of the component that triggered the action: either the
 * cite-key text field, a regular field editor, or (implicitly) the source
 * panel. Records undo information and optionally updates the timestamp field.
 */
class StoreFieldAction extends AbstractAction {

    public StoreFieldAction() {
        super("Store field value");
        putValue(Action.SHORT_DESCRIPTION, "Store field value");
    }

    @Override
    public void actionPerformed(ActionEvent event) {
        boolean movingAway = movingToDifferentEntry;
        movingToDifferentEntry = false;

        if (event.getSource() instanceof TextField) {
            // Storage from bibtex key field.
            TextField textField = (TextField) event.getSource();
            String oldValue = entry.getCiteKeyOptional().orElse(null);
            String newValue = textField.getText();

            // An empty text field means "no cite key".
            if (newValue.isEmpty()) {
                newValue = null;
            }

            if (((oldValue == null) && (newValue == null))
                    || (Objects.equals(oldValue, newValue))) {
                return; // No change.
            }

            // Make sure the key is legal:
            String cleaned = BibtexKeyPatternUtil.checkLegalKey(newValue,
                    Globals.prefs.getBoolean(JabRefPreferences.ENFORCE_LEGAL_BIBTEX_KEY));
            if ((cleaned == null) || cleaned.equals(newValue)) {
                textField.setValidBackgroundColor();
            } else {
                // Key contains illegal characters: reject it and flag the field.
                lastFieldAccepted = false;
                textField.setInvalidBackgroundColor();
                if (!SwingUtilities.isEventDispatchThread()) {
                    JOptionPane.showMessageDialog(frame, Localization.lang("Invalid BibTeX key"),
                            Localization.lang("Error setting field"), JOptionPane.ERROR_MESSAGE);
                    requestFocus();
                }
                return;
            }

            if (newValue == null) {
                entry.clearCiteKey();
                warnEmptyBibtexkey();
            } else {
                entry.setCiteKey(newValue);
                boolean isDuplicate = panel.getDatabase().getDuplicationChecker().isDuplicateCiteKeyExisting(entry);
                if (isDuplicate) {
                    warnDuplicateBibtexkey();
                } else {
                    panel.output(Localization.lang("BibTeX key is unique."));
                }
            }

            // Add an UndoableKeyChange to the baseframe's undoManager.
            UndoableKeyChange undoableKeyChange = new UndoableKeyChange(entry, oldValue, newValue);
            if (updateTimeStampIsSet()) {
                // Bundle the key change and the timestamp update into a single undoable step.
                NamedCompound ce = new NamedCompound(undoableKeyChange.getPresentationName());
                ce.addEdit(undoableKeyChange);
                doUpdateTimeStamp().ifPresent(fieldChange -> ce.addEdit(new UndoableFieldChange(fieldChange)));
                ce.end();
                panel.getUndoManager().addEdit(ce);
            } else {
                panel.getUndoManager().addEdit(undoableKeyChange);
            }

            textField.setValidBackgroundColor();

            if (textField.getTextComponent().hasFocus()) {
                textField.setActiveBackgroundColor();
            }
            updateSource();
            panel.markBaseChanged();
        } else if (event.getSource() instanceof FieldEditor) {
            String toSet = null;
            FieldEditor fieldEditor = (FieldEditor) event.getSource();
            boolean set;
            // Trim the whitespace off this value
            String currentText = fieldEditor.getText().trim();
            if (!currentText.isEmpty()) {
                toSet = currentText;
            }

            // We check if the field has changed, since we don't want to
            // mark the base as changed unless we have a real change.
            if (toSet == null) {
                set = entry.hasField(fieldEditor.getFieldName());
            } else {
                set = !((entry.hasField(fieldEditor.getFieldName()))
                        && toSet.equals(entry.getField(fieldEditor.getFieldName()).orElse(null)));
            }

            if (!set) {
                // We set the field and label color.
                fieldEditor.setValidBackgroundColor();
            } else {
                try {
                    // The following statement attempts to write the new contents into a StringWriter, and this will
                    // cause an IOException if the field is not properly formatted. If that happens, the field
                    // is not stored and the textarea turns red.
                    if (toSet != null) {
                        new LatexFieldFormatter(Globals.prefs.getLatexFieldFormatterPreferences()).format(toSet,
                                fieldEditor.getFieldName());
                    }

                    String oldValue = entry.getField(fieldEditor.getFieldName()).orElse(null);

                    if (toSet == null) {
                        entry.clearField(fieldEditor.getFieldName());
                    } else {
                        entry.setField(fieldEditor.getFieldName(), toSet);
                    }

                    fieldEditor.setValidBackgroundColor();

                    // See if we need to update an AutoCompleter instance:
                    AutoCompleter<String> aComp = panel.getAutoCompleters().get(fieldEditor.getFieldName());
                    if (aComp != null) {
                        aComp.addBibtexEntry(entry);
                    }

                    // Add an UndoableFieldChange to the baseframe's undoManager.
                    UndoableFieldChange undoableFieldChange = new UndoableFieldChange(entry,
                            fieldEditor.getFieldName(), oldValue, toSet);
                    if (updateTimeStampIsSet()) {
                        NamedCompound ce = new NamedCompound(undoableFieldChange.getPresentationName());
                        ce.addEdit(undoableFieldChange);
                        doUpdateTimeStamp()
                                .ifPresent(fieldChange -> ce.addEdit(new UndoableFieldChange(fieldChange)));
                        ce.end();
                        panel.getUndoManager().addEdit(ce);
                    } else {
                        panel.getUndoManager().addEdit(undoableFieldChange);
                    }
                    updateSource();
                    panel.markBaseChanged();
                } catch (IllegalArgumentException ex) {
                    // Formatting failed: keep the old value and flag the editor.
                    lastFieldAccepted = false;
                    fieldEditor.setInvalidBackgroundColor();
                    if (!SwingUtilities.isEventDispatchThread()) {
                        JOptionPane.showMessageDialog(frame, Localization.lang("Error") + ": " + ex.getMessage(),
                                Localization.lang("Error setting field"), JOptionPane.ERROR_MESSAGE);
                        LOGGER.debug("Error setting field", ex);
                        requestFocus();
                    }
                }
            }
            if (fieldEditor.getTextComponent().hasFocus()) {
                fieldEditor.setBackground(GUIGlobals.ACTIVE_EDITOR_COLOR);
            }
        } else if (source.isEditable()
                && !source.getText().equals(lastSourceStringAccepted)) {
            // Neither key field nor field editor: store the hand-edited source text.
            validEntry = storeSource();
        }

        // Make sure we scroll to the entry if it moved in the table.
        // Should only be done if this editor is currently showing:
        if (!movingAway && isShowing()) {
            panel.highlightEntry(entry);
        }
    }
}

class SwitchLeftAction extends AbstractAction {

    public SwitchLeftAction() {
        super("Switch to the panel to the left");
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        // Wraps around to the last tab when already on the first one.
        int i = tabbed.getSelectedIndex();
        tabbed.setSelectedIndex(i > 0 ? i - 1 : tabbed.getTabCount() - 1);
        activateVisible();
    }
}

class SwitchRightAction extends AbstractAction {

    public SwitchRightAction() {
        super("Switch to the panel to the right");
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        // Wraps around to the first tab when already on the last one.
        int i = tabbed.getSelectedIndex();
        tabbed.setSelectedIndex(i < (tabbed.getTabCount() - 1) ? i + 1 : 0);
        activateVisible();
    }
}

class NextEntryAction extends AbstractAction {

    public NextEntryAction() {
        super(Localization.lang("Next entry"), IconTheme.JabRefIcon.DOWN.getIcon());
        putValue(Action.SHORT_DESCRIPTION, Localization.lang("Next entry"));
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        panel.selectNextEntry();
    }
}

class PrevEntryAction extends AbstractAction {

    public PrevEntryAction() {
        super(Localization.lang("Previous entry"), IconTheme.JabRefIcon.UP.getIcon());
        putValue(Action.SHORT_DESCRIPTION, Localization.lang("Previous entry"));
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        panel.selectPreviousEntry();
    }
}

/** Generates (or regenerates, after confirmation) the BibTeX key for the shown entry. */
class GenerateKeyAction extends AbstractAction {

    public GenerateKeyAction() {
        super(Localization.lang("Generate BibTeX key"), IconTheme.JabRefIcon.MAKE_KEY.getIcon());
        putValue(Action.SHORT_DESCRIPTION, Localization.lang("Generate BibTeX key"));
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        // 1. get BibEntry for selected index (already have)
        // 2.
// update label

        // Store the current edit in case this action is called during the editing of a field:
        storeCurrentEdit();

        // This is a partial clone of net.sf.jabref.gui.BasePanel.setupActions().new AbstractWorker() {...}.run()
        // this updates the table automatically, on close, but not within the tab
        Optional<String> oldValue = entry.getCiteKeyOptional();

        if (oldValue.isPresent()) {
            if (Globals.prefs.getBoolean(JabRefPreferences.AVOID_OVERWRITING_KEY)) {
                // Preference forbids overwriting an existing key: bail out with a hint.
                panel.output(Localization.lang(
                        "Not overwriting existing key. To change this setting, open Options -> Prefererences -> BibTeX key generator"));
                return;
            } else if (Globals.prefs.getBoolean(JabRefPreferences.WARN_BEFORE_OVERWRITING_KEY)) {
                CheckBoxMessage cbm = new CheckBoxMessage(
                        Localization.lang("The current BibTeX key will be overwritten. Continue?"),
                        Localization.lang("Disable this confirmation dialog"), false);
                int answer = JOptionPane.showConfirmDialog(frame, cbm, Localization.lang("Overwrite key"),
                        JOptionPane.YES_NO_OPTION);
                if (cbm.isSelected()) {
                    Globals.prefs.putBoolean(JabRefPreferences.WARN_BEFORE_OVERWRITING_KEY, false);
                }
                if (answer == JOptionPane.NO_OPTION) {
                    // Ok, break off the operation.
                    return;
                }
            }
        }

        BibtexKeyPatternUtil.makeAndSetLabel(panel.getBibDatabaseContext().getMetaData()
                .getCiteKeyPattern(Globals.prefs.getBibtexKeyPatternPreferences().getKeyPattern()),
                panel.getDatabase(), entry, Globals.prefs.getBibtexKeyPatternPreferences());

        // Store undo information:
        panel.getUndoManager().addEdit(
                new UndoableKeyChange(entry, oldValue.orElse(null),
                        entry.getCiteKeyOptional().get())); // Cite key always set here

        // here we update the field
        String bibtexKeyData = entry.getCiteKeyOptional().get();
        setField(BibEntry.KEY_FIELD, bibtexKeyData);
        updateSource();
        panel.markBaseChanged();
    }
}

class UndoAction extends AbstractAction {

    public UndoAction() {
        super("Undo", IconTheme.JabRefIcon.UNDO.getIcon());
        putValue(Action.SHORT_DESCRIPTION, "Undo");
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        panel.runCommand(Actions.UNDO);
    }
}

class RedoAction extends AbstractAction {

    public RedoAction() {
        super("Redo", IconTheme.JabRefIcon.REDO.getIcon());
        putValue(Action.SHORT_DESCRIPTION, "Redo");
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        panel.runCommand(Actions.REDO);
    }
}

/** Stores the active edit, then saves the database if the entry is valid. */
class SaveDatabaseAction extends AbstractAction {

    public SaveDatabaseAction() {
        super("Save database");
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        Object activeTab = tabs.get(tabbed.getSelectedIndex());
        if (activeTab instanceof EntryEditorTab) {
            // Normal panel.
            EntryEditorTab tab = (EntryEditorTab) activeTab;
            FieldEditor fieldEditor = tab.getActive();
            fieldEditor.clearAutoCompleteSuggestion();
            updateField(fieldEditor);
        } else {
            // Source panel.
            updateField(activeTab);
        }

        if (validEntry) {
            panel.runCommand(Actions.SAVE);
        }
    }
}

private void warnDuplicateBibtexkey() {
    panel.output(Localization.lang("Duplicate BibTeX key") + ". "
            + Localization.lang("Grouping may not work for this entry."));
}

private void warnEmptyBibtexkey() {
    panel.output(Localization.lang("Empty BibTeX key") + ". "
            + Localization.lang("Grouping may not work for this entry."));
}

public AbstractAction getNextEntryAction() {
    return nextEntryAction;
}

public AbstractAction getPrevEntryAction() {
    return prevEntryAction;
}

public SwitchLeftAction getSwitchLeftAction() {
    return switchLeftAction;
}

public SwitchRightAction getSwitchRightAction() {
    return switchRightAction;
}

public SaveDatabaseAction getSaveDatabaseAction() {
    return saveDatabaseAction;
}

public HelpAction getHelpAction() {
    return helpAction;
}

public GenerateKeyAction getGenerateKeyAction() {
    return generateKeyAction;
}

public StoreFieldAction getStoreFieldAction() {
    return storeFieldAction;
}

/** Triggers automatic file-link discovery for the shown entry via the file list editor. */
private class AutoLinkAction extends AbstractAction {

    public AutoLinkAction() {
        putValue(Action.SMALL_ICON, IconTheme.JabRefIcon.AUTO_FILE_LINK.getIcon());
        putValue(Action.SHORT_DESCRIPTION,
                Localization.lang("Automatically set file links for this entry") + " (Alt-F)");
    }

    @Override
    public void actionPerformed(ActionEvent event) {
        FileListEditor localFileListEditor = EntryEditor.this.fileListEditor;
        if (localFileListEditor == null) {
            LOGGER.warn("No file list editor found.");
        } else {
            localFileListEditor.autoSetLinks();
        }
    }
}

// True when both the "use timestamp" and "update timestamp" preferences are enabled.
private boolean updateTimeStampIsSet() {
    return Globals.prefs.getBoolean(JabRefPreferences.USE_TIME_STAMP)
            && Globals.prefs.getBoolean(JabRefPreferences.UPDATE_TIMESTAMP);
}

/**
 * Updates the timestamp of the given entry and returns the FieldChange
 */
private Optional<FieldChange> doUpdateTimeStamp() {
    String timeStampField = Globals.prefs.get(JabRefPreferences.TIME_STAMP_FIELD);
    String timeStampFormat = Globals.prefs.get(JabRefPreferences.TIME_STAMP_FORMAT);
    String timestamp = DateTimeFormatter.ofPattern(timeStampFormat).format(LocalDateTime.now());
    return UpdateField.updateField(entry, timeStampField, timestamp);
}
}
/* * Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. */ package com.intellij.debugger.ui.impl.watch; import com.intellij.codeInsight.ChangeContextUtil; import com.intellij.debugger.DebuggerBundle; import com.intellij.debugger.codeinsight.RuntimeTypeEvaluator; import com.intellij.debugger.engine.evaluation.EvaluateException; import com.intellij.debugger.engine.evaluation.TextWithImports; import com.intellij.debugger.engine.evaluation.TextWithImportsImpl; import com.intellij.debugger.impl.DebuggerContextImpl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Key; import com.intellij.pom.java.LanguageLevel; import com.intellij.psi.*; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.PsiUtil; import com.intellij.util.IncorrectOperationException; import com.intellij.util.containers.SmartHashSet; import com.sun.jdi.ObjectReference; import com.sun.jdi.ReferenceType; import com.sun.jdi.Value; import org.jetbrains.annotations.Nullable; import java.util.Set; public class DebuggerTreeNodeExpression { private static final Logger LOG = Logger.getInstance("#com.intellij.debugger.ui.impl.watch.DebuggerTreeNodeExpression"); // private static PsiExpression beautifyExpression(PsiExpression expression) throws IncorrectOperationException { // final PsiElementFactory elementFactory = expression.getManager().getElementFactory(); // final PsiParenthesizedExpression utility = (PsiParenthesizedExpression)elementFactory.createExpressionFromText( // "(expr)", expression.getContext()); // utility.getExpression().replace(expression); // // PsiRecursiveElementVisitor visitor = new PsiRecursiveElementVisitor() { // @Override public void visitTypeCastExpression(PsiTypeCastExpression expression) { // try { // 
super.visitTypeCastExpression(expression); // // PsiElement parent; // PsiElement toBeReplaced = expression; // for (parent = expression.getParent(); // parent instanceof PsiParenthesizedExpression && parent != utility; // parent = parent.getParent()) { // toBeReplaced = parent; // } // // if (parent instanceof PsiReferenceExpression) { // PsiReferenceExpression reference = ((PsiReferenceExpression)parent); // //((TypeCast)).member // PsiElement oldResolved = reference.resolve(); // // if (oldResolved != null) { // PsiReferenceExpression newReference = ((PsiReferenceExpression)reference.copy()); // newReference.getQualifierExpression().replace(expression.getOperand()); // PsiElement newResolved = newReference.resolve(); // // if (oldResolved == newResolved) { // toBeReplaced.replace(expression.getOperand()); // } // else if (newResolved instanceof PsiMethod && oldResolved instanceof PsiMethod) { // if (isSuperMethod((PsiMethod)newResolved, (PsiMethod)oldResolved)) { // toBeReplaced.replace(expression.getOperand()); // } // } // } // } // else { // toBeReplaced.replace(expression.getOperand()); // } // } // catch (IncorrectOperationException e) { // throw new IncorrectOperationRuntimeException(e); // } // } // // @Override public void visitReferenceExpression(PsiReferenceExpression expression) { // expression.acceptChildren(this); // // try { // JavaResolveResult resolveResult = expression.advancedResolve(false); // // PsiElement oldResolved = resolveResult.getElement(); // // if(oldResolved == null) return; // // PsiReferenceExpression newReference; // if (expression instanceof PsiMethodCallExpression) { // int length = expression.getQualifierExpression().getTextRange().getLength(); // PsiMethodCallExpression methodCall = (PsiMethodCallExpression)elementFactory.createExpressionFromText( // expression.getText().substring(length), expression.getContext()); // newReference = methodCall.getMethodExpression(); // } // else { // newReference = // 
(PsiReferenceExpression)elementFactory.createExpressionFromText(expression.getReferenceName(),
//                                                                  expression.getContext());
//        }
//
//        PsiElement newResolved = newReference.resolve();
//        if (oldResolved == newResolved) {
//          expression.replace(newReference);
//        }
//      }
//      catch (IncorrectOperationException e) {
//        LOG.debug(e);
//      }
//    }
//  };
//
//  try {
//    utility.accept(visitor);
//  }
//  catch (IncorrectOperationRuntimeException e) {
//    throw e.getException();
//  }
//  return utility.getExpression();
//}

  /**
   * Returns {@code true} if {@code superMethod} is among the direct or transitive
   * super methods of {@code overridingMethod} (walks the hierarchy recursively).
   */
  private static boolean isSuperMethod(PsiMethod superMethod, PsiMethod overridingMethod) {
    PsiMethod[] superMethods = overridingMethod.findSuperMethods();
    for (PsiMethod method : superMethods) {
      if (method == superMethod || isSuperMethod(superMethod, method)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Rebinds the references in {@code expressionWithThis} so that occurrences of {@code this}
   * are evaluated against {@code howToEvaluateThis} instead of the lexical context.
   * <p>
   * If the declared type of {@code howToEvaluateThis} is not the context class (or an inheritor
   * of it, or the synthetic array class for array values), the replacement expression is first
   * wrapped in a cast to the value's runtime type via {@link #castToRuntimeType}.
   *
   * @param expressionWithThis     element to transform; anything that is not a {@link PsiExpression} yields {@code null}
   * @param howToEvaluateThis      expression to substitute for {@code this}
   * @param howToEvaluateThisValue runtime value backing {@code howToEvaluateThis}, used to decide on the cast
   * @return the rebuilt expression (carrying over the {@link #ADDITIONAL_IMPORTS_KEY} user data), or {@code null}
   * @throws EvaluateException if the resulting text cannot be parsed back into a valid expression
   */
  @Nullable
  public static PsiExpression substituteThis(@Nullable PsiElement expressionWithThis,
                                             PsiExpression howToEvaluateThis,
                                             Value howToEvaluateThisValue) throws EvaluateException {
    if (!(expressionWithThis instanceof PsiExpression)) return null;
    // work on a copy so the original PSI is never mutated
    PsiExpression result = (PsiExpression)expressionWithThis.copy();

    PsiClass thisClass = PsiTreeUtil.getContextOfType(result, PsiClass.class, true);

    boolean castNeeded = true;

    if (thisClass != null) {
      PsiType type = howToEvaluateThis.getType();
      if(type != null) {
        if(type instanceof PsiClassType) {
          PsiClass psiClass = ((PsiClassType) type).resolve();
          // no cast needed when the declared type already is (a subtype of) the context class
          if(psiClass != null && (psiClass == thisClass || psiClass.isInheritor(thisClass, true))) {
            castNeeded = false;
          }
        }
        else if(type instanceof PsiArrayType) {
          // arrays resolve to the synthetic array class; compare against that
          LanguageLevel languageLevel = PsiUtil.getLanguageLevel(expressionWithThis);
          if(thisClass == JavaPsiFacade.getInstance(expressionWithThis.getProject()).getElementFactory().getArrayClass(languageLevel)) {
            castNeeded = false;
          }
        }
      }
    }

    if (castNeeded) {
      howToEvaluateThis = castToRuntimeType(howToEvaluateThis, howToEvaluateThisValue);
    }

    // encode/decode pair: rewrites 'this' and unqualified member references against the new context
    ChangeContextUtil.encodeContextInfo(result, false);
    PsiExpression psiExpression;
    try {
      psiExpression = (PsiExpression) ChangeContextUtil.decodeContextInfo(result, thisClass, howToEvaluateThis);
    }
    catch (IncorrectOperationException e) {
      throw new EvaluateException(
        DebuggerBundle.message("evaluation.error.invalid.this.expression", result.getText(), howToEvaluateThis.getText()), null);
    }

    try {
      // reparse from text so the expression gets the context of howToEvaluateThis
      PsiExpression res = JavaPsiFacade.getInstance(howToEvaluateThis.getProject()).getElementFactory()
        .createExpressionFromText(psiExpression.getText(), howToEvaluateThis.getContext());
      res.putUserData(ADDITIONAL_IMPORTS_KEY, howToEvaluateThis.getUserData(ADDITIONAL_IMPORTS_KEY));
      return res;
    }
    catch (IncorrectOperationException e) {
      throw new EvaluateException(e.getMessage(), e);
    }
  }

  /** Qualified names of classes that must be imported for a rebuilt expression to compile. */
  public static final Key<Set<String>> ADDITIONAL_IMPORTS_KEY = Key.create("ADDITIONAL_IMPORTS");

  /**
   * Wraps {@code expression} in a parenthesized cast to the runtime type of {@code value},
   * e.g. {@code expr} becomes {@code ((com.foo.RuntimeType)expr)}.
   * <p>
   * Returns the expression unchanged when {@code value} is not an object reference, has no
   * reference type, or no castable runtime type can be determined. The cast type's canonical
   * name is recorded under {@link #ADDITIONAL_IMPORTS_KEY} on the returned expression.
   *
   * @throws EvaluateException if the runtime type name does not form a valid cast expression
   */
  public static PsiExpression castToRuntimeType(PsiExpression expression, Value value) throws EvaluateException {
    if (!(value instanceof ObjectReference)) {
      return expression;
    }

    ReferenceType valueType = ((ObjectReference)value).referenceType();
    if (valueType == null) {
      return expression;
    }

    Project project = expression.getProject();

    PsiType type = RuntimeTypeEvaluator.getCastableRuntimeType(project, value);
    if (type == null) {
      return expression;
    }

    PsiElementFactory elementFactory = JavaPsiFacade.getElementFactory(project);
    String typeName = type.getCanonicalText();
    try {
      // build "((Type)expression)" from text, then splice the real operand in
      PsiParenthesizedExpression parenthExpression = (PsiParenthesizedExpression)elementFactory.createExpressionFromText(
        "((" + typeName + ")expression)", null);
      //noinspection ConstantConditions
      ((PsiTypeCastExpression)parenthExpression.getExpression()).getOperand().replace(expression);
      Set<String> imports = expression.getUserData(ADDITIONAL_IMPORTS_KEY);
      if (imports == null) {
        imports = new SmartHashSet<>();
      }
      imports.add(typeName);
      parenthExpression.putUserData(ADDITIONAL_IMPORTS_KEY, imports);
      return parenthExpression;
    }
    catch (IncorrectOperationException e) {
      throw new EvaluateException(DebuggerBundle.message("error.invalid.type.name", typeName), e);
    }
  }

  /**
   * @param qualifiedName the class qualified name to be resolved against the current execution context
   * @return short name if the class could be resolved using short name,
   * otherwise returns qualifiedName
   */
  public static String normalize(final String qualifiedName, PsiElement contextElement, Project project) {
    if (contextElement == null) {
      return qualifiedName;
    }

    final JavaPsiFacade facade = JavaPsiFacade.getInstance(project);
    PsiClass aClass = facade.findClass(qualifiedName, GlobalSearchScope.allScope(project));
    if (aClass != null) {
      return normalizePsiClass(aClass, contextElement, facade.getResolveHelper());
    }
    return qualifiedName;
  }

  /**
   * Returns the shortest name for {@code psiClass} that still resolves to it from
   * {@code contextElement}: the simple name if unambiguous, otherwise the containing
   * class is prepended recursively, falling back to the fully qualified name.
   */
  private static String normalizePsiClass(PsiClass psiClass, PsiElement contextElement, PsiResolveHelper helper) {
    String name = psiClass.getName();
    PsiClass aClass = helper.resolveReferencedClass(name, contextElement);
    if (psiClass.equals(aClass)) {
      return name;
    }
    PsiClass parentClass = psiClass.getContainingClass();
    if (parentClass != null) {
      return normalizePsiClass(parentClass, contextElement, helper) + "." + name;
    }
    return psiClass.getQualifiedName();
  }

  /**
   * Builds the watch expression for a debugger tree node.
   * NOTE(review): evaluation of {@link ValueDescriptorImpl} nodes is intentionally
   * disabled here ("Not supported any more"); other descriptor kinds are logged and yield null.
   */
  public static PsiExpression getEvaluationExpression(DebuggerTreeNodeImpl node, DebuggerContextImpl context) throws EvaluateException {
    if(node.getDescriptor() instanceof ValueDescriptorImpl) {
      throw new IllegalStateException("Not supported any more");
      //return ((ValueDescriptorImpl)node.getDescriptor()).getTreeEvaluation(node, context);
    }
    else {
      LOG.error(node.getDescriptor() != null ? node.getDescriptor().getClass().getName() : "null");
      return null;
    }
  }

  /**
   * Computes {@link TextWithImports} for {@code node} inside a committed read action,
   * tunneling any {@link EvaluateException} out of the lambda via a one-element array.
   */
  public static TextWithImports createEvaluationText(final DebuggerTreeNodeImpl node, final DebuggerContextImpl context) throws EvaluateException {
    final EvaluateException[] ex = new EvaluateException[] {null};
    final TextWithImports textWithImports = PsiDocumentManager.getInstance(context.getProject()).commitAndRunReadAction(
      (Computable<TextWithImports>)() -> {
        try {
          final PsiExpression expressionText = getEvaluationExpression(node, context);
          if (expressionText != null) {
            return new TextWithImportsImpl(expressionText);
          }
        }
        catch (EvaluateException e) {
          ex[0] = e;
        }
        return null;
      });
    if (ex[0] != null) {
      throw ex[0];
    }
    return textWithImports;
  }

  /** Unchecked carrier used to smuggle an {@link IncorrectOperationException} out of a PSI visitor. */
  private static class IncorrectOperationRuntimeException extends RuntimeException {
    private final IncorrectOperationException myException;

    public IncorrectOperationRuntimeException(IncorrectOperationException exception) {
      myException = exception;
    }

    public IncorrectOperationException getException() {
      return myException;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.connectors.kafka;

import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
import org.apache.flink.streaming.connectors.kafka.config.OffsetCommitMode;
import org.apache.flink.streaming.connectors.kafka.internals.AbstractFetcher;
import org.apache.flink.streaming.connectors.kafka.internals.AbstractPartitionDiscoverer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaDeserializationSchemaWrapper;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaFetcher;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaPartitionDiscoverer;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicPartition;
import org.apache.flink.streaming.connectors.kafka.internals.KafkaTopicsDescriptor;
import org.apache.flink.util.PropertiesUtil;
import org.apache.flink.util.SerializedValue;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Pattern;

import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.PropertiesUtil.getBoolean;
import static org.apache.flink.util.PropertiesUtil.getLong;

/**
 * The Flink Kafka Consumer is a streaming data source that pulls a parallel data stream from
 * Apache Kafka. The consumer can run in multiple parallel instances, each of which will pull
 * data from one or more Kafka partitions.
 *
 * <p>The Flink Kafka Consumer participates in checkpointing and guarantees that no data is lost
 * during a failure, and that the computation processes elements "exactly once".
 * (Note: These guarantees naturally assume that Kafka itself does not lose any data.)</p>
 *
 * <p>Please note that Flink snapshots the offsets internally as part of its distributed checkpoints. The offsets
 * committed to Kafka are only to bring the outside view of progress in sync with Flink's view
 * of the progress. That way, monitoring and other jobs can get a view of how far the Flink Kafka consumer
 * has consumed a topic.</p>
 *
 * <p>Please refer to Kafka's documentation for the available configuration properties:
 * http://kafka.apache.org/documentation.html#newconsumerconfigs</p>
 */
@PublicEvolving
public class FlinkKafkaConsumer<T> extends FlinkKafkaConsumerBase<T> {

	private static final long serialVersionUID = 1L;

	/** Configuration key to change the polling timeout. **/
	public static final String KEY_POLL_TIMEOUT = "flink.poll-timeout";

	/** From Kafka's Javadoc: The time, in milliseconds, spent waiting in poll if data is not
	 * available. If 0, returns immediately with any records that are available now. */
	public static final long DEFAULT_POLL_TIMEOUT = 100L;

	// ------------------------------------------------------------------------

	/** User-supplied properties for Kafka. **/
	protected final Properties properties;

	/** From Kafka's Javadoc: The time, in milliseconds, spent waiting in poll if data is not
	 * available. If 0, returns immediately with any records that are available now */
	protected final long pollTimeout;

	// ------------------------------------------------------------------------

	/**
	 * Creates a new Kafka streaming source consumer.
	 *
	 * @param topic The name of the topic that should be consumed.
	 * @param valueDeserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
	 * @param props The properties used to configure the Kafka consumer client and the offset handler.
	 */
	public FlinkKafkaConsumer(String topic, DeserializationSchema<T> valueDeserializer, Properties props) {
		this(Collections.singletonList(topic), valueDeserializer, props);
	}

	/**
	 * Creates a new Kafka streaming source consumer.
	 *
	 * <p>This constructor allows passing a {@link KafkaDeserializationSchema} for reading key/value
	 * pairs, offsets, and topic names from Kafka.
	 *
	 * @param topic The name of the topic that should be consumed.
	 * @param deserializer The keyed de-/serializer used to convert between Kafka's byte messages and Flink's objects.
	 * @param props The properties used to configure the Kafka consumer client and the offset handler.
	 */
	public FlinkKafkaConsumer(String topic, KafkaDeserializationSchema<T> deserializer, Properties props) {
		this(Collections.singletonList(topic), deserializer, props);
	}

	/**
	 * Creates a new Kafka streaming source consumer.
	 *
	 * <p>This constructor allows passing multiple topics to the consumer.
	 *
	 * @param topics The Kafka topics to read from.
	 * @param deserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
	 * @param props The properties used to configure the Kafka consumer client and the offset handler.
	 */
	public FlinkKafkaConsumer(List<String> topics, DeserializationSchema<T> deserializer, Properties props) {
		this(topics, new KafkaDeserializationSchemaWrapper<>(deserializer), props);
	}

	/**
	 * Creates a new Kafka streaming source consumer.
	 *
	 * <p>This constructor allows passing multiple topics and a key/value deserialization schema.
	 *
	 * @param topics The Kafka topics to read from.
	 * @param deserializer The keyed de-/serializer used to convert between Kafka's byte messages and Flink's objects.
	 * @param props The properties used to configure the Kafka consumer client and the offset handler.
	 */
	public FlinkKafkaConsumer(List<String> topics, KafkaDeserializationSchema<T> deserializer, Properties props) {
		this(topics, null, deserializer, props);
	}

	/**
	 * Creates a new Kafka streaming source consumer. Use this constructor to
	 * subscribe to multiple topics based on a regular expression pattern.
	 *
	 * <p>If partition discovery is enabled (by setting a non-negative value for
	 * {@link FlinkKafkaConsumer#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
	 * with names matching the pattern will also be subscribed to as they are created on the fly.
	 *
	 * @param subscriptionPattern The regular expression for a pattern of topic names to subscribe to.
	 * @param valueDeserializer The de-/serializer used to convert between Kafka's byte messages and Flink's objects.
	 * @param props The properties used to configure the Kafka consumer client and the offset handler.
	 */
	public FlinkKafkaConsumer(Pattern subscriptionPattern, DeserializationSchema<T> valueDeserializer, Properties props) {
		this(null, subscriptionPattern, new KafkaDeserializationSchemaWrapper<>(valueDeserializer), props);
	}

	/**
	 * Creates a new Kafka streaming source consumer. Use this constructor to
	 * subscribe to multiple topics based on a regular expression pattern.
	 *
	 * <p>If partition discovery is enabled (by setting a non-negative value for
	 * {@link FlinkKafkaConsumer#KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS} in the properties), topics
	 * with names matching the pattern will also be subscribed to as they are created on the fly.
	 *
	 * <p>This constructor allows passing a {@link KafkaDeserializationSchema} for reading key/value
	 * pairs, offsets, and topic names from Kafka.
	 *
	 * @param subscriptionPattern The regular expression for a pattern of topic names to subscribe to.
	 * @param deserializer The keyed de-/serializer used to convert between Kafka's byte messages and Flink's objects.
	 * @param props The properties used to configure the Kafka consumer client and the offset handler.
	 */
	public FlinkKafkaConsumer(Pattern subscriptionPattern, KafkaDeserializationSchema<T> deserializer, Properties props) {
		this(null, subscriptionPattern, deserializer, props);
	}

	/**
	 * Common constructor: exactly one of {@code topics} / {@code subscriptionPattern} is non-null.
	 * Forces the byte-array deserializers onto the properties and resolves the poll timeout.
	 */
	private FlinkKafkaConsumer(
		List<String> topics,
		Pattern subscriptionPattern,
		KafkaDeserializationSchema<T> deserializer,
		Properties props) {

		super(
			topics,
			subscriptionPattern,
			deserializer,
			getLong(
				checkNotNull(props, "props"),
				KEY_PARTITION_DISCOVERY_INTERVAL_MILLIS, PARTITION_DISCOVERY_DISABLED),
			!getBoolean(props, KEY_DISABLE_METRICS, false));

		this.properties = props;
		setDeserializer(this.properties);

		// configure the polling timeout
		try {
			if (properties.containsKey(KEY_POLL_TIMEOUT)) {
				this.pollTimeout = Long.parseLong(properties.getProperty(KEY_POLL_TIMEOUT));
			} else {
				this.pollTimeout = DEFAULT_POLL_TIMEOUT;
			}
		}
		catch (Exception e) {
			throw new IllegalArgumentException("Cannot parse poll timeout for '" + KEY_POLL_TIMEOUT + '\'', e);
		}
	}

	@Override
	protected AbstractFetcher<T, ?> createFetcher(
			SourceContext<T> sourceContext,
			Map<KafkaTopicPartition, Long> assignedPartitionsWithInitialOffsets,
			SerializedValue<WatermarkStrategy<T>> watermarkStrategy,
			StreamingRuntimeContext runtimeContext,
			OffsetCommitMode offsetCommitMode,
			MetricGroup consumerMetricGroup,
			boolean useMetrics) throws Exception {

		// make sure that auto commit is disabled when our offset commit mode is ON_CHECKPOINTS;
		// this overwrites whatever setting the user configured in the properties
		adjustAutoCommitConfig(properties, offsetCommitMode);

		return new KafkaFetcher<>(
			sourceContext,
			assignedPartitionsWithInitialOffsets,
			watermarkStrategy,
			runtimeContext.getProcessingTimeService(),
			runtimeContext.getExecutionConfig().getAutoWatermarkInterval(),
			runtimeContext.getUserCodeClassLoader(),
			runtimeContext.getTaskNameWithSubtasks(),
			deserializer,
			properties,
			pollTimeout,
			runtimeContext.getMetricGroup(),
			consumerMetricGroup,
			useMetrics);
	}

	@Override
	protected AbstractPartitionDiscoverer createPartitionDiscoverer(
			KafkaTopicsDescriptor topicsDescriptor,
			int indexOfThisSubtask,
			int numParallelSubtasks) {

		return new KafkaPartitionDiscoverer(topicsDescriptor, indexOfThisSubtask, numParallelSubtasks, properties);
	}

	@Override
	protected Map<KafkaTopicPartition, Long> fetchOffsetsWithTimestamp(
			Collection<KafkaTopicPartition> partitions,
			long timestamp) {

		Map<TopicPartition, Long> partitionOffsetsRequest = new HashMap<>(partitions.size());
		for (KafkaTopicPartition partition : partitions) {
			partitionOffsetsRequest.put(
				new TopicPartition(partition.getTopic(), partition.getPartition()),
				timestamp);
		}

		final Map<KafkaTopicPartition, Long> result = new HashMap<>(partitions.size());
		// use a short-lived consumer to fetch the offsets;
		// this is ok because this is a one-time operation that happens only on startup
		// (diamond instead of a raw KafkaConsumer to avoid unchecked-conversion warnings)
		try (KafkaConsumer<?, ?> consumer = new KafkaConsumer<>(properties)) {
			for (Map.Entry<TopicPartition, OffsetAndTimestamp> partitionToOffset :
				consumer.offsetsForTimes(partitionOffsetsRequest).entrySet()) {

				// offsetsForTimes maps a partition to null when no message with a
				// timestamp >= the requested one exists; propagate that as null
				result.put(
					new KafkaTopicPartition(partitionToOffset.getKey().topic(), partitionToOffset.getKey().partition()),
					(partitionToOffset.getValue() == null) ? null : partitionToOffset.getValue().offset());
			}
		}
		return result;
	}

	@Override
	protected boolean getIsAutoCommitEnabled() {
		return getBoolean(properties, ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true) &&
			PropertiesUtil.getLong(properties, ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, 5000) > 0;
	}

	/**
	 * Makes sure that the ByteArrayDeserializer is registered in the Kafka properties.
	 *
	 * @param props The Kafka properties to register the serializer in.
	 */
	private static void setDeserializer(Properties props) {
		final String deSerName = ByteArrayDeserializer.class.getName();

		Object keyDeSer = props.get(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
		Object valDeSer = props.get(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);

		if (keyDeSer != null && !keyDeSer.equals(deSerName)) {
			LOG.warn("Ignoring configured key DeSerializer ({})", ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG);
		}
		if (valDeSer != null && !valDeSer.equals(deSerName)) {
			LOG.warn("Ignoring configured value DeSerializer ({})", ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG);
		}

		props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, deSerName);
		props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, deSerName);
	}
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.psi.stubs;

import com.intellij.lang.Language;
import com.intellij.lang.LanguageParserDefinitions;
import com.intellij.lang.ParserDefinition;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.LanguageFileType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.ThrowableComputable;
import com.intellij.openapi.util.io.BufferExposingByteArrayOutputStream;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.newvfs.FileAttribute;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.persistent.FSRecords;
import com.intellij.psi.tree.IFileElementType;
import com.intellij.psi.tree.IStubFileElementType;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.*;
import com.intellij.util.indexing.impl.*;
import com.intellij.util.io.*;
import gnu.trove.THashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.*;
import java.io.DataOutputStream;
import java.util.*;

/*
 * @author max
 */
/**
 * File-based index that builds and stores serialized stub trees (one tree per file, keyed by
 * file id) and keeps the per-key stub indices ({@link StubIndexImpl}) in sync with it.
 */
public class StubUpdatingIndex extends CustomImplementationFileBasedIndexExtension<Integer, SerializedStubTree, FileContent>
  implements PsiDependentIndex, CustomInputsIndexFileBasedIndexExtension<Integer> {
  static final Logger LOG = Logger.getInstance("#com.intellij.psi.stubs.StubUpdatingIndex");

  // bump the base version when the on-disk format changes; compression flag is part of the version
  private static final int VERSION = 32 + (PersistentHashMapValueStorage.COMPRESSION_ENABLED ? 1 : 0);

  // todo remove once we don't need this for stub-ast mismatch debug info
  private static final FileAttribute INDEXED_STAMP = new FileAttribute("stubIndexStamp", 2, true);

  public static final ID<Integer, SerializedStubTree> INDEX_ID = ID.create("Stubs");

  // (de)serializes the index value — the whole serialized stub tree
  private static final DataExternalizer<SerializedStubTree> KEY_EXTERNALIZER = new DataExternalizer<SerializedStubTree>() {
    @Override
    public void save(@NotNull final DataOutput out, @NotNull final SerializedStubTree v) throws IOException {
      v.write(out);
    }

    @NotNull
    @Override
    public SerializedStubTree read(@NotNull final DataInput in) throws IOException {
      return new SerializedStubTree(in);
    }
  };

  private static final FileBasedIndex.InputFilter INPUT_FILTER = file -> canHaveStub(file);

  /**
   * Returns whether {@code file} may have a stub tree: either its language's file element type
   * opts in via {@link IStubFileElementType#shouldBuildStubFor}, the file is already in indexed
   * state for this index, or a binary stub builder accepts it.
   */
  public static boolean canHaveStub(@NotNull VirtualFile file) {
    final FileType fileType = file.getFileType();
    if (fileType instanceof LanguageFileType) {
      final Language l = ((LanguageFileType)fileType).getLanguage();
      final ParserDefinition parserDefinition = LanguageParserDefinitions.INSTANCE.forLanguage(l);
      if (parserDefinition == null) {
        return false;
      }

      final IFileElementType elementType = parserDefinition.getFileNodeType();
      if (elementType instanceof IStubFileElementType) {
        if (((IStubFileElementType)elementType).shouldBuildStubFor(file)) {
          return true;
        }
        FileBasedIndex fileBasedIndex = FileBasedIndex.getInstance();
        if (file instanceof NewVirtualFile &&
            fileBasedIndex instanceof FileBasedIndexImpl &&
            ((FileBasedIndexImpl)fileBasedIndex).getIndex(INDEX_ID).isIndexedStateForFile(((NewVirtualFile)file).getId(), file)) {
          return true;
        }
      }
    }
    final BinaryFileStubBuilder builder = BinaryFileStubBuilders.INSTANCE.forFileType(fileType);
    return builder != null && builder.acceptsFile(file);
  }

  private static final KeyDescriptor<Integer> DATA_DESCRIPTOR = new IntInlineKeyDescriptor();

  @NotNull
  @Override
  public ID<Integer, SerializedStubTree> getName() {
    return INDEX_ID;
  }

  @Override
  public int getCacheSize() {
    return 5; // no need to cache many serialized trees
  }

  @Override
  public boolean keyIsUniqueForIndexedFile() {
    return true;
  }

  /**
   * Externalizer for the forward-index key collection. The collection is always a
   * {@link StubUpdatingIndexKeys} holding a single file id plus the per-stub-index value maps,
   * which are serialized alongside the id so they can be diffed on the next update.
   */
  @NotNull
  @Override
  public DataExternalizer<Collection<Integer>> createExternalizer() {
    return new DataExternalizer<Collection<Integer>>() {
      @Override
      public void save(@NotNull DataOutput out, Collection<Integer> value) throws IOException {
        // exactly one key (the file id) — see keyIsUniqueForIndexedFile()
        DataInputOutputUtil.writeINT(out, value.iterator().next());
        Map<StubIndexKey, Map<Object, StubIdList>> stubIndicesValueMap = ((StubUpdatingIndexKeys)value).myStubIndicesValueMap;
        DataInputOutputUtil.writeINT(out, stubIndicesValueMap != null ? stubIndicesValueMap.size() : 0);

        if (stubIndicesValueMap != null && stubIndicesValueMap.size() > 0) {
          StubIndexImpl stubIndex = StubIndexImpl.getInstanceOrInvalidate();

          for(StubIndexKey stubIndexKey:stubIndicesValueMap.keySet()) {
            DataInputOutputUtil.writeINT(out, stubIndexKey.getUniqueId());
            Map<Object, StubIdList> map = stubIndicesValueMap.get(stubIndexKey);
            stubIndex.serializeIndexValue(out, stubIndexKey, map);
          }
        }
      }

      @Override
      public Collection<Integer> read(@NotNull DataInput in) throws IOException {
        int fileId = DataInputOutputUtil.readINT(in);
        StubUpdatingIndexKeys integers = new StubUpdatingIndexKeys(ContainerUtil.set(fileId));

        int stubIndicesValueMapSize = DataInputOutputUtil.readINT(in);
        if (stubIndicesValueMapSize > 0) {
          THashMap<StubIndexKey, Map<Object, StubIdList>> stubIndicesValueMap = new THashMap<>(stubIndicesValueMapSize);
          StubIndexImpl stubIndex = StubIndexImpl.getInstanceOrInvalidate();

          for(int i = 0; i < stubIndicesValueMapSize; ++i) {
            int stubIndexId = DataInputOutputUtil.readINT(in);
            ID<Object, ?> indexKey = (ID<Object, ?>)StubIndexKey.findById(stubIndexId);
            if (indexKey instanceof StubIndexKey) { // indexKey can be ID in case of removed index
              StubIndexKey<Object, ?> stubIndexKey = (StubIndexKey<Object, ?>)indexKey;
              stubIndicesValueMap.put(stubIndexKey, stubIndex.deserializeIndexValue(in, stubIndexKey));
            }
          }
          integers.myStubIndicesValueMap = stubIndicesValueMap;
        }
        return integers;
      }
    };
  }

  /**
   * Key-set wrapper that piggybacks the per-stub-index value maps onto the forward-index
   * key collection, so updates can compute old-vs-new stub index diffs without extra storage.
   */
  static class StubUpdatingIndexKeys extends AbstractSet<Integer> {
    private final Set<Integer> myBackingMap;
    private Map<StubIndexKey, Map<Object, StubIdList>> myStubIndicesValueMap = Collections.emptyMap();

    StubUpdatingIndexKeys(Set<Integer> backingMap) {
      myBackingMap = backingMap;
    }

    @Override
    public Iterator<Integer> iterator() {
      return myBackingMap.iterator();
    }

    @Override
    public int size() {
      return myBackingMap.size();
    }
  }

  /**
   * Builds the stub tree for a file inside a read action, serializes it, and returns a
   * single-entry map {fileId -> SerializedStubTree}; the map's key set carries the computed
   * stub-index value maps (see {@link StubUpdatingIndexKeys}).
   */
  @NotNull
  @Override
  public DataIndexer<Integer, SerializedStubTree, FileContent> getIndexer() {
    return new DataIndexer<Integer, SerializedStubTree, FileContent>() {
      @Override
      @NotNull
      public Map<Integer, SerializedStubTree> map(@NotNull final FileContent inputData) {
        // THashMap subclass whose keySet() is lazily wrapped into StubUpdatingIndexKeys
        final Map<Integer, SerializedStubTree> result = new THashMap<Integer, SerializedStubTree>() {
          StubUpdatingIndexKeys myKeySet;

          @Override
          public Set<Integer> keySet() {
            if (myKeySet == null) {
              myKeySet = new StubUpdatingIndexKeys(super.keySet());
            }
            return myKeySet;
          }
        };

        ApplicationManager.getApplication().runReadAction(() -> {
          final Stub rootStub = StubTreeBuilder.buildStubTree(inputData);
          if (rootStub == null) return;

          VirtualFile file = inputData.getFile();
          int contentLength;
          if (file.getFileType().isBinary()) {
            contentLength = -1; // no text length for binary files
          }
          else {
            contentLength = ((FileContentImpl)inputData).getPsiFileForPsiDependentIndex().getTextLength();
          }
          rememberIndexingStamp(file, contentLength);

          final BufferExposingByteArrayOutputStream bytes = new BufferExposingByteArrayOutputStream();
          SerializationManagerEx.getInstanceEx().serialize(rootStub, bytes);

          if (DebugAssertions.DEBUG) {
            // round-trip sanity check: deserialize and compare shape with the original tree
            try {
              Stub deserialized = SerializationManagerEx.getInstanceEx().deserialize(new ByteArrayInputStream(bytes.getInternalBuffer(), 0, bytes.size()));
              check(deserialized, rootStub);
            }
            catch(ProcessCanceledException pce) {
              throw pce;
            }
            catch (Throwable t) {
              LOG.error("Error indexing:" + file, t);
            }
          }
          final int key = Math.abs(FileBasedIndex.getFileId(file));
          SerializedStubTree serializedStubTree = new SerializedStubTree(bytes.getInternalBuffer(), bytes.size(), rootStub, file.getLength(), contentLength);
          result.put(key, serializedStubTree);
          try {
            ((StubUpdatingIndexKeys)result.keySet()).myStubIndicesValueMap = calcStubIndicesValueMap(serializedStubTree, key);
          }
          catch (StorageException ex) {
            throw new RuntimeException(ex);
          }
        });

        return result;
      }
    };
  }

  // asserts the two stub trees have identical structure (type + children, recursively)
  private static void check(Stub stub, Stub stub2) {
    assert stub.getStubType() == stub2.getStubType();
    List<? extends Stub> stubs = stub.getChildrenStubs();
    List<? extends Stub> stubs2 = stub2.getChildrenStubs();
    assert stubs.size() == stubs2.size();
    for(int i = 0, len = stubs.size(); i < len; ++i) {
      check(stubs.get(i), stubs2.get(i));
    }
  }

  // records (timestamp, content length) on the file for stub-AST mismatch diagnostics
  private static void rememberIndexingStamp(final VirtualFile file, long contentLength) {
    try (DataOutputStream stream = INDEXED_STAMP.writeAttribute(file)) {
      DataInputOutputUtil.writeTIME(stream, file.getTimeStamp());
      DataInputOutputUtil.writeLONG(stream, contentLength);
    }
    catch (IOException e) {
      LOG.error(e);
    }
  }

  /** Reads back the (timestamp, content length) recorded by {@link #rememberIndexingStamp}, or null. */
  @Nullable
  public static IndexingStampInfo getIndexingStampInfo(VirtualFile file) {
    try (DataInputStream stream = INDEXED_STAMP.readAttribute(file)) {
      if (stream == null) {
        return null;
      }
      long stamp = DataInputOutputUtil.readTIME(stream);
      long size = DataInputOutputUtil.readLONG(stream);
      return new IndexingStampInfo(stamp, size);
    }
    catch (IOException e) {
      LOG.error(e);
      return null;
    }
  }

  @NotNull
  @Override
  public KeyDescriptor<Integer> getKeyDescriptor() {
    return DATA_DESCRIPTOR;
  }

  @NotNull
  @Override
  public DataExternalizer<SerializedStubTree> getValueExternalizer() {
    return KEY_EXTERNALIZER;
  }

  @NotNull
  @Override
  public FileBasedIndex.InputFilter getInputFilter() {
    return INPUT_FILTER;
  }

  @Override
  public boolean dependsOnFileContent() {
    return true;
  }

  @Override
  public int getVersion() {
    return VERSION;
  }

  /**
   * Creates the index implementation; when backed by a {@link MemoryIndexStorage} it wires the
   * storage's buffering state into {@link StubIndexImpl} so both stay consistent.
   */
  @NotNull
  @Override
  public UpdatableIndex<Integer, SerializedStubTree, FileContent> createIndexImplementation(@NotNull final FileBasedIndexExtension<Integer, SerializedStubTree> extension,
                                                                                           @NotNull IndexStorage<Integer, SerializedStubTree> storage)
    throws StorageException, IOException {
    if (storage instanceof MemoryIndexStorage) {
      final MemoryIndexStorage<Integer, SerializedStubTree> memStorage = (MemoryIndexStorage<Integer, SerializedStubTree>)storage;
      memStorage.addBufferingStateListener(new MemoryIndexStorage.BufferingStateListener() {
        @Override
        public void bufferingStateChanged(final boolean newState) {
          ((StubIndexImpl)StubIndex.getInstance()).setDataBufferingEnabled(newState);
        }

        @Override
        public void memoryStorageCleared() {
          ((StubIndexImpl)StubIndex.getInstance()).cleanupMemoryStorage();
        }
      });
    }
    return new MyIndex(extension, storage);
  }

  // pushes old/new per-key maps into every affected stub index for the given input file
  private static void updateStubIndices(@NotNull final Collection<StubIndexKey> indexKeys,
                                        final int inputId,
                                        @NotNull final Map<StubIndexKey, Map<Object, StubIdList>> oldStubTree,
                                        @NotNull final Map<StubIndexKey, Map<Object, StubIdList>> newStubTree) {
    final StubIndexImpl stubIndex = (StubIndexImpl)StubIndex.getInstance();
    for (StubIndexKey key : indexKeys) {
      final Map<Object, StubIdList> oldMap = oldStubTree.get(key);
      final Map<Object, StubIdList> newMap = newStubTree.get(key);

      final Map<Object, StubIdList> _oldMap = oldMap != null ? oldMap : Collections.emptyMap();
      final Map<Object, StubIdList> _newMap = newMap != null ? newMap : Collections.emptyMap();

      stubIndex.updateIndex(key, inputId, _oldMap, _newMap);
    }
  }

  // union of the stub index keys present in either tree
  @NotNull
  private static Collection<StubIndexKey> getAffectedIndices(@NotNull final Map<StubIndexKey, Map<Object, StubIdList>> oldStubTree,
                                                             @NotNull final Map<StubIndexKey, Map<Object, StubIdList>> newStubTree) {
    Set<StubIndexKey> allIndices = new HashSet<>();
    allIndices.addAll(oldStubTree.keySet());
    allIndices.addAll(newStubTree.keySet());
    return allIndices;
  }

  /**
   * Deserializes the stub tree and computes, per stub index key, the map of index values to
   * stub-id lists. NOTE: the {@code int[]} value maps returned by {@code indexStubTree()} are
   * retyped and mutated in place ("xxx:fix refs inplace") to avoid an extra allocation.
   */
  private static @NotNull Map<StubIndexKey, Map<Object, StubIdList>> calcStubIndicesValueMap(SerializedStubTree stub, int fileId)
    throws StorageException {
    if (stub == null) return Collections.emptyMap();
    Map<StubIndexKey, Map<Object, StubIdList>> stubIndicesValueMap;
    try {
      ObjectStubBase root = (ObjectStubBase)stub.getStub(true);
      ObjectStubTree objectStubTree = root instanceof PsiFileStub
                                      ? new StubTree((PsiFileStub)root, false)
                                      : new ObjectStubTree(root, false);
      Map<StubIndexKey, Map<Object, int[]>> map = objectStubTree.indexStubTree();

      // xxx:fix refs inplace
      stubIndicesValueMap = (Map)map;
      for(StubIndexKey key:map.keySet()) {
        Map<Object, int[]> value = map.get(key);
        for(Object k: value.keySet()) {
          int[] ints = value.get(k);
          StubIdList stubList = ints.length == 1 ? new StubIdList(ints[0]) : new StubIdList(ints, ints.length);
          ((Map<Object, StubIdList>)(Map)value).put(k, stubList);
        }
      }
      return stubIndicesValueMap;
    }
    catch (SerializerNotFoundException e) {
      throw new StorageException(e);
    }
  }

  /**
   * Concrete index implementation; in addition to the base map-reduce behavior it propagates
   * every update/flush/clear/dispose to {@link StubIndexImpl} and tracks per-file-type
   * stub version stamps for indexed-state checks.
   */
  private static class MyIndex extends VfsAwareMapReduceIndex<Integer, SerializedStubTree, FileContent> {
    private StubIndexImpl myStubIndex;
    private final StubVersionMap myStubVersionMap = new StubVersionMap();

    public MyIndex(FileBasedIndexExtension<Integer, SerializedStubTree> extension, IndexStorage<Integer, SerializedStubTree> storage)
      throws StorageException, IOException {
      super(extension, storage);
      checkNameStorage();
    }

    @NotNull
    @Override
    protected UpdateData<Integer, SerializedStubTree> createUpdateData(Map<Integer, SerializedStubTree> data,
                                                                       ThrowableComputable<InputDataDiffBuilder<Integer, SerializedStubTree>, IOException> oldKeys,
                                                                       ThrowableRunnable<IOException> forwardIndexUpdate) {
      return new StubUpdatingData(data, oldKeys, forwardIndexUpdate);
    }

    /** Update payload that captures the old forward-index keys so stub-index diffs can be computed. */
    static class StubUpdatingData extends UpdateData<Integer, SerializedStubTree> {
      private Collection<Integer> oldStubIndexKeys;

      public StubUpdatingData(@NotNull Map<Integer, SerializedStubTree> newData,
                              @NotNull ThrowableComputable<InputDataDiffBuilder<Integer, SerializedStubTree>, IOException> iterator,
                              @Nullable ThrowableRunnable<IOException> forwardIndexUpdate) {
        super(newData, iterator, INDEX_ID, forwardIndexUpdate);
      }

      @Override
      protected ThrowableComputable<InputDataDiffBuilder<Integer, SerializedStubTree>, IOException> getCurrentDataEvaluator() {
        return () -> {
          final InputDataDiffBuilder<Integer, SerializedStubTree> diffBuilder = super.getCurrentDataEvaluator().compute();
          if (diffBuilder instanceof CollectionInputDataDiffBuilder) {
            // side effect: remember the previously stored keys (and their stub-index maps)
            oldStubIndexKeys = ((CollectionInputDataDiffBuilder<Integer, SerializedStubTree>) diffBuilder).getSeq();
          }
          return diffBuilder;
        };
      }

      public Map<StubIndexKey, Map<Object, StubIdList>> getOldStubIndicesValueMap() {
        if (oldStubIndexKeys instanceof StubUpdatingIndexKeys) {
          return ((StubUpdatingIndexKeys)oldStubIndexKeys).myStubIndicesValueMap;
        }
        return Collections.emptyMap();
      }

      public Map<StubIndexKey, Map<Object, StubIdList>> getNewStubIndicesValueMap() {
        Set<Integer> newIndexKeys = getNewData().keySet();
        if (newIndexKeys instanceof StubUpdatingIndexKeys) {
          return ((StubUpdatingIndexKeys)newIndexKeys).myStubIndicesValueMap;
        }
        return Collections.emptyMap();
      }
    }

    @Override
    protected void doFlush() throws IOException, StorageException {
      final StubIndexImpl stubIndex = getStubIndex();
      try {
        stubIndex.flush();
      }
      finally {
        super.doFlush();
      }
    }

    @Override
    protected void updateWithMap(int inputId,
                                 @NotNull UpdateData<Integer, SerializedStubTree> updateData) throws StorageException {
      checkNameStorage();
      StubUpdatingData stubUpdatingData = (StubUpdatingData)updateData;
      final Map<StubIndexKey, Map<Object, StubIdList>> newStubIndicesValueMap = stubUpdatingData.getNewStubIndicesValueMap();

      // NOTE(review): lock() is inside the try; if it ever threw, unlock() in finally
      // would run without the lock held — conventional form is lock() before try.
      try {
        getWriteLock().lock();

        super.updateWithMap(inputId, updateData);
        // old map becomes available only after super.updateWithMap ran the data evaluator
        final Map<StubIndexKey, Map<Object, StubIdList>> previousStubIndicesValueMap = stubUpdatingData.getOldStubIndicesValueMap();

        updateStubIndices(
          getAffectedIndices(previousStubIndicesValueMap, newStubIndicesValueMap),
          inputId,
          previousStubIndicesValueMap,
          newStubIndicesValueMap
        );
      }
      finally {
        getWriteLock().unlock();
      }
    }

    // lazily cached StubIndexImpl singleton
    private StubIndexImpl getStubIndex() {
      StubIndexImpl index = myStubIndex;
      if (index == null) {
        index = myStubIndex = (StubIndexImpl)StubIndex.getInstance();
      }
      return index;
    }

    // repairs the name storage and forces a rebuild when it is corrupted
    private static void checkNameStorage() throws StorageException {
      final SerializationManagerEx serializationManager = SerializationManagerEx.getInstanceEx();
      if (serializationManager.isNameStorageCorrupted()) {
        serializationManager.repairNameStorage();
        //noinspection ThrowFromFinallyBlock
        throw new StorageException("NameStorage for stubs serialization has been corrupted");
      }
    }

    @Override
    protected void doClear() throws StorageException, IOException {
      final StubIndexImpl stubIndex = StubIndexImpl.getInstanceOrInvalidate();
      if (stubIndex != null) {
        stubIndex.clearAllIndices();
      }
      myStubVersionMap.clear();
      super.doClear();
    }

    @Override
    protected void doDispose() throws StorageException {
      try {
        super.doDispose();
      }
      finally {
        getStubIndex().dispose();
      }
    }

    private static final FileAttribute VERSION_STAMP = new FileAttribute("stubIndex.versionStamp", 2, true);

    @Override
    public void setIndexedStateForFile(int fileId, @NotNull VirtualFile file) {
      super.setIndexedStateForFile(fileId, file);
      try {
        // also record the file-type specific stub version stamp
        DataOutputStream stream = FSRecords.writeAttribute(fileId, VERSION_STAMP);
        DataInputOutputUtil.writeINT(stream, myStubVersionMap.getIndexingTimestampDiffForFileType(file.getFileType()));
        stream.close();
      }
      catch (IOException e) {
        LOG.error(e);
      }
    }

    @Override
    public boolean isIndexedStateForFile(int fileId, @NotNull VirtualFile file) {
      boolean indexedStateForFile = super.isIndexedStateForFile(fileId, file);
      if (!indexedStateForFile) return false;

      try {
        // NOTE(review): stream is not closed here — TODO confirm whether a
        // try-with-resources is safe for FSRecords attribute streams.
        DataInputStream stream = FSRecords.readAttributeWithLock(fileId, VERSION_STAMP);
        int diff = stream != null ? DataInputOutputUtil.readINT(stream) : 0;
        if (diff == 0) return false;
        FileType fileType = myStubVersionMap.getFileTypeByIndexingTimestampDiff(diff);
        // indexed state holds only if the recorded stub version matches the current one
        return fileType != null && myStubVersionMap.getStamp(file.getFileType()) == myStubVersionMap.getStamp(fileType);
      }
      catch (IOException e) {
        LOG.error(e);
        return false;
      }
    }
  }
}
package ca.uhn.fhir.rest.server; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.api.IResource; import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum; import ca.uhn.fhir.model.dstu2.resource.Patient; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.InstantDt; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.rest.annotation.ConditionalUrlParam; import ca.uhn.fhir.rest.annotation.IdParam; import ca.uhn.fhir.rest.annotation.OptionalParam; import ca.uhn.fhir.rest.annotation.ResourceParam; import ca.uhn.fhir.rest.annotation.Search; import ca.uhn.fhir.rest.annotation.Update; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.MethodOutcome; import ca.uhn.fhir.test.utilities.JettyUtil; import ca.uhn.fhir.util.TestUtil; import org.apache.commons.io.IOUtils; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletHandler; import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; public class UpdateDstu2Test { private static CloseableHttpClient ourClient; private static FhirContext ourCtx = FhirContext.forDstu2(); private static String ourLastConditionalUrl; private static IdDt 
ourLastId; private static IdDt ourLastIdParam; private static boolean ourLastRequestWasSearch; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UpdateDstu2Test.class); private static int ourPort; private static Server ourServer; private static InstantDt ourSetLastUpdated; @BeforeEach public void before() { ourLastId = null; ourLastConditionalUrl = null; ourLastIdParam = null; ourLastRequestWasSearch = false; } @Test public void testSearchStillWorks() throws Exception { Patient patient = new Patient(); patient.addIdentifier().setValue("002"); HttpGet httpGet = new HttpGet("http://localhost:" + ourPort + "/Patient?_pretty=true"); HttpResponse status = ourClient.execute(httpGet); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); ourLog.info("Response was:\n{}", responseContent); assertTrue(ourLastRequestWasSearch); assertNull(ourLastId); assertNull(ourLastIdParam); assertNull(ourLastConditionalUrl); } @Test public void testUpdateWithConditionalUrl() throws Exception { Patient patient = new Patient(); patient.addIdentifier().setValue("002"); HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient?identifier=system%7C001"); httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8"))); HttpResponse status = ourClient.execute(httpPost); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); ourLog.info("Response was:\n{}", responseContent); assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(null, status.getFirstHeader("location")); assertEquals("http://localhost:" + ourPort + "/Patient/001/_history/002", status.getFirstHeader("content-location").getValue()); assertNull(ourLastId.getValue()); assertNull(ourLastIdParam); assertEquals("Patient?identifier=system%7C001", 
ourLastConditionalUrl); } @Test public void testUpdateWithoutConditionalUrl() throws Exception { Patient patient = new Patient(); patient.setId("2"); patient.addIdentifier().setValue("002"); HttpPut httpPost = new HttpPut("http://localhost:" + ourPort + "/Patient/2"); httpPost.setEntity(new StringEntity(ourCtx.newXmlParser().encodeResourceToString(patient), ContentType.create(Constants.CT_FHIR_XML, "UTF-8"))); HttpResponse status = ourClient.execute(httpPost); String responseContent = IOUtils.toString(status.getEntity().getContent()); IOUtils.closeQuietly(status.getEntity().getContent()); ourLog.info("Response was:\n{}", responseContent); assertEquals(200, status.getStatusLine().getStatusCode()); assertEquals(null, status.getFirstHeader("location")); assertEquals("http://localhost:" + ourPort + "/Patient/001/_history/002", status.getFirstHeader("content-location").getValue()); assertEquals("Patient/2", ourLastId.toUnqualified().getValue()); assertEquals("Patient/2", ourLastIdParam.toUnqualified().getValue()); assertNull(ourLastConditionalUrl); } @AfterAll public static void afterClassClearContext() throws Exception { JettyUtil.closeServer(ourServer); TestUtil.clearAllStaticFieldsForUnitTest(); } @BeforeAll public static void beforeClass() throws Exception { ourServer = new Server(0); PatientProvider patientProvider = new PatientProvider(); ServletHandler proxyHandler = new ServletHandler(); RestfulServer servlet = new RestfulServer(ourCtx); servlet.setResourceProviders(patientProvider); ServletHolder servletHolder = new ServletHolder(servlet); proxyHandler.addServletWithMapping(servletHolder, "/*"); ourServer.setHandler(proxyHandler); JettyUtil.startServer(ourServer); ourPort = JettyUtil.getPortForStartedServer(ourServer); PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(5000, TimeUnit.MILLISECONDS); HttpClientBuilder builder = HttpClientBuilder.create(); builder.setConnectionManager(connectionManager); ourClient = 
builder.build(); } public static class PatientProvider implements IResourceProvider { @Override public Class<? extends IResource> getResourceType() { return Patient.class; } @Search public List<IResource> search(@OptionalParam(name="foo") StringDt theString) { ourLastRequestWasSearch = true; return new ArrayList<IResource>(); } @Update() public MethodOutcome updatePatient(@ResourceParam Patient thePatient, @ConditionalUrlParam String theConditional, @IdParam IdDt theIdParam) { ourLastConditionalUrl = theConditional; ourLastId = thePatient.getId(); ourLastIdParam = theIdParam; MethodOutcome retVal = new MethodOutcome(new IdDt("Patient/001/_history/002")); ResourceMetadataKeyEnum.UPDATED.put(thePatient, ourSetLastUpdated); retVal.setResource(thePatient); return retVal; } } }
/* Copyright 2011 Karl-Michael Schneider

   Licensed under the Apache License, Version 2.0 (the "License"); you may not
   use this file except in compliance with the License. You may obtain a copy
   of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
   WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
   License for the specific language governing permissions and limitations
   under the License. */

package org.jwatter.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;

import org.junit.Test;

import org.jwatter.util.AppendableIterator;

/**
 * Unit tests for {@link AppendableIterator}, covering both the default
 * set-backed form (duplicates rejected by the underlying collection) and the
 * list-backed form (duplicates kept), plus the add/addNew/addAll/addAllNew
 * append operations and standard iterator behavior (hasNext/next/remove).
 */
public class AppendableIteratorTest {

    /** Default constructor uses an empty HashSet as the underlying collection. */
    @Test
    public void testAppendableIterator () {
        AppendableIterator<String> i = new AppendableIterator<String>();
        Collection<String> c = i.getUnderlyingCollection();
        assertEquals(HashSet.class, c.getClass());
        assertEquals(0, c.size());
    }

    // FIX: was missing @Test, so this test was silently never run by JUnit 4.
    /** Iterating a set-backed iterator yields exactly the set's elements. */
    @Test
    public void testAppendableIteratorSet () {
        Set<String> s = new HashSet<String>();
        s.add("a");
        s.add("b");
        s.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>(s);
        Set<String> t = new HashSet<String>();
        while ( i.hasNext() ) {
            String e = i.next();
            assertTrue(s.contains(e));
            t.add(e);
        }
        assertEquals(s, t);
    }

    // FIX: was missing @Test, so this test was silently never run by JUnit 4.
    /** Iterating a list-backed iterator preserves order and duplicates. */
    @Test
    public void testAppendableIteratorList () {
        List<String> s = new LinkedList<String>();
        s.add("a");
        s.add("b");
        s.add("a");
        s.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>(s);
        assertTrue(s == i.getUnderlyingCollection());
        assertEquals("a", i.next());
        assertEquals("b", i.next());
        assertEquals("a", i.next());
        assertEquals("c", i.next());
        assertFalse(i.hasNext());
    }

    /** A null backing collection is rejected. */
    @Test(expected = NullPointerException.class)
    public void testAppendableIteratorCollectionNull () {
        new AppendableIterator<String>(null);
    }

    /**
     * add() on a set-backed iterator: the set rejects the duplicate (add
     * returns false, size stays 3) but the element is still appended to the
     * iteration sequence.
     */
    @Test
    public void testAddSet () {
        AppendableIterator<String> i = new AppendableIterator<String>();
        assertTrue(i.add("a"));
        assertTrue(i.add("b"));
        assertFalse(i.add("a"));
        assertTrue(i.add("c"));
        assertEquals(3, i.getUnderlyingCollection().size());
        assertEquals("a", i.next());
        assertEquals("b", i.next());
        assertEquals("a", i.next());
        assertEquals("c", i.next());
        assertFalse(i.hasNext());
    }

    /** add() on a list-backed iterator keeps duplicates in both list and iteration. */
    @Test
    public void testAddList () {
        AppendableIterator<String> i = new AppendableIterator<String>(new LinkedList<String>());
        assertTrue(i.add("a"));
        assertTrue(i.add("b"));
        assertTrue(i.add("a"));
        assertTrue(i.add("c"));
        assertEquals(4, i.getUnderlyingCollection().size());
        assertEquals("a", i.next());
        assertEquals("b", i.next());
        assertEquals("a", i.next());
        assertEquals("c", i.next());
        assertFalse(i.hasNext());
    }

    /** addNew() on a set-backed iterator skips duplicates in the iteration as well. */
    @Test
    public void testAddNewSet () {
        AppendableIterator<String> i = new AppendableIterator<String>();
        assertTrue(i.addNew("a"));
        assertTrue(i.addNew("b"));
        assertFalse(i.addNew("a"));
        assertTrue(i.addNew("c"));
        assertEquals(3, i.getUnderlyingCollection().size());
        assertEquals("a", i.next());
        assertEquals("b", i.next());
        assertEquals("c", i.next());
        assertFalse(i.hasNext());
    }

    /** addNew() on a list-backed iterator behaves like add(): duplicates are kept. */
    @Test
    public void testAddNewList () {
        AppendableIterator<String> i = new AppendableIterator<String>(new LinkedList<String>());
        assertTrue(i.addNew("a"));
        assertTrue(i.addNew("b"));
        assertTrue(i.addNew("a"));
        assertTrue(i.addNew("c"));
        assertEquals(4, i.getUnderlyingCollection().size());
        assertEquals("a", i.next());
        assertEquals("b", i.next());
        assertEquals("a", i.next());
        assertEquals("c", i.next());
        assertFalse(i.hasNext());
    }

    /** addAll() into a set: set deduplicates, iteration keeps the full sequence. */
    @Test
    public void testAddAllSet () {
        List<String> c = new LinkedList<String>();
        c.add("a");
        c.add("b");
        c.add("a");
        c.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>();
        assertTrue(i.addAll(c));
        assertEquals(3, i.getUnderlyingCollection().size());
        assertTrue(i.getUnderlyingCollection().containsAll(c));
        assertEquals("a", i.next());
        assertEquals("b", i.next());
        assertEquals("a", i.next());
        assertEquals("c", i.next());
        assertFalse(i.hasNext());
    }

    /** addAll() into a list: all elements appended, order and duplicates kept. */
    @Test
    public void testAddAllList () {
        List<String> c = new LinkedList<String>();
        c.add("a");
        c.add("b");
        c.add("a");
        c.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>(new LinkedList<String>());
        assertTrue(i.addAll(c));
        assertEquals(c, i.getUnderlyingCollection());
        assertEquals("a", i.next());
        assertEquals("b", i.next());
        assertEquals("a", i.next());
        assertEquals("c", i.next());
        assertFalse(i.hasNext());
    }

    /** addAllNew() into a set: only elements not already present are appended. */
    @Test
    public void testAddAllNewSet () {
        List<String> c = new LinkedList<String>();
        c.add("a");
        c.add("b");
        c.add("a");
        c.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>();
        assertTrue(i.add("c"));
        assertTrue(i.addAllNew(c));
        assertEquals(3, i.getUnderlyingCollection().size());
        assertTrue(i.getUnderlyingCollection().containsAll(c));
        assertEquals("c", i.next());
        assertEquals("a", i.next());
        assertEquals("b", i.next());
        assertFalse(i.hasNext());
    }

    /** addAllNew() into a list: for a list backing it appends everything, like addAll(). */
    @Test
    public void testAddAllNewList () {
        List<String> c = new LinkedList<String>();
        c.add("a");
        c.add("b");
        c.add("a");
        c.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>(new LinkedList<String>());
        assertTrue(i.add("c"));
        // FIX: previously called addAll(), so addAllNew() was never exercised here.
        // For a list-backed iterator the two are equivalent (see
        // testAddAllNewListTwice), so the expected values are unchanged.
        assertTrue(i.addAllNew(c));
        assertEquals(5, i.getUnderlyingCollection().size());
        assertEquals("c", i.next());
        assertEquals("a", i.next());
        assertEquals("b", i.next());
        assertEquals("a", i.next());
        assertEquals("c", i.next());
        assertFalse(i.hasNext());
    }

    /** add() after the iterator has been exhausted makes hasNext() true again. */
    @Test
    public void testAddSetAfterNext () {
        AppendableIterator<String> i = new AppendableIterator<String>();
        assertFalse(i.hasNext());
        assertTrue(i.add("a"));
        assertTrue(i.hasNext());
        assertEquals("a", i.next());
        assertFalse(i.hasNext());
        assertFalse(i.add("a"));
        assertTrue(i.hasNext());
        assertEquals("a", i.next());
        assertFalse(i.hasNext());
        assertEquals(1, i.getUnderlyingCollection().size());
        assertTrue(i.getUnderlyingCollection().contains("a"));
    }

    /** Same as above for a list backing: the duplicate is also stored. */
    @Test
    public void testAddListAfterNext () {
        AppendableIterator<String> i = new AppendableIterator<String>(new LinkedList<String>());
        assertFalse(i.hasNext());
        assertTrue(i.add("a"));
        assertTrue(i.hasNext());
        assertEquals("a", i.next());
        assertFalse(i.hasNext());
        assertTrue(i.add("a"));
        assertTrue(i.hasNext());
        assertEquals("a", i.next());
        assertFalse(i.hasNext());
        assertEquals(2, i.getUnderlyingCollection().size());
    }

    /** addNew() of a duplicate after exhaustion does not restart iteration (set). */
    @Test
    public void testAddNewSetAfterNext () {
        AppendableIterator<String> i = new AppendableIterator<String>();
        assertFalse(i.hasNext());
        assertTrue(i.addNew("a"));
        assertTrue(i.hasNext());
        assertEquals("a", i.next());
        assertFalse(i.hasNext());
        assertFalse(i.addNew("a"));
        assertFalse(i.hasNext());
        assertEquals(1, i.getUnderlyingCollection().size());
        assertTrue(i.getUnderlyingCollection().contains("a"));
    }

    /** addNew() of a duplicate after exhaustion does append for a list backing. */
    @Test
    public void testAddNewListAfterNext () {
        AppendableIterator<String> i = new AppendableIterator<String>(new LinkedList<String>());
        assertFalse(i.hasNext());
        assertTrue(i.addNew("a"));
        assertTrue(i.hasNext());
        assertEquals("a", i.next());
        assertFalse(i.hasNext());
        assertTrue(i.addNew("a"));
        assertTrue(i.hasNext());
        assertEquals("a", i.next());
        assertFalse(i.hasNext());
        assertEquals(2, i.getUnderlyingCollection().size());
    }

    /** Double addAll() into a set: second call returns false, iteration still sees 8 items. */
    @Test
    public void testAddAllSetTwice () {
        List<String> c = new LinkedList<String>();
        c.add("a");
        c.add("b");
        c.add("a");
        c.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>();
        assertTrue(i.addAll(c));
        assertFalse(i.addAll(c));
        assertEquals(3, i.getUnderlyingCollection().size());
        int n = 0;
        for ( ; i.hasNext() ; i.next() , n++ ) ;
        assertEquals(8, n);
    }

    /** Double addAll() into a list: both the list and the iteration hold 8 items. */
    @Test
    public void testAddAllListTwice () {
        List<String> c = new LinkedList<String>();
        c.add("a");
        c.add("b");
        c.add("a");
        c.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>(new LinkedList<String>());
        assertTrue(i.addAll(c));
        assertTrue(i.addAll(c));
        assertEquals(8, i.getUnderlyingCollection().size());
        int n = 0;
        for ( ; i.hasNext() ; i.next() , n++ ) ;
        assertEquals(8, n);
    }

    /** Double addAllNew() into a set: duplicates are skipped in the iteration too. */
    @Test
    public void testAddAllNewSetTwice () {
        List<String> c = new LinkedList<String>();
        c.add("a");
        c.add("b");
        c.add("a");
        c.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>();
        assertTrue(i.addAllNew(c));
        assertFalse(i.addAllNew(c));
        assertEquals(3, i.getUnderlyingCollection().size());
        int n = 0;
        for ( ; i.hasNext() ; i.next() , n++ ) ;
        assertEquals(3, n);
    }

    /** Double addAllNew() into a list: everything is appended both times. */
    @Test
    public void testAddAllNewListTwice () {
        List<String> c = new LinkedList<String>();
        c.add("a");
        c.add("b");
        c.add("a");
        c.add("c");
        AppendableIterator<String> i = new AppendableIterator<String>(new LinkedList<String>());
        assertTrue(i.addAllNew(c));
        assertTrue(i.addAllNew(c));
        assertEquals(8, i.getUnderlyingCollection().size());
        int n = 0;
        for ( ; i.hasNext() ; i.next() , n++ ) ;
        assertEquals(8, n);
    }

    /** hasNext() tracks appends interleaved with consumption. */
    @Test
    public void testHasNext () {
        AppendableIterator<String> i = new AppendableIterator<String>();
        assertFalse(i.hasNext());
        i.add("a");
        assertTrue(i.hasNext());
        assertEquals("a", i.next());
        assertFalse(i.hasNext());
        i.add("b");
        i.add("c");
        assertTrue(i.hasNext());
        i.next();
        assertTrue(i.hasNext());
        i.next();
        assertFalse(i.hasNext());
    }

    /** next() on an empty iterator fails. */
    @Test(expected = NoSuchElementException.class)
    public void testNextEmpty () {
        new AppendableIterator<String>().next();
    }

    /** remove() deletes the last returned element from the underlying collection. */
    @Test
    public void testRemove () {
        AppendableIterator<String> i = new AppendableIterator<String>();
        i.add("a");
        i.add("b");
        i.add("c");
        i.next();
        String removed = i.next();
        i.remove();
        assertTrue(i.hasNext());
        i.next();
        assertFalse(i.hasNext());
        assertEquals(2, i.getUnderlyingCollection().size());
        assertFalse(i.getUnderlyingCollection().contains(removed));
    }

    /** remove() before any next() fails. */
    @Test(expected = IllegalStateException.class)
    public void testRemoveEmpty () {
        new AppendableIterator<String>().remove();
    }

    /** A second remove() without an intervening next() fails. */
    @Test(expected = IllegalStateException.class)
    public void testRemove2 () {
        AppendableIterator<String> i = new AppendableIterator<String>();
        i.add("a");
        i.next();
        i.remove();
        i.remove();
    }

    /** The collection passed to the constructor is exposed unchanged. */
    @Test
    public void testGetUnderlyingCollection () {
        Collection<String> s = new LinkedList<String>();
        assertTrue(s == new AppendableIterator<String>(s)
                .getUnderlyingCollection());
    }
}
/* * Copyright (c) 2012 Jan Kotek * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mapdb; import java.io.IOError; import java.io.IOException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.logging.Level; import java.util.zip.CRC32; /** * Write-Ahead-Log */ public class StoreWAL extends StoreDirect { protected static final long LOG_MASK_OFFSET = 0x0000FFFFFFFFFFFFL; protected static final byte WAL_INDEX_LONG = 101; protected static final byte WAL_LONGSTACK_PAGE = 102; protected static final byte WAL_PHYS_ARRAY_ONE_LONG = 103; protected static final byte WAL_PHYS_ARRAY = 104; protected static final byte WAL_SKIP_REST_OF_BLOCK = 105; /** last instruction in log file */ protected static final byte WAL_SEAL = 111; /** added to offset 8 into log file, indicates that log was synced and closed*/ protected static final long LOG_SEAL = 4566556446554645L; public static final String TRANS_LOG_FILE_EXT = ".t"; protected static final long[] TOMBSTONE = new long[0]; protected static final long[] PREALLOC = new long[0]; protected final Volume.Factory volFac; protected Volume log; protected volatile long logSize; protected final LongConcurrentHashMap<long[]> modified = new LongConcurrentHashMap<long[]>(); protected final LongMap<byte[]> longStackPages = new LongHashMap<byte[]>(); protected final long[] indexVals = new long[IO_USER_START/8]; protected final boolean[] 
indexValsModified = new boolean[indexVals.length]; protected boolean replayPending = true; protected final AtomicInteger logChecksum = new AtomicInteger(); public StoreWAL(Volume.Factory volFac) { this(volFac, false, false, 5, false, 0L, false, false, null,false,0); } public StoreWAL(Volume.Factory volFac, boolean readOnly, boolean deleteFilesAfterClose, int spaceReclaimMode, boolean syncOnCommitDisabled, long sizeLimit, boolean checksum, boolean compress, byte[] password, boolean disableLocks, int sizeIncrement) { super(volFac, readOnly, deleteFilesAfterClose, spaceReclaimMode, syncOnCommitDisabled, sizeLimit, checksum, compress, password,disableLocks, sizeIncrement); this.volFac = volFac; this.log = volFac.createTransLogVolume(); boolean allGood = false; structuralLock.lock(); try{ reloadIndexFile(); if(verifyLogFile()){ replayLogFile(); } replayPending = false; checkHeaders(); if(!readOnly) logReset(); allGood = true; }finally{ if(!allGood) { //exception was thrown, try to unlock files if (log!=null) { log.close(); log = null; } if (index!=null) { index.close(); index = null; } if (phys!=null) { phys.close(); phys = null; } } structuralLock.unlock(); } } @Override protected void checkHeaders() { if(replayPending) return; super.checkHeaders(); } protected void reloadIndexFile() { assert(structuralLock.isHeldByCurrentThread()); logSize = 16; modified.clear(); longStackPages.clear(); indexSize = index.getLong(IO_INDEX_SIZE); physSize = index.getLong(IO_PHYS_SIZE); freeSize = index.getLong(IO_FREE_SIZE); for(int i = 0;i<IO_USER_START;i+=8){ indexVals[i/8] = index.getLong(i); } Arrays.fill(indexValsModified, false); logChecksum.set(0); maxUsedIoList=IO_USER_START-8; while(indexVals[((int) (maxUsedIoList / 8))]!=0 && maxUsedIoList>IO_FREE_RECID) maxUsedIoList-=8; } protected void logReset() { assert(structuralLock.isHeldByCurrentThread()); log.truncate(16); log.ensureAvailable(16); log.putInt(0, HEADER); log.putUnsignedShort(4, STORE_VERSION); log.putUnsignedShort(6, 
expectedMasks()); log.putLong(8, 0L); logSize = 16; } @Override public long preallocate() { final long ioRecid; final long logPos; newRecidLock.readLock().lock(); try{ structuralLock.lock(); try{ checkLogRounding(); ioRecid = freeIoRecidTake(false); logPos = logSize; //now get space in log logSize+=1+8+8; //space used for index val log.ensureAvailable(logSize); }finally{ structuralLock.unlock(); } final Lock lock = locks[Store.lockPos(ioRecid)].writeLock(); lock.lock(); try{ //write data into log walIndexVal(logPos, ioRecid, MASK_DISCARD); modified.put(ioRecid, PREALLOC); }finally{ lock.unlock(); } }finally{ newRecidLock.readLock().unlock(); } long recid = (ioRecid-IO_USER_START)/8; assert(recid>0); return recid; } @Override public void preallocate(final long[] recids) { for(int i=0;i<recids.length;i++){ recids[i] = preallocate(); } } @Override public <A> long put(A value, Serializer<A> serializer) { assert(value!=null); DataOutput2 out = serialize(value, serializer); final long ioRecid; final long[] physPos; final long[] logPos; newRecidLock.readLock().lock(); try{ structuralLock.lock(); try{ ioRecid = freeIoRecidTake(false); //first get space in phys physPos = physAllocate(out.pos,false,false); //now get space in log logPos = logAllocate(physPos); }finally{ structuralLock.unlock(); } final Lock lock = locks[Store.lockPos(ioRecid)].writeLock(); lock.lock(); try{ //write data into log walIndexVal((logPos[0]&LOG_MASK_OFFSET) - 1-8-8-1-8, ioRecid, physPos[0]|MASK_ARCHIVE); walPhysArray(out, physPos, logPos); modified.put(ioRecid,logPos); recycledDataOuts.offer(out); }finally{ lock.unlock(); } }finally{ newRecidLock.readLock().unlock(); } long recid = (ioRecid-IO_USER_START)/8; assert(recid>0); return recid; } protected void walPhysArray(DataOutput2 out, long[] physPos, long[] logPos) { //write byte[] data int outPos = 0; int logC = 0; CRC32 crc32 = new CRC32(); for(int i=0;i<logPos.length;i++){ int c = i==logPos.length-1 ? 
0: 8; final long pos = logPos[i]&LOG_MASK_OFFSET; int size = (int) (logPos[i]>>>48); byte header = c==0 ? WAL_PHYS_ARRAY : WAL_PHYS_ARRAY_ONE_LONG; log.putByte(pos - 8 - 1, header); log.putLong(pos - 8, physPos[i]); if(c>0){ log.putLong(pos, physPos[i + 1]); } log.putData(pos+c, out.buf, outPos, size - c); crc32.reset(); crc32.update(out.buf,outPos, size-c); logC |= LongHashMap.longHash( pos | header | physPos[i] | (c>0?physPos[i+1]:0) | crc32.getValue()); outPos +=size-c; assert(logSize>=outPos); } logChecksumAdd(logC); assert(outPos==out.pos); } protected void walIndexVal(long logPos, long ioRecid, long indexVal) { assert(locks[Store.lockPos(ioRecid)].writeLock().isHeldByCurrentThread()); assert(logSize>=logPos+1+8+8); log.putByte(logPos, WAL_INDEX_LONG); log.putLong(logPos + 1, ioRecid); log.putLong(logPos + 9, indexVal); logChecksumAdd(LongHashMap.longHash(logPos | WAL_INDEX_LONG | ioRecid | indexVal)); } protected long[] logAllocate(long[] physPos) { assert(structuralLock.isHeldByCurrentThread()); logSize+=1+8+8; //space used for index val long[] ret = new long[physPos.length]; for(int i=0;i<physPos.length;i++){ long size = physPos[i]>>>48; //would overlaps Volume Block? 
logSize+=1+8; //space used for WAL_PHYS_ARRAY ret[i] = (size<<48) | logSize; logSize+=size; checkLogRounding(); } log.ensureAvailable(logSize); return ret; } protected void checkLogRounding() { assert(structuralLock.isHeldByCurrentThread()); if((logSize&CHUNK_SIZE_MOD_MASK)+MAX_REC_SIZE*2>CHUNK_SIZE){ log.ensureAvailable(logSize+1); log.putByte(logSize, WAL_SKIP_REST_OF_BLOCK); logSize += CHUNK_SIZE - (logSize&CHUNK_SIZE_MOD_MASK); } } @Override public <A> A get(long recid, Serializer<A> serializer) { assert(recid>0); final long ioRecid = IO_USER_START + recid*8; final Lock lock = locks[Store.lockPos(ioRecid)].readLock(); lock.lock(); try{ return get2(ioRecid, serializer); }catch(IOException e){ throw new IOError(e); }finally{ lock.unlock(); } } @Override protected <A> A get2(long ioRecid, Serializer<A> serializer) throws IOException { assert(locks[Store.lockPos(ioRecid)].getWriteHoldCount()==0|| locks[Store.lockPos(ioRecid)].writeLock().isHeldByCurrentThread()); //check if record was modified in current transaction long[] r = modified.get(ioRecid); //no, read main version if(r==null) return super.get2(ioRecid, serializer); //check for tombstone (was deleted in current trans) if(r==TOMBSTONE || r==PREALLOC || r.length==0) return null; //was modified in current transaction, so read it from trans log if(r.length==1){ //single record final int size = (int) (r[0]>>>48); DataInput2 in = (DataInput2) log.getDataInput(r[0]&LOG_MASK_OFFSET, size); return deserialize(serializer,size,in); }else{ //linked record int totalSize = 0; for(int i=0;i<r.length;i++){ int c = i==r.length-1 ? 0: 8; totalSize+= (int) (r[i]>>>48)-c; } byte[] b = new byte[totalSize]; int pos = 0; for(int i=0;i<r.length;i++){ int c = i==r.length-1 ? 
0: 8; int size = (int) (r[i]>>>48) -c; log.getDataInput((r[i] & LOG_MASK_OFFSET) + c, size).readFully(b,pos,size); pos+=size; } if(pos!=totalSize)throw new AssertionError(); return deserialize(serializer,totalSize, new DataInput2(b)); } } @Override public <A> void update(long recid, A value, Serializer<A> serializer) { assert(recid>0); assert(value!=null); DataOutput2 out = serialize(value, serializer); final long ioRecid = IO_USER_START + recid*8; final Lock lock = locks[Store.lockPos(ioRecid)].writeLock(); lock.lock(); try{ final long[] physPos; final long[] logPos; long indexVal = 0; long[] linkedRecords = getLinkedRecordsFromLog(ioRecid); if(linkedRecords==null){ indexVal = index.getLong(ioRecid); linkedRecords = getLinkedRecordsIndexVals(indexVal); }else if(linkedRecords == PREALLOC){ linkedRecords = null; } structuralLock.lock(); try{ //free first record pointed from indexVal if((indexVal>>>48)>0) freePhysPut(indexVal,false); //if there are more linked records, free those as well if(linkedRecords!=null){ for(int i=0; i<linkedRecords.length &&linkedRecords[i]!=0;i++){ freePhysPut(linkedRecords[i],false); } } //first get space in phys physPos = physAllocate(out.pos,false,false); //now get space in log logPos = logAllocate(physPos); }finally{ structuralLock.unlock(); } //write data into log walIndexVal((logPos[0]&LOG_MASK_OFFSET) - 1-8-8-1-8, ioRecid, physPos[0]|MASK_ARCHIVE); walPhysArray(out, physPos, logPos); modified.put(ioRecid,logPos); }finally{ lock.unlock(); } recycledDataOuts.offer(out); } @Override public <A> boolean compareAndSwap(long recid, A expectedOldValue, A newValue, Serializer<A> serializer) { assert(recid>0); assert(expectedOldValue!=null && newValue!=null); final long ioRecid = IO_USER_START + recid*8; final Lock lock = locks[Store.lockPos(ioRecid)].writeLock(); lock.lock(); DataOutput2 out; try{ A oldVal = get2(ioRecid,serializer); if((oldVal == null && expectedOldValue!=null) || (oldVal!=null && !oldVal.equals(expectedOldValue))) return 
false; out = serialize(newValue, serializer); final long[] physPos; final long[] logPos; long indexVal = 0; long[] linkedRecords = getLinkedRecordsFromLog(ioRecid); if(linkedRecords==null){ indexVal = index.getLong(ioRecid); linkedRecords = getLinkedRecordsIndexVals(indexVal); } structuralLock.lock(); try{ //free first record pointed from indexVal if((indexVal>>>48)>0) freePhysPut(indexVal,false); //if there are more linked records, free those as well if(linkedRecords!=null){ for(int i=0; i<linkedRecords.length &&linkedRecords[i]!=0;i++){ freePhysPut(linkedRecords[i],false); } } //first get space in phys physPos = physAllocate(out.pos,false,false); //now get space in log logPos = logAllocate(physPos); }finally{ structuralLock.unlock(); } //write data into log walIndexVal((logPos[0]&LOG_MASK_OFFSET) - 1-8-8-1-8, ioRecid, physPos[0]|MASK_ARCHIVE); walPhysArray(out, physPos, logPos); modified.put(ioRecid,logPos); }catch(IOException e){ throw new IOError(e); }finally{ lock.unlock(); } recycledDataOuts.offer(out); return true; } @Override public <A> void delete(long recid, Serializer<A> serializer) { assert(recid>0); final long ioRecid = IO_USER_START + recid*8; final Lock lock = locks[Store.lockPos(ioRecid)].writeLock(); lock.lock(); try{ final long logPos; long indexVal = 0; long[] linkedRecords = getLinkedRecordsFromLog(ioRecid); if(linkedRecords==null){ indexVal = index.getLong(ioRecid); if(indexVal==MASK_DISCARD) return; linkedRecords = getLinkedRecordsIndexVals(indexVal); } structuralLock.lock(); try{ checkLogRounding(); logPos = logSize; logSize+=1+8+8; //space used for index val log.ensureAvailable(logSize); longStackPut(IO_FREE_RECID, ioRecid,false); //free first record pointed from indexVal if((indexVal>>>48)>0) freePhysPut(indexVal,false); //if there are more linked records, free those as well if(linkedRecords!=null){ for(int i=0; i<linkedRecords.length &&linkedRecords[i]!=0;i++){ freePhysPut(linkedRecords[i],false); } } }finally { structuralLock.unlock(); } 
// NOTE(review): tail of a delete/update method whose start is outside this chunk —
// records an index tombstone in the WAL and marks the recid as deleted in `modified`.
walIndexVal(logPos,ioRecid,0|MASK_ARCHIVE);
        modified.put(ioRecid, TOMBSTONE);
        }finally {
            lock.unlock();
        }
    }

    /**
     * Flushes all pending changes into the write-ahead log, seals the log,
     * then replays it into the main index/phys files.
     * <p>
     * Order of operations: persist POJO serializer state, dump dirty
     * long-stack pages, dump modified index slots, write the WAL_SEAL record
     * (sizes + index checksum + accumulated CRC), mark the log sealed at
     * offset 8, optionally fsync, replay, and reload the index cache.
     */
    @Override
    public void commit() {
        lockAllWrite();
        try{
            if(serializerPojo!=null && serializerPojo.hasUnsavedChanges()){
                serializerPojo.save(this);
            }
            if(!logDirty()){
                // nothing was modified since the last commit; skip the WAL cycle
                return;
            }
            //dump long stack pages
            int crc = 0;
            LongMap.LongMapIterator<byte[]> iter = longStackPages.longMapIterator();
            while(iter.moveToNext()){
                assert(iter.key()>>>48==0);
                final byte[] array = iter.value();
                // page size is stored big-endian in the first two bytes of the page
                final long pageSize = ((array[0]&0xFF)<<8)|(array[1]&0xFF) ;
                assert(array.length==pageSize);
                // firstVal packs page size into the top 16 bits, offset into the low 48
                final long firstVal = (pageSize<<48)|iter.key();
                log.ensureAvailable(logSize+1+8+pageSize);
                crc |= LongHashMap.longHash(logSize|WAL_LONGSTACK_PAGE|firstVal);
                log.putByte(logSize, WAL_LONGSTACK_PAGE);
                logSize+=1;
                log.putLong(logSize, firstVal);
                logSize+=8;
                //put array
                CRC32 crc32 = new CRC32();
                crc32.update(array);
                crc |= crc32.getValue();
                log.putData(logSize,array,0,array.length);
                logSize+=array.length;
                checkLogRounding();
            }
            // dump modified index values (one 17-byte WAL record per dirty slot)
            for(int i=IO_FREE_RECID;i<IO_USER_START;i+=8){
                if(!indexValsModified[i/8]) continue;
                log.ensureAvailable(logSize + 17);
                logSize+=17;
                walIndexVal(logSize-17, i,indexVals[i/8]);
                //no need to update crc, since IndexVal already does it
            }
            //seal log file
            log.ensureAvailable(logSize + 1 + 3*6 + 8+4);
            long indexChecksum = indexHeaderChecksumUncommited();
            crc|=LongHashMap.longHash(logSize|WAL_SEAL|indexSize|physSize|freeSize|indexChecksum);
            log.putByte(logSize, WAL_SEAL);
            logSize+=1;
            log.putSixLong(logSize, indexSize);
            logSize+=6;
            log.putSixLong(logSize,physSize);
            logSize+=6;
            log.putSixLong(logSize,freeSize);
            logSize+=6;
            log.putLong(logSize, indexChecksum);
            logSize+=8;
            log.putInt(logSize, crc|logChecksum.get());
            logSize+=4;
            //write mark it was sealed
            log.putLong(8, LOG_SEAL);
            //and flush log file
            if(!syncOnCommitDisabled) log.sync();

            replayLogFile();
            reloadIndexFile();
        }finally {
            unlockAllWrite();
        }
    }

    /**
     * Returns {@code true} if there is anything to commit: the log grew past
     * its 16-byte header, a long-stack page or record was touched, or any
     * index slot is flagged dirty.
     */
    protected boolean logDirty() {
        if(logSize!=16 || !longStackPages.isEmpty() || !modified.isEmpty())
            return true;
        for(boolean b: indexValsModified){
            if(b) return true;
        }
        return false;
    }

    /**
     * Computes the index-header checksum over the in-memory (uncommitted)
     * values, substituting the live {@code indexSize}/{@code physSize}/
     * {@code freeSize} fields for their stored slots and skipping the
     * checksum slot itself.
     */
    protected long indexHeaderChecksumUncommited() {
        long ret = 0;
        for(int offset = 0;offset<IO_USER_START;offset+=8){
            if(offset == IO_INDEX_SUM) continue;
            long indexVal;
            if(offset==IO_INDEX_SIZE){
                indexVal = indexSize;
            }else if(offset==IO_PHYS_SIZE){
                indexVal = physSize;
            }else if(offset==IO_FREE_SIZE){
                indexVal = freeSize;
            }else
                indexVal = indexVals[offset / 8];
            ret |= indexVal | LongHashMap.longHash(indexVal|offset) ;
        }
        return ret;
    }

    /**
     * Walks the entire log file and verifies it is structurally sound:
     * header magic, version, feature masks, a well-formed instruction stream
     * terminated by WAL_SEAL. Returns {@code false} (log should be
     * discarded) on any structural problem; header mismatches on
     * version/features throw instead.
     */
    protected boolean verifyLogFile() {
        assert(structuralLock.isHeldByCurrentThread());
        if(readOnly && log==null)
            return false;
        logSize = 0;
        //read headers
        if (log.isEmpty() || (log.getFile()!=null && log.getFile().length()<16) ||
                log.getInt(0) != HEADER || log.getLong(8) != LOG_SEAL) {
            return false;
        }
        if (log.getUnsignedShort(4) > STORE_VERSION) {
            throw new IOError(new IOException("New store format version, please use newer MapDB version"));
        }
        if (log.getUnsignedShort(6) != expectedMasks())
            throw new IllegalArgumentException("Log file created with different features. Please check compression, checksum or encryption");
        try {
            final CRC32 crc32 = new CRC32();
            //all good, calculate checksum
            logSize = 16;
            byte ins = log.getByte(logSize);
            logSize += 1;
            int crc = 0;
            while (ins != WAL_SEAL){
                if (ins == WAL_INDEX_LONG) {
                    long ioRecid = log.getLong(logSize);
                    logSize += 8;
                    long indexVal = log.getLong(logSize);
                    logSize += 8;
                    crc |= LongHashMap.longHash((logSize - 1 - 8 - 8) | WAL_INDEX_LONG | ioRecid | indexVal);
                } else if (ins == WAL_PHYS_ARRAY) {
                    final long offset2 = log.getLong(logSize);
                    logSize += 8;
                    // top 16 bits of the stored offset carry the payload size
                    final int size = (int) (offset2 >>> 48);
                    byte[] b = new byte[size];
                    log.getDataInput(logSize, size).readFully(b);
                    crc32.reset();
                    crc32.update(b);
                    crc |= LongHashMap.longHash(logSize | WAL_PHYS_ARRAY | offset2 | crc32.getValue());
                    logSize += size;
                } else if (ins == WAL_PHYS_ARRAY_ONE_LONG) {
                    final long offset2 = log.getLong(logSize);
                    logSize += 8;
                    // payload size excludes the 8-byte next-page link read below
                    final int size = (int) (offset2 >>> 48) - 8;
                    final long nextPageLink = log.getLong(logSize);
                    logSize += 8;
                    byte[] b = new byte[size];
                    log.getDataInput(logSize, size).readFully(b);
                    crc32.reset();
                    crc32.update(b);
                    crc |= LongHashMap.longHash((logSize) | WAL_PHYS_ARRAY_ONE_LONG | offset2 | nextPageLink | crc32.getValue());
                    logSize += size;
                } else if (ins == WAL_LONGSTACK_PAGE) {
                    final long offset = log.getLong(logSize);
                    logSize += 8;
                    final long origLogSize = logSize;
                    final int size = (int) (offset >>> 48);
                    crc |= LongHashMap.longHash(origLogSize | WAL_LONGSTACK_PAGE | offset);
                    byte[] b = new byte[size];
                    log.getDataInput(logSize, size).readFully(b);
                    crc32.reset();
                    crc32.update(b);
                    crc |= crc32.getValue();
                    // NOTE(review): the payload is read a second time into the same
                    // buffer here — looks redundant; confirm against upstream history
                    // before removing.
                    log.getDataInput(logSize, size).readFully(b);
                    logSize+=size;
                } else if (ins == WAL_SKIP_REST_OF_BLOCK) {
                    // padding instruction: jump to the next chunk boundary
                    logSize += CHUNK_SIZE - (logSize & CHUNK_SIZE_MOD_MASK);
                } else {
                    return false;
                }
                ins = log.getByte(logSize);
                logSize += 1;
            }
            long indexSize = log.getSixLong(logSize);
            logSize += 6;
            long physSize = log.getSixLong(logSize);
            logSize += 6;
            long freeSize = log.getSixLong(logSize);
            logSize += 6;
            long indexSum = log.getLong(logSize);
            logSize += 8;
            crc |= LongHashMap.longHash((logSize - 1 - 3 * 6 - 8) | indexSize | physSize | freeSize | indexSum);
            // NOTE(review): realCrc is read but never compared against the computed
            // crc — per the comment below, the checksum verification is deliberately
            // disabled; structural verification alone decides the result.
            final int realCrc = log.getInt(logSize);
            logSize += 4;
            logSize = 0;
            assert (structuralLock.isHeldByCurrentThread());
            //checksum is broken, so disable it
            return true;
        } catch (IOException e) {
            if(CC.LOG_STORE)
                LOG.log(Level.INFO, "Rollback corrupted log.",e);
            return false;
        }catch(IOError e){
            if(CC.LOG_STORE)
                LOG.log(Level.INFO, "Rollback corrupted log.",e);
            return false;
        }
    }

    /**
     * Replays a sealed log file into the index and phys files, then resets
     * the log. If the header is damaged or from an incompatible store, the
     * log is discarded without replay.
     */
    protected void replayLogFile(){
        assert(structuralLock.isHeldByCurrentThread());
        if(readOnly && log==null)
            return;
        //TODO how to handle log replay if we are readonly?
        logSize = 0;
        //read headers
        if(log.isEmpty() || log.getInt(0)!=HEADER || log.getUnsignedShort(4)>STORE_VERSION ||
                log.getLong(8) !=LOG_SEAL || log.getUnsignedShort(6)!=expectedMasks()){
            //wrong headers, discard log
            logReset();
            return;
        }
        //all good, start replay
        logSize=16;
        byte ins = log.getByte(logSize);
        logSize+=1;
        while(ins!=WAL_SEAL){
            if(ins == WAL_INDEX_LONG){
                long ioRecid = log.getLong(logSize);
                logSize+=8;
                long indexVal = log.getLong(logSize);
                logSize+=8;
                index.ensureAvailable(ioRecid+8);
                index.putLong(ioRecid, indexVal);
            }else if(ins == WAL_PHYS_ARRAY||ins == WAL_LONGSTACK_PAGE || ins == WAL_PHYS_ARRAY_ONE_LONG){
                long offset = log.getLong(logSize);
                logSize+=8;
                final int size = (int) (offset>>>48);
                offset = offset&MASK_OFFSET;
                //transfer byte[] directly from log file without copying into memory
                DataInput2 input = (DataInput2) log.getDataInput(logSize, size);
                ByteBuffer buf = input.buf.duplicate();
                buf.position(input.pos);
                buf.limit(input.pos+size);
                phys.ensureAvailable(offset+size);
                phys.putData(offset, buf);
                logSize+=size;
            }else if(ins == WAL_SKIP_REST_OF_BLOCK){
                logSize += CHUNK_SIZE -(logSize&CHUNK_SIZE_MOD_MASK);
            }else{
                throw new AssertionError("unknown trans log instruction '"+ins +"' at log offset: "+(logSize-1));
            }

            ins = log.getByte(logSize);
            logSize+=1;
        }
        // trailer: store sizes and index checksum written by commit()
        index.putLong(IO_INDEX_SIZE,log.getSixLong(logSize));
        logSize+=6;
        index.putLong(IO_PHYS_SIZE,log.getSixLong(logSize));
        logSize+=6;
        index.putLong(IO_FREE_SIZE,log.getSixLong(logSize));
        logSize+=6;
        index.putLong(IO_INDEX_SUM,log.getLong(logSize));
        logSize+=8;
        //flush dbs
        if(!syncOnCommitDisabled){
            phys.sync();
            index.sync();
        }
        logReset();
        assert(structuralLock.isHeldByCurrentThread());
    }

    /**
     * Discards all uncommitted changes by resetting the log and reloading
     * the committed index state.
     */
    @Override
    public void rollback() throws UnsupportedOperationException {
        lockAllWrite();
        try{
            //discard trans log
            logReset();
            reloadIndexFile();
        }finally {
            unlockAllWrite();
        }
    }

    /**
     * Resolves the linked-record chain for a recid that has uncommitted data
     * in the log. Returns {@code PREALLOC} as-is, {@code null} when the
     * record is unmodified or tombstoned, otherwise an array of phys
     * offsets resolved through the log file.
     */
    protected long[] getLinkedRecordsFromLog(long ioRecid){
        assert(locks[Store.lockPos(ioRecid)].writeLock().isHeldByCurrentThread());
        long[] ret0 = modified.get(ioRecid);
        if(ret0==PREALLOC) return ret0;

        if(ret0!=null && ret0!=TOMBSTONE){
            long[] ret = new long[ret0.length];
            for(int i=0;i<ret0.length;i++){
                long offset = ret0[i] & LOG_MASK_OFFSET;
                //offset now points to log file, read phys offset from log file
                ret[i] = log.getLong(offset-8);
            }
            return ret;
        }
        return null;
    }

    /**
     * Pops one value from the long-stack list {@code ioList}. Returns 0 when
     * the list is empty. When the last value of a page is taken, the page is
     * freed, the index is re-pointed at the previous page, and
     * {@code maxUsedIoList} is recomputed if necessary.
     */
    @Override
    protected long longStackTake(long ioList, boolean recursive) {
        assert(structuralLock.isHeldByCurrentThread());
        assert(ioList>=IO_FREE_RECID && ioList<IO_USER_START) :"wrong ioList: "+ioList;

        long dataOffset = indexVals[((int) ioList/8)];
        if(dataOffset == 0)
            return 0; //there is no such list, so just return 0

        long pos = dataOffset>>>48;
        dataOffset &= MASK_OFFSET;

        byte[] page = longStackGetPage(dataOffset);

        if(pos<8) throw new AssertionError();

        final long ret = longStackGetSixLong(page, (int) pos);

        //was it only record at that page?
        if(pos == 8){
            //yes, delete this page
            long next = longStackGetSixLong(page,2);
            long size = ((page[0]&0xFF)<<8) | (page[1]&0xFF);
            assert(size == page.length);
            if(next !=0){
                //update index so it points to previous page
                byte[] nextPage = longStackGetPage(next); //TODO this page is not modifed, but is added to LOG
                long nextSize = ((nextPage[0]&0xFF)<<8) | (nextPage[1]&0xFF);
                assert((nextSize-8)%6==0);
                indexVals[((int) ioList/8)]=((nextSize-6)<<48)|next;
                indexValsModified[((int) ioList/8)]=true;
            }else{
                //zero out index
                indexVals[((int) ioList/8)]=0L;
                indexValsModified[((int) ioList/8)]=true;
                if(maxUsedIoList==ioList){
                    //max value was just deleted, so find new maxima
                    while(indexVals[((int) maxUsedIoList/8)]==0 && maxUsedIoList>IO_FREE_RECID){
                        maxUsedIoList-=8;
                    }
                }
            }
            //put space used by this page into free list
            freePhysPut((size<<48) | dataOffset, true);
            assert(dataOffset>>>48==0);
            longStackPages.remove(dataOffset);
        }else{
            //no, it was not last record at this page, so just decrement the counter
            pos-=6;
            indexVals[((int) ioList/8)] = (pos<<48)| dataOffset;
            indexValsModified[((int) ioList/8)] = true;
        }
        //System.out.println("longStackTake: "+ioList+" - "+ret);
        return ret;
    }

    /**
     * Pushes {@code offset} onto the long-stack list {@code ioList},
     * allocating a fresh page when the list is empty or the current page is
     * full. New/updated pages live in {@code longStackPages} until commit.
     */
    @Override
    protected void longStackPut(long ioList, long offset, boolean recursive) {
        assert(structuralLock.isHeldByCurrentThread());
        assert(offset>>>48==0);
        assert(ioList>=IO_FREE_RECID && ioList<=IO_USER_START): "wrong ioList: "+ioList;

        long dataOffset = indexVals[((int) ioList/8)];
        long pos = dataOffset>>>48;
        dataOffset &= MASK_OFFSET;

        if(dataOffset == 0){ //empty list?
            //yes empty, create new page and fill it with values
            final long listPhysid = freePhysTake((int) LONG_STACK_PREF_SIZE,true,true) &MASK_OFFSET;
            if(listPhysid == 0) throw new AssertionError();
            assert(listPhysid>>>48==0);
            //set previous Free Index List page to zero as this is first page
            //also set size of this record
            byte[] page = new byte[(int) LONG_STACK_PREF_SIZE];
            page[0] = (byte) (0xFF & (page.length>>>8));
            page[1] = (byte) (0xFF & (page.length));
            longStackPutSixLong(page,2,0L);
            //set record
            longStackPutSixLong(page, 8, offset);
            //and update index file with new page location
            indexVals[((int) ioList/8)] = ( 8L << 48) | listPhysid;
            indexValsModified[((int) ioList/8)] = true;
            if(maxUsedIoList<=ioList) maxUsedIoList=ioList;
            longStackPages.put(listPhysid,page);
        }else{
            byte[] page = longStackGetPage(dataOffset);
            long size = ((page[0]&0xFF)<<8)|(page[1]&0xFF);
            assert(pos+6<=size);
            if(pos+6==size){ //is current page full?
                long newPageSize = LONG_STACK_PREF_SIZE;
                if(ioList == size2ListIoRecid(LONG_STACK_PREF_SIZE)){
                    //TODO double allocation fix needs more investigation
                    newPageSize = LONG_STACK_PREF_SIZE_ALTER;
                }
                //yes it is full, so we need to allocate new page and write our number there
                final long listPhysid = freePhysTake((int) newPageSize,true,true) &MASK_OFFSET;
                if(listPhysid == 0) throw new AssertionError();

                byte[] newPage = new byte[(int) newPageSize];
                //set current page size
                newPage[0] = (byte) (0xFF & (newPageSize>>>8));
                newPage[1] = (byte) (0xFF & (newPageSize));
                //set location to previous page and
                longStackPutSixLong(newPage,2,dataOffset&MASK_OFFSET);
                //set the value itself
                longStackPutSixLong(newPage, 8, offset);
                assert(listPhysid>>>48==0);
                longStackPages.put(listPhysid,newPage);
                //and update index file with new page location and number of records
                indexVals[((int) ioList/8)] = (8L<<48) | listPhysid;
                indexValsModified[((int) ioList/8)] = true;
            }else{
                //there is space on page, so just write offset and increase the counter
                pos+=6;
                longStackPutSixLong(page, (int) pos,offset);
                indexVals[((int) ioList/8)] = (pos<<48)| dataOffset;
                indexValsModified[((int) ioList/8)] = true;
            }
        }
    }

    /** Reads a big-endian 6-byte unsigned value from {@code page} at {@code pos}. */
    protected static long longStackGetSixLong(byte[] page, int pos) {
        return
                ((long) (page[pos + 0] & 0xff) << 40) |
                ((long) (page[pos + 1] & 0xff) << 32) |
                ((long) (page[pos + 2] & 0xff) << 24) |
                ((long) (page[pos + 3] & 0xff) << 16) |
                ((long) (page[pos + 4] & 0xff) << 8) |
                ((long) (page[pos + 5] & 0xff) << 0);
    }

    /** Writes a big-endian 6-byte value into {@code page}; asserts it fits in 48 bits. */
    protected static void longStackPutSixLong(byte[] page, int pos, long value) {
        assert(value>=0 && (value>>>6*8)==0): "value does not fit";
        page[pos + 0] = (byte) (0xff & (value >> 40));
        page[pos + 1] = (byte) (0xff & (value >> 32));
        page[pos + 2] = (byte) (0xff & (value >> 24));
        page[pos + 3] = (byte) (0xff & (value >> 16));
        page[pos + 4] = (byte) (0xff & (value >> 8));
        page[pos + 5] = (byte) (0xff & (value >> 0));
    }

    /**
     * Returns the long-stack page at {@code offset}, loading it from the
     * phys file into the {@code longStackPages} cache on first access.
     */
    protected byte[] longStackGetPage(long offset) {
        assert(offset>=16);
        assert(offset>>>48==0);
        byte[] ret = longStackPages.get(offset);
        if(ret==null){
            //read page size
            int size = phys.getUnsignedShort(offset);
            assert(size>=8+6);
            ret = new byte[size];
            try {
                phys.getDataInput(offset,size).readFully(ret);
            } catch (IOException e) {
                throw new IOError(e);
            }
            //and load page
            longStackPages.put(offset,ret);
        }
        return ret;
    }

    /**
     * Closes the store: runs close listeners, persists POJO serializer
     * state, syncs and closes log/index/phys volumes, and optionally
     * deletes the backing files.
     */
    @Override
    public void close() {
        for(Runnable closeListener:closeListeners)
            closeListener.run();

        if(serializerPojo!=null && serializerPojo.hasUnsavedChanges()){
            serializerPojo.save(this);
        }

        lockAllWrite();
        try{
            if(log !=null){
                log.sync();
                log.close();
                if(deleteFilesAfterClose){
                    log.deleteFile();
                }
            }

            index.sync();
            phys.sync();

            index.close();
            phys.close();
            if(deleteFilesAfterClose){
                index.deleteFile();
                phys.deleteFile();
            }
            index = null;
            phys = null;
        }finally {
            unlockAllWrite();
        }
    }

    /**
     * Pre-compaction hook: refuses to compact while uncommitted WAL data
     * exists.
     */
    // NOTE(review): the message below contains a typo ("than" should be "then");
    // it is a runtime string, so it is left untouched here.
    @Override
    protected void compactPreUnderLock() {
        assert(structuralLock.isLocked());
        if(logDirty())
            throw new IllegalAccessError("WAL not empty; commit first, than compact");
    }

    /** Post-compaction hook: reloads the index cache from the rewritten files. */
    @Override
    protected void compactPostUnderLock() {
        assert(structuralLock.isLocked());
        reloadIndexFile();
    }

    /** This store is transactional; rollback is always available. */
    @Override
    public boolean canRollback(){
        return true;
    }

    /** Atomically ORs {@code cs} into the shared log checksum accumulator (CAS loop). */
    protected void logChecksumAdd(int cs) {
        for(;;){
            int old = logChecksum.get();
            if(logChecksum.compareAndSet(old,old|cs))
                return;
        }
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.sql.analyzer; import com.facebook.presto.sql.tree.QualifiedName; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import javax.annotation.concurrent.Immutable; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Predicate; import static com.facebook.presto.util.ImmutableCollectors.toImmutableList; import static com.facebook.presto.util.ImmutableCollectors.toImmutableSet; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkElementIndex; import static com.google.common.base.Predicates.not; import static java.util.Objects.requireNonNull; /** * TODO: this needs to be merged with RowType at some point (when the type system is unified) */ @Immutable public class RelationType { private final List<Field> visibleFields; private final List<Field> allFields; private final Map<Field, Integer> fieldIndexes; public RelationType(Field... 
fields) { this(ImmutableList.copyOf(fields)); } public RelationType(List<Field> fields) { requireNonNull(fields, "fields is null"); this.allFields = ImmutableList.copyOf(fields); this.visibleFields = ImmutableList.copyOf(Iterables.filter(fields, not(Field::isHidden))); int index = 0; ImmutableMap.Builder<Field, Integer> builder = ImmutableMap.builder(); for (Field field : fields) { builder.put(field, index++); } fieldIndexes = builder.build(); } /** * Gets the index of the specified field or -1 if not found. */ public int indexOf(Field field) { return fieldIndexes.get(field); } /** * Gets the field at the specified index. */ public Field getFieldByIndex(int fieldIndex) { checkElementIndex(fieldIndex, allFields.size(), "fieldIndex"); return allFields.get(fieldIndex); } /** * Gets only the visible fields. * No assumptions should be made about the order of the fields returned from this method. * To obtain the index of a field, call indexOf. */ public Collection<Field> getVisibleFields() { return visibleFields; } public int getVisibleFieldCount() { return visibleFields.size(); } /** * Gets all fields including hidden fields. * No assumptions should be made about the order of the fields returned from this method. * To obtain the index of a field, call indexOf. */ public Collection<Field> getAllFields() { return ImmutableSet.copyOf(allFields); } /** * Gets the count of all fields including hidden fields. */ public int getAllFieldCount() { return allFields.size(); } /** * Returns all unique relations in this tuple. * For detecting duplicate relations in a Join. 
*/ public Set<QualifiedName> getRelationAliases() { return allFields.stream() .map(Field::getRelationAlias) .filter(Optional::isPresent) .map(Optional::get) .collect(toImmutableSet()); } /** * This method is used for SELECT * or x.* queries */ public List<Field> resolveFieldsWithPrefix(Optional<QualifiedName> prefix) { return visibleFields.stream() .filter(input -> input.matchesPrefix(prefix)) .collect(toImmutableList()); } /** * Gets the index of all columns matching the specified name */ public List<Field> resolveFields(QualifiedName name) { return allFields.stream() .filter(input -> input.canResolve(name)) .collect(toImmutableList()); } public Predicate<QualifiedName> canResolvePredicate() { return input -> !resolveFields(input).isEmpty(); } /** * Creates a new tuple descriptor containing all fields from this tuple descriptor * and all fields from the specified tuple descriptor. */ public RelationType joinWith(RelationType other) { List<Field> fields = ImmutableList.<Field>builder() .addAll(this.allFields) .addAll(other.allFields) .build(); return new RelationType(fields); } /** * Creates a new tuple descriptor with the relation, and, optionally, the columns aliased. 
*/ public RelationType withAlias(String relationAlias, List<String> columnAliases) { if (columnAliases != null) { checkArgument(columnAliases.size() == visibleFields.size(), "Column alias list has %s entries but '%s' has %s columns available", columnAliases.size(), relationAlias, visibleFields.size()); } ImmutableList.Builder<Field> fieldsBuilder = ImmutableList.builder(); for (int i = 0; i < allFields.size(); i++) { Field field = allFields.get(i); Optional<String> columnAlias = field.getName(); if (columnAliases == null) { fieldsBuilder.add(Field.newQualified(QualifiedName.of(relationAlias), columnAlias, field.getType(), field.isHidden())); } else if (!field.isHidden()) { // hidden fields are not exposed when there are column aliases columnAlias = Optional.of(columnAliases.get(i)); fieldsBuilder.add(Field.newQualified(QualifiedName.of(relationAlias), columnAlias, field.getType(), false)); } } return new RelationType(fieldsBuilder.build()); } /** * Creates a new tuple descriptor containing only the visible fields. */ public RelationType withOnlyVisibleFields() { return new RelationType(visibleFields); } @Override public String toString() { return allFields.toString(); } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.autoscaling.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * Describes an EC2 instance. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/autoscaling-2011-01-01/Instance" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class Instance implements Serializable, Cloneable { /** * <p> * The ID of the instance. * </p> */ private String instanceId; /** * <p> * The instance type of the EC2 instance. * </p> */ private String instanceType; /** * <p> * The Availability Zone in which the instance is running. * </p> */ private String availabilityZone; /** * <p> * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For information * about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * </p> */ private String lifecycleState; /** * <p> * The last reported health status of the instance. "Healthy" means that the instance is healthy and should remain * in service. "Unhealthy" means that the instance is unhealthy and that Amazon EC2 Auto Scaling should terminate * and replace it. * </p> */ private String healthStatus; /** * <p> * The launch configuration associated with the instance. 
* </p> */ private String launchConfigurationName; /** * <p> * The launch template for the instance. * </p> */ private LaunchTemplateSpecification launchTemplate; /** * <p> * Indicates whether the instance is protected from termination by Amazon EC2 Auto Scaling when scaling in. * </p> */ private Boolean protectedFromScaleIn; /** * <p> * The number of capacity units contributed by the instance based on its instance type. * </p> * <p> * Valid Range: Minimum value of 1. Maximum value of 999. * </p> */ private String weightedCapacity; /** * <p> * The ID of the instance. * </p> * * @param instanceId * The ID of the instance. */ public void setInstanceId(String instanceId) { this.instanceId = instanceId; } /** * <p> * The ID of the instance. * </p> * * @return The ID of the instance. */ public String getInstanceId() { return this.instanceId; } /** * <p> * The ID of the instance. * </p> * * @param instanceId * The ID of the instance. * @return Returns a reference to this object so that method calls can be chained together. */ public Instance withInstanceId(String instanceId) { setInstanceId(instanceId); return this; } /** * <p> * The instance type of the EC2 instance. * </p> * * @param instanceType * The instance type of the EC2 instance. */ public void setInstanceType(String instanceType) { this.instanceType = instanceType; } /** * <p> * The instance type of the EC2 instance. * </p> * * @return The instance type of the EC2 instance. */ public String getInstanceType() { return this.instanceType; } /** * <p> * The instance type of the EC2 instance. * </p> * * @param instanceType * The instance type of the EC2 instance. * @return Returns a reference to this object so that method calls can be chained together. */ public Instance withInstanceType(String instanceType) { setInstanceType(instanceType); return this; } /** * <p> * The Availability Zone in which the instance is running. * </p> * * @param availabilityZone * The Availability Zone in which the instance is running. 
*/ public void setAvailabilityZone(String availabilityZone) { this.availabilityZone = availabilityZone; } /** * <p> * The Availability Zone in which the instance is running. * </p> * * @return The Availability Zone in which the instance is running. */ public String getAvailabilityZone() { return this.availabilityZone; } /** * <p> * The Availability Zone in which the instance is running. * </p> * * @param availabilityZone * The Availability Zone in which the instance is running. * @return Returns a reference to this object so that method calls can be chained together. */ public Instance withAvailabilityZone(String availabilityZone) { setAvailabilityZone(availabilityZone); return this; } /** * <p> * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For information * about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * </p> * * @param lifecycleState * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For * information about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * @see LifecycleState */ public void setLifecycleState(String lifecycleState) { this.lifecycleState = lifecycleState; } /** * <p> * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For information * about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * </p> * * @return A description of the current lifecycle state. The <code>Quarantined</code> state is not used. 
For * information about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * @see LifecycleState */ public String getLifecycleState() { return this.lifecycleState; } /** * <p> * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For information * about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * </p> * * @param lifecycleState * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For * information about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. * @see LifecycleState */ public Instance withLifecycleState(String lifecycleState) { setLifecycleState(lifecycleState); return this; } /** * <p> * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For information * about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * </p> * * @param lifecycleState * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For * information about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. 
* @see LifecycleState */ public void setLifecycleState(LifecycleState lifecycleState) { withLifecycleState(lifecycleState); } /** * <p> * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For information * about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * </p> * * @param lifecycleState * A description of the current lifecycle state. The <code>Quarantined</code> state is not used. For * information about lifecycle states, see <a * href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/AutoScalingGroupLifecycle.html">Instance * lifecycle</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. * @see LifecycleState */ public Instance withLifecycleState(LifecycleState lifecycleState) { this.lifecycleState = lifecycleState.toString(); return this; } /** * <p> * The last reported health status of the instance. "Healthy" means that the instance is healthy and should remain * in service. "Unhealthy" means that the instance is unhealthy and that Amazon EC2 Auto Scaling should terminate * and replace it. * </p> * * @param healthStatus * The last reported health status of the instance. "Healthy" means that the instance is healthy and should * remain in service. "Unhealthy" means that the instance is unhealthy and that Amazon EC2 Auto Scaling * should terminate and replace it. */ public void setHealthStatus(String healthStatus) { this.healthStatus = healthStatus; } /** * <p> * The last reported health status of the instance. "Healthy" means that the instance is healthy and should remain * in service. "Unhealthy" means that the instance is unhealthy and that Amazon EC2 Auto Scaling should terminate * and replace it. * </p> * * @return The last reported health status of the instance. 
"Healthy" means that the instance is healthy and should * remain in service. "Unhealthy" means that the instance is unhealthy and that Amazon EC2 Auto Scaling * should terminate and replace it. */ public String getHealthStatus() { return this.healthStatus; } /** * <p> * The last reported health status of the instance. "Healthy" means that the instance is healthy and should remain * in service. "Unhealthy" means that the instance is unhealthy and that Amazon EC2 Auto Scaling should terminate * and replace it. * </p> * * @param healthStatus * The last reported health status of the instance. "Healthy" means that the instance is healthy and should * remain in service. "Unhealthy" means that the instance is unhealthy and that Amazon EC2 Auto Scaling * should terminate and replace it. * @return Returns a reference to this object so that method calls can be chained together. */ public Instance withHealthStatus(String healthStatus) { setHealthStatus(healthStatus); return this; } /** * <p> * The launch configuration associated with the instance. * </p> * * @param launchConfigurationName * The launch configuration associated with the instance. */ public void setLaunchConfigurationName(String launchConfigurationName) { this.launchConfigurationName = launchConfigurationName; } /** * <p> * The launch configuration associated with the instance. * </p> * * @return The launch configuration associated with the instance. */ public String getLaunchConfigurationName() { return this.launchConfigurationName; } /** * <p> * The launch configuration associated with the instance. * </p> * * @param launchConfigurationName * The launch configuration associated with the instance. * @return Returns a reference to this object so that method calls can be chained together. */ public Instance withLaunchConfigurationName(String launchConfigurationName) { setLaunchConfigurationName(launchConfigurationName); return this; } /** * <p> * The launch template for the instance. 
* </p> * * @param launchTemplate * The launch template for the instance. */ public void setLaunchTemplate(LaunchTemplateSpecification launchTemplate) { this.launchTemplate = launchTemplate; } /** * <p> * The launch template for the instance. * </p> * * @return The launch template for the instance. */ public LaunchTemplateSpecification getLaunchTemplate() { return this.launchTemplate; } /** * <p> * The launch template for the instance. * </p> * * @param launchTemplate * The launch template for the instance. * @return Returns a reference to this object so that method calls can be chained together. */ public Instance withLaunchTemplate(LaunchTemplateSpecification launchTemplate) { setLaunchTemplate(launchTemplate); return this; } /** * <p> * Indicates whether the instance is protected from termination by Amazon EC2 Auto Scaling when scaling in. * </p> * * @param protectedFromScaleIn * Indicates whether the instance is protected from termination by Amazon EC2 Auto Scaling when scaling in. */ public void setProtectedFromScaleIn(Boolean protectedFromScaleIn) { this.protectedFromScaleIn = protectedFromScaleIn; } /** * <p> * Indicates whether the instance is protected from termination by Amazon EC2 Auto Scaling when scaling in. * </p> * * @return Indicates whether the instance is protected from termination by Amazon EC2 Auto Scaling when scaling in. */ public Boolean getProtectedFromScaleIn() { return this.protectedFromScaleIn; } /** * <p> * Indicates whether the instance is protected from termination by Amazon EC2 Auto Scaling when scaling in. * </p> * * @param protectedFromScaleIn * Indicates whether the instance is protected from termination by Amazon EC2 Auto Scaling when scaling in. * @return Returns a reference to this object so that method calls can be chained together. 
*/ public Instance withProtectedFromScaleIn(Boolean protectedFromScaleIn) { setProtectedFromScaleIn(protectedFromScaleIn); return this; } /** * <p> * Indicates whether the instance is protected from termination by Amazon EC2 Auto Scaling when scaling in. * </p> * * @return Indicates whether the instance is protected from termination by Amazon EC2 Auto Scaling when scaling in. */ public Boolean isProtectedFromScaleIn() { return this.protectedFromScaleIn; } /** * <p> * The number of capacity units contributed by the instance based on its instance type. * </p> * <p> * Valid Range: Minimum value of 1. Maximum value of 999. * </p> * * @param weightedCapacity * The number of capacity units contributed by the instance based on its instance type.</p> * <p> * Valid Range: Minimum value of 1. Maximum value of 999. */ public void setWeightedCapacity(String weightedCapacity) { this.weightedCapacity = weightedCapacity; } /** * <p> * The number of capacity units contributed by the instance based on its instance type. * </p> * <p> * Valid Range: Minimum value of 1. Maximum value of 999. * </p> * * @return The number of capacity units contributed by the instance based on its instance type.</p> * <p> * Valid Range: Minimum value of 1. Maximum value of 999. */ public String getWeightedCapacity() { return this.weightedCapacity; } /** * <p> * The number of capacity units contributed by the instance based on its instance type. * </p> * <p> * Valid Range: Minimum value of 1. Maximum value of 999. * </p> * * @param weightedCapacity * The number of capacity units contributed by the instance based on its instance type.</p> * <p> * Valid Range: Minimum value of 1. Maximum value of 999. * @return Returns a reference to this object so that method calls can be chained together. */ public Instance withWeightedCapacity(String weightedCapacity) { setWeightedCapacity(weightedCapacity); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. 
Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getInstanceId() != null) sb.append("InstanceId: ").append(getInstanceId()).append(","); if (getInstanceType() != null) sb.append("InstanceType: ").append(getInstanceType()).append(","); if (getAvailabilityZone() != null) sb.append("AvailabilityZone: ").append(getAvailabilityZone()).append(","); if (getLifecycleState() != null) sb.append("LifecycleState: ").append(getLifecycleState()).append(","); if (getHealthStatus() != null) sb.append("HealthStatus: ").append(getHealthStatus()).append(","); if (getLaunchConfigurationName() != null) sb.append("LaunchConfigurationName: ").append(getLaunchConfigurationName()).append(","); if (getLaunchTemplate() != null) sb.append("LaunchTemplate: ").append(getLaunchTemplate()).append(","); if (getProtectedFromScaleIn() != null) sb.append("ProtectedFromScaleIn: ").append(getProtectedFromScaleIn()).append(","); if (getWeightedCapacity() != null) sb.append("WeightedCapacity: ").append(getWeightedCapacity()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof Instance == false) return false; Instance other = (Instance) obj; if (other.getInstanceId() == null ^ this.getInstanceId() == null) return false; if (other.getInstanceId() != null && other.getInstanceId().equals(this.getInstanceId()) == false) return false; if (other.getInstanceType() == null ^ this.getInstanceType() == null) return false; if (other.getInstanceType() != null && other.getInstanceType().equals(this.getInstanceType()) == false) return false; if (other.getAvailabilityZone() == null ^ this.getAvailabilityZone() == null) return false; if (other.getAvailabilityZone() != null && 
other.getAvailabilityZone().equals(this.getAvailabilityZone()) == false) return false; if (other.getLifecycleState() == null ^ this.getLifecycleState() == null) return false; if (other.getLifecycleState() != null && other.getLifecycleState().equals(this.getLifecycleState()) == false) return false; if (other.getHealthStatus() == null ^ this.getHealthStatus() == null) return false; if (other.getHealthStatus() != null && other.getHealthStatus().equals(this.getHealthStatus()) == false) return false; if (other.getLaunchConfigurationName() == null ^ this.getLaunchConfigurationName() == null) return false; if (other.getLaunchConfigurationName() != null && other.getLaunchConfigurationName().equals(this.getLaunchConfigurationName()) == false) return false; if (other.getLaunchTemplate() == null ^ this.getLaunchTemplate() == null) return false; if (other.getLaunchTemplate() != null && other.getLaunchTemplate().equals(this.getLaunchTemplate()) == false) return false; if (other.getProtectedFromScaleIn() == null ^ this.getProtectedFromScaleIn() == null) return false; if (other.getProtectedFromScaleIn() != null && other.getProtectedFromScaleIn().equals(this.getProtectedFromScaleIn()) == false) return false; if (other.getWeightedCapacity() == null ^ this.getWeightedCapacity() == null) return false; if (other.getWeightedCapacity() != null && other.getWeightedCapacity().equals(this.getWeightedCapacity()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getInstanceId() == null) ? 0 : getInstanceId().hashCode()); hashCode = prime * hashCode + ((getInstanceType() == null) ? 0 : getInstanceType().hashCode()); hashCode = prime * hashCode + ((getAvailabilityZone() == null) ? 0 : getAvailabilityZone().hashCode()); hashCode = prime * hashCode + ((getLifecycleState() == null) ? 0 : getLifecycleState().hashCode()); hashCode = prime * hashCode + ((getHealthStatus() == null) ? 
0 : getHealthStatus().hashCode()); hashCode = prime * hashCode + ((getLaunchConfigurationName() == null) ? 0 : getLaunchConfigurationName().hashCode()); hashCode = prime * hashCode + ((getLaunchTemplate() == null) ? 0 : getLaunchTemplate().hashCode()); hashCode = prime * hashCode + ((getProtectedFromScaleIn() == null) ? 0 : getProtectedFromScaleIn().hashCode()); hashCode = prime * hashCode + ((getWeightedCapacity() == null) ? 0 : getWeightedCapacity().hashCode()); return hashCode; } @Override public Instance clone() { try { return (Instance) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.datanode; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.HardLink; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.LayoutVersion; import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption; import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException; import org.apache.hadoop.hdfs.server.common.Storage; import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.util.Daemon; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Manages storage for the set of BlockPoolSlices which share a particular * block pool id, on this DataNode. 
* <p/> * This class supports the following functionality: * <ol> * <li> Formatting a new block pool storage</li> * <li> Recovering a storage state to a consistent state (if possible></li> * <li> Taking a snapshot of the block pool during upgrade</li> * <li> Rolling back a block pool to a previous snapshot</li> * <li> Finalizing block storage by deletion of a snapshot</li> * </ul> * * @see Storage */ @InterfaceAudience.Private public class BlockPoolSliceStorage extends Storage { private static final Pattern BLOCK_POOL_PATH_PATTERN = Pattern.compile( "^(.*)" + "(\\/BP-[0-9]+\\-\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\-[0-9]+\\/.*)$"); private String blockpoolID = ""; // id of the blockpool public BlockPoolSliceStorage(StorageInfo storageInfo, String bpid) { super(NodeType.DATA_NODE, storageInfo); blockpoolID = bpid; } BlockPoolSliceStorage(int namespaceID, String bpID, long cTime, String clusterId) { super(NodeType.DATA_NODE); this.namespaceID = namespaceID; this.blockpoolID = bpID; this.cTime = cTime; this.clusterID = clusterId; } private BlockPoolSliceStorage() { super(NodeType.DATA_NODE); } /** * Analyze storage directories. Recover from previous transitions if * required. * * @param datanode * Datanode to which this storage belongs to * @param nsInfo * namespace information * @param dataDirs * storage directories of block pool * @param startOpt * startup option * @throws IOException * on error */ void recoverTransitionRead(DataNode datanode, NamespaceInfo nsInfo, Collection<File> dataDirs, StartupOption startOpt) throws IOException { assert HdfsConstants.LAYOUT_VERSION == nsInfo .getLayoutVersion() : "Block-pool and name-node layout versions must be the same."; // 1. For each BP data directory analyze the state and // check whether all is consistent before transitioning. 
this.storageDirs = new ArrayList<>(dataDirs.size()); ArrayList<StorageState> dataDirStates = new ArrayList<>(dataDirs.size()); for (Iterator<File> it = dataDirs.iterator(); it.hasNext(); ) { File dataDir = it.next(); StorageDirectory sd = new StorageDirectory(dataDir, null, false); StorageState curState; try { curState = sd.analyzeStorage(startOpt, this); // sd is locked but not opened switch (curState) { case NORMAL: break; case NON_EXISTENT: // ignore this storage LOG.info("Storage directory " + dataDir + " does not exist."); it.remove(); continue; case NOT_FORMATTED: // format LOG.info("Storage directory " + dataDir + " is not formatted."); LOG.info("Formatting ..."); format(sd, nsInfo); break; default: // recovery part is common sd.doRecover(curState); } } catch (IOException ioe) { sd.unlock(); throw ioe; } // add to the storage list. This is inherited from parent class, Storage. addStorageDir(sd); dataDirStates.add(curState); } if (dataDirs.size() == 0) // none of the data dirs exist { throw new IOException( "All specified directories are not accessible or do not exist."); } // 2. Do transitions // Each storage directory is treated individually. // During startup some of them can upgrade or roll back // while others could be up-to-date for the regular startup. for (int idx = 0; idx < getNumStorageDirs(); idx++) { doTransition(getStorageDir(idx), nsInfo, startOpt); assert getLayoutVersion() == nsInfo .getLayoutVersion() : "Data-node and name-node layout versions must be the same."; assert getCTime() == nsInfo .getCTime() : "Data-node and name-node CTimes must be the same."; } // 3. Update all storages. Some of them might have just been formatted. this.writeAll(); } /** * Format a block pool slice storage. * * @param dnCurDir * DataStorage current directory * @param nsInfo * the name space info * @throws IOException * Signals that an I/O exception has occurred. 
*/ void format(File dnCurDir, NamespaceInfo nsInfo) throws IOException { File curBpDir = getBpRoot(nsInfo.getBlockPoolID(), dnCurDir); StorageDirectory bpSdir = new StorageDirectory(curBpDir); format(bpSdir, nsInfo); } /** * Format a block pool slice storage. * * @param sd * the block pool storage * @param nsInfo * the name space info * @throws IOException * Signals that an I/O exception has occurred. */ private void format(StorageDirectory bpSdir, NamespaceInfo nsInfo) throws IOException { LOG.info("Formatting block pool " + blockpoolID + " directory " + bpSdir.getCurrentDir()); bpSdir.clearDirectory(); // create directory this.layoutVersion = HdfsConstants.LAYOUT_VERSION; this.cTime = nsInfo.getCTime(); this.namespaceID = nsInfo.getNamespaceID(); this.blockpoolID = nsInfo.getBlockPoolID(); this.storageType = NodeType.DATA_NODE; writeProperties(bpSdir); } /** * Set layoutVersion, namespaceID and blockpoolID into block pool storage * VERSION file */ @Override protected void setPropertiesFromFields(Properties props, StorageDirectory sd) throws IOException { props.setProperty("layoutVersion", String.valueOf(layoutVersion)); props.setProperty("namespaceID", String.valueOf(namespaceID)); props.setProperty("blockpoolID", blockpoolID); props.setProperty("cTime", String.valueOf(cTime)); } /** * Validate and set block pool ID */ private void setBlockPoolID(File storage, String bpid) throws InconsistentFSStateException { if (bpid == null || bpid.equals("")) { throw new InconsistentFSStateException(storage, "file " + STORAGE_FILE_VERSION + " is invalid."); } if (!blockpoolID.equals("") && !blockpoolID.equals(bpid)) { throw new InconsistentFSStateException(storage, "Unexepcted blockpoolID " + bpid + " . 
Expected " + blockpoolID); } blockpoolID = bpid; } @Override protected void setFieldsFromProperties(Properties props, StorageDirectory sd) throws IOException { setLayoutVersion(props, sd); setNamespaceID(props, sd); setcTime(props, sd); String sbpid = props.getProperty("blockpoolID"); setBlockPoolID(sd.getRoot(), sbpid); } /** * Analyze whether a transition of the BP state is required and * perform it if necessary. * <br> * Rollback if previousLV >= LAYOUT_VERSION && prevCTime <= namenode.cTime. * Upgrade if this.LV > LAYOUT_VERSION || this.cTime < namenode.cTime Regular * startup if this.LV = LAYOUT_VERSION && this.cTime = namenode.cTime * * @param dn * DataNode to which this storage belongs to * @param sd * storage directory <SD>/current/<bpid> * @param nsInfo * namespace info * @param startOpt * startup option * @throws IOException */ private void doTransition(StorageDirectory sd, NamespaceInfo nsInfo, StartupOption startOpt) throws IOException { if (startOpt == StartupOption.ROLLBACK) { doRollback(sd, nsInfo); // rollback if applicable } readProperties(sd); checkVersionUpgradable(this.layoutVersion); assert this.layoutVersion >= HdfsConstants.LAYOUT_VERSION : "Future version is not allowed"; if (getNamespaceID() != nsInfo.getNamespaceID()) { throw new IOException( "Incompatible namespaceIDs in " + sd.getRoot().getCanonicalPath() + ": namenode namespaceID = " + nsInfo.getNamespaceID() + "; datanode namespaceID = " + getNamespaceID()); } if (!blockpoolID.equals(nsInfo.getBlockPoolID())) { throw new IOException( "Incompatible blockpoolIDs in " + sd.getRoot().getCanonicalPath() + ": namenode blockpoolID = " + nsInfo.getBlockPoolID() + "; datanode blockpoolID = " + blockpoolID); } if (this.layoutVersion == HdfsConstants.LAYOUT_VERSION && this.cTime == nsInfo.getCTime()) { return; // regular startup } if (this.layoutVersion > HdfsConstants.LAYOUT_VERSION || this.cTime < nsInfo.getCTime()) { doUpgrade(sd, nsInfo); // upgrade return; } // layoutVersion == 
LAYOUT_VERSION && this.cTime > nsInfo.cTime // must shutdown throw new IOException( "Datanode state: LV = " + this.getLayoutVersion() + " CTime = " + this.getCTime() + " is newer than the namespace state: LV = " + nsInfo.getLayoutVersion() + " CTime = " + nsInfo.getCTime()); } /** * Upgrade to any release after 0.22 (0.22 included) release e.g. 0.22 => * 0.23 * Upgrade procedure is as follows: * <ol> * <li>If <SD>/current/<bpid>/previous exists then delete it</li> * <li>Rename <SD>/current/<bpid>/current to * <SD>/current/bpid/current/previous.tmp</li> * <li>Create new <SD>current/<bpid>/current directory</li> * <ol> * <li>Hard links for block files are created from previous.tmp to * current</li> * <li>Save new version file in current directory</li> * </ol> * <li>Rename previous.tmp to previous</li> </ol> * * @param bpSd * storage directory <SD>/current/<bpid> * @param nsInfo * Namespace Info from the namenode * @throws IOException * on error */ void doUpgrade(StorageDirectory bpSd, NamespaceInfo nsInfo) throws IOException { // Upgrading is applicable only to release with federation or after if (!LayoutVersion.supports(Feature.FEDERATION, layoutVersion)) { return; } LOG.info("Upgrading block pool storage directory " + bpSd.getRoot() + ".\n old LV = " + this.getLayoutVersion() + "; old CTime = " + this.getCTime() + ".\n new LV = " + nsInfo.getLayoutVersion() + "; new CTime = " + nsInfo.getCTime()); // get <SD>/previous directory String dnRoot = getDataNodeStorageRoot(bpSd.getRoot().getCanonicalPath()); StorageDirectory dnSdStorage = new StorageDirectory(new File(dnRoot)); File dnPrevDir = dnSdStorage.getPreviousDir(); // If <SD>/previous directory exists delete it if (dnPrevDir.exists()) { deleteDir(dnPrevDir); } File bpCurDir = bpSd.getCurrentDir(); File bpPrevDir = bpSd.getPreviousDir(); assert bpCurDir.exists() : "BP level current directory must exist."; cleanupDetachDir(new File(bpCurDir, DataStorage.STORAGE_DIR_DETACHED)); // 1. 
Delete <SD>/current/<bpid>/previous dir before upgrading if (bpPrevDir.exists()) { deleteDir(bpPrevDir); } File bpTmpDir = bpSd.getPreviousTmp(); assert !bpTmpDir.exists() : "previous.tmp directory must not exist."; // 2. Rename <SD>/curernt/<bpid>/current to <SD>/curernt/<bpid>/previous.tmp rename(bpCurDir, bpTmpDir); // 3. Create new <SD>/current with block files hardlinks and VERSION linkAllBlocks(bpTmpDir, bpCurDir); this.layoutVersion = HdfsConstants.LAYOUT_VERSION; assert this.namespaceID == nsInfo .getNamespaceID() : "Data-node and name-node layout versions must be the same."; this.cTime = nsInfo.getCTime(); writeProperties(bpSd); // 4.rename <SD>/curernt/<bpid>/previous.tmp to <SD>/curernt/<bpid>/previous rename(bpTmpDir, bpPrevDir); LOG.info("Upgrade of block pool " + blockpoolID + " at " + bpSd.getRoot() + " is complete"); } /** * Cleanup the detachDir. * <p/> * If the directory is not empty report an error; Otherwise remove the * directory. * * @param detachDir * detach directory * @throws IOException * if the directory is not empty or it can not be removed */ private void cleanupDetachDir(File detachDir) throws IOException { if (!LayoutVersion.supports(Feature.APPEND_RBW_DIR, layoutVersion) && detachDir.exists() && detachDir.isDirectory()) { if (FileUtil.list(detachDir).length != 0) { throw new IOException("Detached directory " + detachDir + " is not empty. Please manually move each file under this " + "directory to the finalized directory if the finalized " + "directory tree does not have the file."); } else if (!detachDir.delete()) { throw new IOException("Cannot remove directory " + detachDir); } } } /* * Roll back to old snapshot at the block pool level * If previous directory exists: * <ol> * <li>Rename <SD>/current/<bpid>/current to removed.tmp</li> * <li>Rename * <SD>/current/<bpid>/previous to current</li> * <li>Remove removed.tmp</li> * </ol> * * Do nothing if previous directory does not exist. 
* @param bpSd Block pool storage directory at <SD>/current/<bpid> */ void doRollback(StorageDirectory bpSd, NamespaceInfo nsInfo) throws IOException { File prevDir = bpSd.getPreviousDir(); // regular startup if previous dir does not exist if (!prevDir.exists()) { return; } // read attributes out of the VERSION file of previous directory BlockPoolSliceStorage prevInfo = new BlockPoolSliceStorage(); prevInfo.readPreviousVersionProperties(bpSd); // We allow rollback to a state, which is either consistent with // the namespace state or can be further upgraded to it. // In another word, we can only roll back when ( storedLV >= software LV) // && ( DN.previousCTime <= NN.ctime) if (!(prevInfo.getLayoutVersion() >= HdfsConstants.LAYOUT_VERSION && prevInfo.getCTime() <= nsInfo.getCTime())) { // cannot rollback throw new InconsistentFSStateException(bpSd.getRoot(), "Cannot rollback to a newer state.\nDatanode previous state: LV = " + prevInfo.getLayoutVersion() + " CTime = " + prevInfo.getCTime() + " is newer than the namespace state: LV = " + nsInfo.getLayoutVersion() + " CTime = " + nsInfo.getCTime()); } LOG.info("Rolling back storage directory " + bpSd.getRoot() + ".\n target LV = " + nsInfo.getLayoutVersion() + "; target CTime = " + nsInfo.getCTime()); File tmpDir = bpSd.getRemovedTmp(); assert !tmpDir.exists() : "removed.tmp directory must not exist."; // 1. rename current to tmp File curDir = bpSd.getCurrentDir(); assert curDir.exists() : "Current directory must exist."; rename(curDir, tmpDir); // 2. rename previous to current rename(prevDir, curDir); // 3. delete removed.tmp dir deleteDir(tmpDir); LOG.info("Rollback of " + bpSd.getRoot() + " is complete"); } /* * Finalize the block pool storage by deleting <BP>/previous directory * that holds the snapshot. 
*/ void doFinalize(File dnCurDir) throws IOException { File bpRoot = getBpRoot(blockpoolID, dnCurDir); StorageDirectory bpSd = new StorageDirectory(bpRoot); // block pool level previous directory File prevDir = bpSd.getPreviousDir(); if (!prevDir.exists()) { return; // already finalized } final String dataDirPath = bpSd.getRoot().getCanonicalPath(); LOG.info("Finalizing upgrade for storage directory " + dataDirPath + ".\n cur LV = " + this.getLayoutVersion() + "; cur CTime = " + this.getCTime()); assert bpSd.getCurrentDir().exists() : "Current directory must exist."; // rename previous to finalized.tmp final File tmpDir = bpSd.getFinalizedTmp(); rename(prevDir, tmpDir); // delete finalized.tmp dir in a separate thread new Daemon(new Runnable() { @Override public void run() { try { deleteDir(tmpDir); } catch (IOException ex) { LOG.error("Finalize upgrade for " + dataDirPath + " failed.", ex); } LOG.info("Finalize upgrade for " + dataDirPath + " is complete."); } @Override public String toString() { return "Finalize " + dataDirPath; } }).start(); } /** * Hardlink all finalized and RBW blocks in fromDir to toDir * * @param fromDir * directory where the snapshot is stored * @param toDir * the current data directory * @throws IOException * if error occurs during hardlink */ private void linkAllBlocks(File fromDir, File toDir) throws IOException { // do the link int diskLayoutVersion = this.getLayoutVersion(); // hardlink finalized blocks in tmpDir HardLink hardLink = new HardLink(); DataStorage.linkBlocks(new File(fromDir, DataStorage.STORAGE_DIR_FINALIZED), new File(toDir, DataStorage.STORAGE_DIR_FINALIZED), diskLayoutVersion, hardLink); DataStorage.linkBlocks(new File(fromDir, DataStorage.STORAGE_DIR_RBW), new File(toDir, DataStorage.STORAGE_DIR_RBW), diskLayoutVersion, hardLink); LOG.info(hardLink.linkStats.report()); } /** * gets the data node storage directory based on block pool storage * * @param bpRoot * @return */ private static String 
getDataNodeStorageRoot(String bpRoot) { Matcher matcher = BLOCK_POOL_PATH_PATTERN.matcher(bpRoot); if (matcher.matches()) { // return the data node root directory return matcher.group(1); } return bpRoot; } @Override public String toString() { return super.toString() + ";bpid=" + blockpoolID; } /** * Get a block pool storage root based on data node storage root * * @param bpID * block pool ID * @param dnCurDir * data node storage root directory * @return root directory for block pool storage */ public static File getBpRoot(String bpID, File dnCurDir) { return new File(dnCurDir, bpID); } @Override public boolean isPreUpgradableLayout(StorageDirectory sd) throws IOException { return false; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.statistics; import static org.apache.geode.internal.statistics.StatArchiveFormat.NANOS_PER_MILLI; import static org.apache.geode.internal.statistics.StatUtils.findResourceInsts; import static org.apache.geode.internal.statistics.TestStatArchiveWriter.WRITER_INITIAL_DATE_MILLIS; import static org.apache.geode.internal.statistics.TestStatArchiveWriter.WRITER_PREVIOUS_TIMESTAMP_NANOS; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.Map; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.rules.TestName; import org.apache.geode.StatisticDescriptor; import org.apache.geode.Statistics; import org.apache.geode.StatisticsType; import org.apache.geode.internal.statistics.StatArchiveReader.ResourceInst; import org.apache.geode.internal.statistics.StatArchiveReader.StatValue; /** * 
Generates the stat archive file that is committed under src/test/resources for
 * {@link StatArchiveWithConsecutiveResourceInstIntegrationTest} to load.
 *
 * <p>
 * The generated gfs file is used to confirm GEODE-1782 and its fix.
 *
 * @since Geode 1.0
 */
public class StatArchiveWithConsecutiveResourceInstGenerator {

  private static final Logger logger = LogManager.getLogger();

  protected static final String STATS_TYPE_NAME = "TestStats";
  protected static final String STATS_SPEC_STRING = ":" + STATS_TYPE_NAME;
  protected static final String TEST_NAME =
      StatArchiveWithConsecutiveResourceInstIntegrationTest.class.getSimpleName();
  protected static final String ARCHIVE_FILE_NAME = TEST_NAME + ".gfs";

  private File dir;
  // textId -> statistics type name, recorded by incInt for later validation
  private Map<String, String> statisticTypes;
  // textId -> (stat name -> latest value), recorded by incInt for later validation
  private Map<String, Map<String, Number>> allStatistics;
  protected String archiveFileName;

  private TestStatisticsManager manager;
  private TestStatisticsSampler sampler;
  private SampleCollector sampleCollector;
  private StatArchiveWriter writer;

  @Rule
  public TemporaryFolder temporaryFolder = new TemporaryFolder();

  @Rule
  public TestName testName = new TestName();

  @Before
  public void setUpGenerator() throws Exception {
    statisticTypes = new HashMap<>();
    allStatistics = new HashMap<>();
    dir = temporaryFolder.getRoot();
    // NOTE(review): the archive is created in the working directory rather than
    // in temporaryFolder, presumably so the generated .gfs can be committed
    // under src/test/resources afterwards -- confirm before "fixing".
    archiveFileName = new File(ARCHIVE_FILE_NAME).getAbsolutePath();

    manager = new TestStatisticsManager(1, getUniqueName(), WRITER_INITIAL_DATE_MILLIS);

    StatArchiveDescriptor archiveDescriptor =
        new StatArchiveDescriptor.Builder().setArchiveName(archiveFileName).setSystemId(1)
            .setSystemStartTime(WRITER_INITIAL_DATE_MILLIS - 2000).setSystemDirectoryPath(TEST_NAME)
            .setProductDescription(TEST_NAME).build();
    writer = new TestStatArchiveWriter(archiveDescriptor);

    sampler = new TestStatisticsSampler(manager);
    sampleCollector = new SampleCollector(sampler);
    sampleCollector.addSampleHandler(writer);
  }

  @After
  public void tearDown() throws Exception {
    StatisticsTypeFactoryImpl.clear();
  }

  /**
   * Creates a resource instance, samples it, closes it, then recreates an
   * instance with the same name and samples again -- producing the
   * consecutive-ResourceInst layout that reproduces GEODE-1782 -- and finally
   * validates the written archive with {@link StatArchiveReader}.
   */
  @Test
  public void generateStatArchiveFile() throws Exception {
    long sampleTimeNanos = WRITER_PREVIOUS_TIMESTAMP_NANOS + NANOS_PER_MILLI * 1000;

    // 1) create statistics
    StatisticsType type =
        createStatisticsType(STATS_TYPE_NAME, "description of " + STATS_TYPE_NAME);
    Statistics statistics1 = createStatistics(type, STATS_TYPE_NAME + "1", 1);

    // 2) sample changing stat
    for (int i = 0; i < 100; i++) {
      incInt(statistics1, "stat", 1);
      sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
    }

    // 3) close statistics
    statistics1.close();

    // 4) recreate statistics under the same textId -- this is what yields the
    // consecutive ResourceInst entries in the archive
    Statistics statistics2 = createStatistics(type, STATS_TYPE_NAME + "1", 1);

    // 5) sample changing stat again
    for (int i = 0; i < 100; i++) {
      incInt(statistics2, "stat", 1);
      sampleCollector.sample(sampleTimeNanos += (1000 * NANOS_PER_MILLI));
    }

    // close the writer
    writer.close();

    // validate that stat archive file exists
    File actual = new File(archiveFileName);
    assertTrue(actual.exists());

    // validate content of stat archive file using StatArchiveReader
    StatArchiveReader reader = new StatArchiveReader(new File[] {actual}, null, false);

    // compare all resourceInst values against what was recorded by incInt above
    for (final Object o : reader.getResourceInstList()) {
      ResourceInst ri = (ResourceInst) o;
      String resourceName = ri.getName();
      assertNotNull(resourceName);

      String expectedStatsType = statisticTypes.get(resourceName);
      assertNotNull(expectedStatsType);
      assertEquals(expectedStatsType, ri.getType().getName());

      Map<String, Number> expectedStatValues = allStatistics.get(resourceName);
      assertNotNull(expectedStatValues);

      StatValue[] statValues = ri.getStatValues();
      for (int i = 0; i < statValues.length; i++) {
        final String statName = ri.getType().getStats()[i].getName();
        assertNotNull(statName);
        assertNotNull(expectedStatValues.get(statName));

        assertEquals(statName, statValues[i].getDescriptor().getName());

        statValues[i].setFilter(StatValue.FILTER_NONE);
        // NOTE(review): result unused; retained in case getRawSnapshots() has
        // lazy-loading side effects inside the reader -- confirm before removing.
        double[] rawSnapshots = statValues[i].getRawSnapshots();
        assertEquals("Value " + i + " for " + statName + " is wrong: " + expectedStatValues,
            expectedStatValues.get(statName).doubleValue(),
            statValues[i].getSnapshotsMostRecent(), 0.01);
      }
    }

    validateArchiveFile();
  }

  /** Logs the archive file details and the ResourceInsts matching the spec. */
  protected void validateArchiveFile() throws IOException {
    final File archiveFile = new File(archiveFileName);
    assertTrue(archiveFile.exists());

    logger.info("ArchiveFile: {}", archiveFile.getAbsolutePath());
    logger.info("ArchiveFile length: {}", archiveFile.length());

    for (ResourceInst resourceInst : findResourceInsts(archiveFile, STATS_SPEC_STRING)) {
      logger.info("ResourceInst: {}", resourceInst);
    }
  }

  private String getUniqueName() {
    // NOTE(review): concatenating the Class object uses Class.toString(), which
    // prefixes the result with "class " -- getName() was probably intended.
    // Left unchanged because the name is embedded in the generated archive.
    return StatArchiveWithConsecutiveResourceInstGenerator.class + "_" + testName.getMethodName();
  }

  /** Creates a StatisticsType with a single int counter named "stat". */
  private StatisticsType createStatisticsType(final String name, final String description) {
    StatisticDescriptor[] descriptors = new StatisticDescriptor[] {
        manager.createIntCounter("stat", "description of stat", "units"),};
    return manager.createType(name, description, descriptors);
  }

  private Statistics createStatistics(final StatisticsType type, final String textId,
      final long numericId) {
    // Fix: pass numericId through instead of the hard-coded literal 1 that
    // silently ignored the parameter. All current callers pass 1, so the
    // generated archive content is unchanged.
    return manager.createAtomicStatistics(type, textId, numericId);
  }

  /**
   * Increments the named int stat and records the resulting value (and the
   * statistics type name) for later validation against the archive.
   */
  private void incInt(Statistics statistics, String stat, int value) {
    assertFalse(statistics.isClosed());
    Map<String, Number> statValues =
        allStatistics.computeIfAbsent(statistics.getTextId(), k -> new HashMap<>());
    statistics.incInt(stat, value);
    statValues.put(stat, statistics.getInt(stat));
    statisticTypes.putIfAbsent(statistics.getTextId(), statistics.getType().getName());
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.tier.sockets.command;

import static org.apache.geode.internal.cache.execute.ServerFunctionExecutor.DEFAULT_CLIENT_FUNCTION_TIMEOUT;

import java.io.IOException;
import java.util.HashSet;
import java.util.Set;

import org.jetbrains.annotations.NotNull;

import org.apache.geode.annotations.Immutable;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.internal.ExecuteFunctionHelper;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionInvocationTargetException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.operations.ExecuteFunctionOperationContext;
import org.apache.geode.cache.query.QueryInvocationTargetException;
import org.apache.geode.internal.cache.DistributedRegion;
import org.apache.geode.internal.cache.PartitionedRegion;
import org.apache.geode.internal.cache.execute.AbstractExecution;
import org.apache.geode.internal.cache.execute.DistributedRegionFunctionExecutor;
import org.apache.geode.internal.cache.execute.InternalFunctionInvocationTargetException;
import org.apache.geode.internal.cache.execute.MemberMappedArgument;
import org.apache.geode.internal.cache.execute.PartitionedRegionFunctionExecutor;
import org.apache.geode.internal.cache.execute.ServerToClientFunctionResultSender;
import org.apache.geode.internal.cache.execute.ServerToClientFunctionResultSender65;
import org.apache.geode.internal.cache.tier.CachedRegionHelper;
import org.apache.geode.internal.cache.tier.Command;
import org.apache.geode.internal.cache.tier.MessageType;
import org.apache.geode.internal.cache.tier.ServerSideHandshake;
import org.apache.geode.internal.cache.tier.sockets.BaseCommand;
import org.apache.geode.internal.cache.tier.sockets.ChunkedMessage;
import org.apache.geode.internal.cache.tier.sockets.Message;
import org.apache.geode.internal.cache.tier.sockets.Part;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.security.AuthorizeRequest;
import org.apache.geode.internal.security.SecurityService;
import org.apache.geode.internal.serialization.KnownVersion;

/**
 * Server-side command that executes a function on a region on behalf of a client
 * (message version introduced in GemFire 6.6). Decodes the client message parts,
 * authorizes the caller, builds the appropriate {@link AbstractExecution} for the
 * region type, runs the function, and streams results/exceptions back in chunks.
 *
 * @since GemFire 6.6
 */
public class ExecuteRegionFunction66 extends BaseCommand {

  @Immutable
  private static final ExecuteRegionFunction66 singleton = new ExecuteRegionFunction66();

  public static Command getCommand() {
    return singleton;
  }

  ExecuteRegionFunction66() {}

  /**
   * Decodes and executes an EXECUTE_REGION_FUNCTION request.
   *
   * Message layout (by part index): 0 = functionState byte (+ optional encoded timeout),
   * 1 = region name, 2 = function id or serialized Function, 3 = arguments,
   * 4 = optional {@link MemberMappedArgument}, 5 = flags byte, 6 = filter size followed
   * by filter keys, then removed-nodes size followed by removed-node entries.
   */
  @Override
  public void cmdExecute(final @NotNull Message clientMessage,
      final @NotNull ServerConnection serverConnection,
      final @NotNull SecurityService securityService, long start) throws IOException {
    String regionName = null;
    Object function = null;
    Object args = null;
    MemberMappedArgument memberMappedArg = null;
    final boolean isBucketsAsFilter;
    final byte isReExecute;
    Set<Object> filter = null;
    byte hasResult = 0;
    Set<Object> removedNodesSet = null;
    int partNumber = 0;
    byte functionState = 0;
    int functionTimeout = DEFAULT_CLIENT_FUNCTION_TIMEOUT;
    try {
      byte[] bytes = clientMessage.getPart(0).getSerializedForm();
      functionState = bytes[0];
      // A 5-byte part carries an explicit client function timeout after the state byte.
      if (bytes.length >= 5) {
        functionTimeout = Part.decodeInt(bytes, 1);
      }
      // State 1 means "has result"; otherwise bit 2 of the state encodes hasResult.
      if (functionState != 1) {
        hasResult = (byte) ((functionState & 2) - 1);
      } else {
        hasResult = functionState;
      }
      if (hasResult == 1) {
        serverConnection.setAsTrue(REQUIRES_RESPONSE);
        serverConnection.setAsTrue(REQUIRES_CHUNKED_RESPONSE);
      }
      regionName = clientMessage.getPart(1).getCachedString();
      function = clientMessage.getPart(2).getStringOrObject();
      args = clientMessage.getPart(3).getObject();
      Part part = clientMessage.getPart(4);
      memberMappedArg = extractMemberMappedArgument(part);

      byte[] flags = clientMessage.getPart(5).getSerializedForm();
      // Clients newer than GFE 8.1 pack both flags into one byte; older clients send
      // the re-execute flag directly and never use buckets-as-filter.
      if (serverConnection.getClientVersion().ordinal() > KnownVersion.GFE_81.ordinal()) {
        isBucketsAsFilter = (flags[0] & ExecuteFunctionHelper.BUCKETS_AS_FILTER_MASK) != 0;
        isReExecute = (flags[0] & ExecuteFunctionHelper.IS_REXECUTE_MASK) != 0 ? (byte) 1 : 0;
      } else {
        isReExecute = flags[0];
        isBucketsAsFilter = false;
      }
      int filterSize = clientMessage.getPart(6).getInt();
      filter = populateFilters(clientMessage, filterSize);

      partNumber = 7 + filterSize;
      int removedNodesSize = clientMessage.getPart(partNumber).getInt();
      removedNodesSet = populateRemovedNodes(clientMessage, removedNodesSize, partNumber);
    } catch (ClassNotFoundException exception) {
      logger.warn(String.format("Exception on server while executing function : %s", function),
          exception);
      if (hasResult == 1) {
        writeChunkedException(clientMessage, exception, serverConnection);
      } else {
        writeException(clientMessage, exception, false, serverConnection);
      }
      serverConnection.setAsTrue(RESPONDED);
      return;
    }
    if (function == null || regionName == null) {
      String message = generateNullArgumentMessage(regionName, function);
      logger.warn("{}: {}", serverConnection.getName(), message);
      sendError(hasResult, clientMessage, message, serverConnection);
      return;
    }

    CachedRegionHelper crHelper = serverConnection.getCachedRegionHelper();
    Region region = crHelper.getRegion(regionName);
    if (region == null) {
      String message = String.format(
          "The region named %s was not found during execute Function request.", regionName);
      logger.warn("{}: {}", serverConnection.getName(), message);
      sendError(hasResult, clientMessage, message, serverConnection);
      return;
    }
    ServerSideHandshake handshake = serverConnection.getHandshake();
    int earlierClientReadTimeout = handshake.getClientReadTimeout();
    // Temporarily stretch the read timeout to the function timeout; restored in finally.
    handshake.setClientReadTimeout(functionTimeout);
    ServerToClientFunctionResultSender resultSender = null;
    Function<?> functionObject = null;
    try {
      if (function instanceof String) {
        functionObject = FunctionService.getFunction((String) function);
        if (!validateFunctionObject(clientMessage, serverConnection, function, hasResult,
            functionState, functionObject)) {
          return;
        }
      } else {
        functionObject = (Function) function;
      }

      // check if the caller is authorized to do this operation on server
      functionObject.getRequiredPermissions(regionName, args).forEach(securityService::authorize);

      ExecuteFunctionOperationContext executeContext =
          getAuthorizedExecuteFunctionOperationContext(args, filter,
              functionObject.optimizeForWrite(), serverConnection.getAuthzRequest(),
              functionObject.getId(), region.getFullPath());

      ChunkedMessage m = serverConnection.getFunctionResponseMessage();
      m.setTransactionId(clientMessage.getTransactionId());
      resultSender = new ServerToClientFunctionResultSender65(m,
          MessageType.EXECUTE_REGION_FUNCTION_RESULT, serverConnection, functionObject,
          executeContext);

      AbstractExecution execution = createExecution(args, memberMappedArg, isBucketsAsFilter,
          filter, removedNodesSet, region, resultSender);

      // Single-key filter on a PR with results can be satisfied on the local node only.
      if (execution instanceof PartitionedRegionFunctionExecutor) {
        if ((hasResult == 1) && filter != null && filter.size() == 1) {
          ServerConnection.executeFunctionOnLocalNodeOnly((byte) 1);
        }
      }
      if (isReExecute == 1) {
        execution = execution.setIsReExecute();
      }
      if (logger.isDebugEnabled()) {
        logger.debug(
            "Executing Function: {} on Server: {} with Execution: {} functionState={} reExecute={} hasResult={}",
            functionObject.getId(), serverConnection, execution, functionState, isReExecute,
            hasResult);
      }
      if (hasResult == 1) {
        executeFunctionWithResult(function, functionState, functionObject, execution);
      } else {
        executeFunctionNoResult(function, functionState, functionObject, execution);
        writeReply(clientMessage, serverConnection);
      }
    } catch (IOException ioe) {
      logger.warn(String.format("Exception on server while executing function : %s", function),
          ioe);
      final String message = "Server could not send the reply";
      sendException(hasResult, clientMessage, message, serverConnection, ioe);
    } catch (FunctionException fe) {
      String message = fe.getMessage();
      Object cause = fe.getCause();
      if (cause instanceof FunctionInvocationTargetException
          || cause instanceof QueryInvocationTargetException) {
        logFunctionExceptionCause(function, functionObject, fe, message, cause);
        // FIX: guard against NPE — the FunctionException may be raised before
        // resultSender is constructed (e.g. during function lookup or authorization).
        // The other catch branches already tolerate a null sender via
        // setLastResultReceived(resultSender).
        if (resultSender != null) {
          resultSender.setException(fe);
        }
      } else {
        if (setLastResultReceived(resultSender)) {
          logger.warn(String.format("Exception on server while executing function : %s", function),
              fe);
          sendException(hasResult, clientMessage, message, serverConnection, fe);
        }
      }
    } catch (Exception e) {
      if (setLastResultReceived(resultSender)) {
        logger.warn(String.format("Exception on server while executing function : %s", function),
            e);
        String message = e.getMessage();
        sendException(hasResult, clientMessage, message, serverConnection, e);
      }
    } finally {
      handshake.setClientReadTimeout(earlierClientReadTimeout);
      ServerConnection.executeFunctionOnLocalNodeOnly((byte) 0);
    }
  }

  /**
   * Logs a FunctionException whose cause is an invocation-target exception, choosing the
   * log level based on whether the failure is internal/retryable or user-visible.
   */
  void logFunctionExceptionCause(Object function, Function<?> functionObject,
      FunctionException fe, String message, Object cause) {
    if (cause instanceof InternalFunctionInvocationTargetException) {
      // Fix for #44709: User should not be aware of
      // InternalFunctionInvocationTargetException. No instance of
      // InternalFunctionInvocationTargetException is giving useful
      // information to user to take any corrective action hence logging
      // this at fine level logging
      // 1> When bucket is moved
      // 2> In case of HA FunctionInvocationTargetException thrown. Since
      // it is HA, function will be re-executed on the right node
      // 3> Multiple target nodes found for single hop operation
      // 4> In case of HA member departed
      if (logger.isDebugEnabled()) {
        logger.debug(String.format("Exception on server while executing function: %s", function),
            fe);
      }
    } else if (functionObject.isHA()) {
      logger.warn("Exception on server while executing function : {}",
          function + " :" + message);
    } else {
      logger.warn(String.format("Exception on server while executing function : %s", function),
          fe);
    }
  }

  /**
   * Builds the region-appropriate executor: partitioned regions get a
   * {@link PartitionedRegionFunctionExecutor} (which honors filters, removed nodes and
   * buckets-as-filter), everything else a {@link DistributedRegionFunctionExecutor}.
   */
  AbstractExecution createExecution(Object args, MemberMappedArgument memberMappedArg,
      boolean isBucketsAsFilter, Set<Object> filter, Set<Object> removedNodesSet, Region region,
      ServerToClientFunctionResultSender resultSender) {
    AbstractExecution execution = (AbstractExecution) FunctionService.onRegion(region);
    if (execution instanceof PartitionedRegionFunctionExecutor) {
      execution = new PartitionedRegionFunctionExecutor((PartitionedRegion) region, filter, args,
          memberMappedArg, resultSender, removedNodesSet, isBucketsAsFilter);
    } else {
      execution = new DistributedRegionFunctionExecutor((DistributedRegion) region, filter, args,
          memberMappedArg, resultSender);
    }
    return execution;
  }

  /**
   * Verifies a by-name function is registered and that the client's declared function
   * attributes (HA / hasResult / optimizeForWrite) match the server-side registration.
   * Sends an error to the client and returns false on mismatch.
   */
  boolean validateFunctionObject(Message clientMessage, ServerConnection serverConnection,
      Object function, byte hasResult, byte functionState, Function<?> functionObject)
      throws IOException {
    if (functionObject == null) {
      String message = String.format("The function, %s, has not been registered", function);
      logger.warn("{}: {}", serverConnection.getName(), message);
      sendError(hasResult, clientMessage, message, serverConnection);
      return false;
    } else {
      byte functionStateOnServerSide = AbstractExecution.getFunctionState(functionObject.isHA(),
          functionObject.hasResult(), functionObject.optimizeForWrite());
      if (logger.isDebugEnabled()) {
        logger.debug("Function State on server side: {} on client: {}", functionStateOnServerSide,
            functionState);
      }
      if (functionStateOnServerSide != functionState) {
        String message =
            String.format("Function attributes at client and server don't match: %s", function);
        logger.warn("{}: {}", serverConnection.getName(), message);
        sendError(hasResult, clientMessage, message, serverConnection);
        return false;
      }
    }
    return true;
  }

  /** Returns the error text for a missing region or function; null if both were present. */
  String generateNullArgumentMessage(String regionName, Object function) {
    String message = null;
    if (regionName == null) {
      message = "The input region for the execute function request is null";
    } else if (function == null) {
      message = "The input function for the execute function request is null";
    }
    return message;
  }

  /** Deserializes part 4 into a MemberMappedArgument, or null if absent/other type. */
  MemberMappedArgument extractMemberMappedArgument(Part part)
      throws IOException, ClassNotFoundException {
    MemberMappedArgument memberMappedArg = null;
    if (part != null) {
      Object obj = part.getObject();
      if (obj instanceof MemberMappedArgument) {
        memberMappedArg = (MemberMappedArgument) obj;
      }
    }
    return memberMappedArg;
  }

  /**
   * Reads the removed-nodes entries that follow the size part at {@code partNumber};
   * returns null when the declared size is zero (matching the wire protocol).
   */
  Set<Object> populateRemovedNodes(Message clientMessage, int removedNodesSize, int partNumber)
      throws IOException, ClassNotFoundException {
    Set<Object> removedNodesSet = null;
    if (removedNodesSize != 0) {
      removedNodesSet = new HashSet<>();
      partNumber = partNumber + 1;
      for (int i = 0; i < removedNodesSize; i++) {
        removedNodesSet.add(clientMessage.getPart(partNumber + i).getStringOrObject());
      }
    }
    return removedNodesSet;
  }

  /**
   * Reads {@code filterSize} filter keys starting at part 7; returns null when the
   * declared size is zero (matching the wire protocol).
   */
  Set<Object> populateFilters(Message clientMessage, int filterSize)
      throws IOException, ClassNotFoundException {
    Set<Object> filter = null;
    int partNumber;
    if (filterSize != 0) {
      filter = new HashSet<>();
      partNumber = 7;
      for (int i = 0; i < filterSize; i++) {
        filter.add(clientMessage.getPart(partNumber + i).getStringOrObject());
      }
    }
    return filter;
  }

  /** Runs the legacy authorization callback, if any is configured; null otherwise. */
  ExecuteFunctionOperationContext getAuthorizedExecuteFunctionOperationContext(Object args,
      Set<Object> filter, boolean optimizedForWrite, AuthorizeRequest authzRequest,
      String functionName, String regionPath) {
    ExecuteFunctionOperationContext executeContext = null;
    if (authzRequest != null) {
      executeContext = authzRequest.executeFunctionAuthorize(functionName, regionPath, filter,
          args, optimizedForWrite);
    }
    return executeContext;
  }

  /**
   * Fire-and-forget execution. For by-name functions only the two no-result states are
   * dispatched; other states intentionally fall through without executing.
   */
  void executeFunctionNoResult(Object function, byte functionState, Function<?> functionObject,
      AbstractExecution execution) {
    if (function instanceof String) {
      switch (functionState) {
        case AbstractExecution.NO_HA_NO_HASRESULT_NO_OPTIMIZEFORWRITE:
          execution.execute((String) function);
          break;
        case AbstractExecution.NO_HA_NO_HASRESULT_OPTIMIZEFORWRITE:
          execution.execute((String) function);
          break;
      }
    } else {
      execution.execute(functionObject);
    }
  }

  /**
   * Result-returning execution; getResult() blocks until the function finishes so that
   * exceptions surface here. Only the four has-result states are dispatched for by-name
   * functions.
   */
  void executeFunctionWithResult(Object function, byte functionState, Function<?> functionObject,
      AbstractExecution execution) {
    if (function instanceof String) {
      switch (functionState) {
        case AbstractExecution.NO_HA_HASRESULT_NO_OPTIMIZEFORWRITE:
          execution.execute((String) function).getResult();
          break;
        case AbstractExecution.HA_HASRESULT_NO_OPTIMIZEFORWRITE:
          execution.execute((String) function).getResult();
          break;
        case AbstractExecution.HA_HASRESULT_OPTIMIZEFORWRITE:
          execution.execute((String) function).getResult();
          break;
        case AbstractExecution.NO_HA_HASRESULT_OPTIMIZEFORWRITE:
          execution.execute((String) function).getResult();
          break;
      }
    } else {
      execution.execute(functionObject).getResult();
    }
  }

  /** Sends an exception to the client, chunked when the call expects results. */
  private void sendException(byte hasResult, Message msg, String message,
      ServerConnection serverConnection, Throwable e) throws IOException {
    synchronized (msg) {
      if (hasResult == 1) {
        writeFunctionResponseException(msg, MessageType.EXCEPTION, message, serverConnection, e);
      } else {
        writeException(msg, e, false, serverConnection);
      }
      serverConnection.setAsTrue(RESPONDED);
    }
  }

  /** Sends an EXECUTE_REGION_FUNCTION_ERROR, chunked when the call expects results. */
  private void sendError(byte hasResult, Message msg, String message,
      ServerConnection serverConnection) throws IOException {
    synchronized (msg) {
      if (hasResult == 1) {
        writeFunctionResponseError(msg, MessageType.EXECUTE_REGION_FUNCTION_ERROR, message,
            serverConnection);
      } else {
        writeErrorResponse(msg, MessageType.EXECUTE_REGION_FUNCTION_ERROR, message,
            serverConnection);
      }
      serverConnection.setAsTrue(RESPONDED);
    }
  }

  /**
   * Writes an exception chunk to the client. If the response header has already gone out
   * the chunk is appended to the in-progress function response; otherwise a fresh chunked
   * response is started. Internal invocation-target exceptions additionally carry the set
   * of failed nodes so the client can retry elsewhere.
   */
  protected static void writeFunctionResponseException(Message origMsg, int messageType,
      String message, ServerConnection serverConnection, Throwable e) throws IOException {
    ChunkedMessage functionResponseMsg = serverConnection.getFunctionResponseMessage();
    ChunkedMessage chunkedResponseMsg = serverConnection.getChunkedResponseMessage();
    int numParts = 0;
    if (functionResponseMsg.headerHasBeenSent()) {
      if (e instanceof FunctionException
          && e.getCause() instanceof InternalFunctionInvocationTargetException) {
        functionResponseMsg.setNumberOfParts(3);
        functionResponseMsg.addObjPart(e);
        functionResponseMsg.addStringPart(BaseCommand.getExceptionTrace(e));
        InternalFunctionInvocationTargetException fe =
            (InternalFunctionInvocationTargetException) e.getCause();
        functionResponseMsg.addObjPart(fe.getFailedNodeSet());
        numParts = 3;
      } else {
        functionResponseMsg.setNumberOfParts(2);
        functionResponseMsg.addObjPart(e);
        functionResponseMsg.addStringPart(BaseCommand.getExceptionTrace(e));
        numParts = 2;
      }
      if (logger.isDebugEnabled()) {
        logger.debug("{}: Sending exception chunk while reply in progress: ",
            serverConnection.getName(), e);
      }
      functionResponseMsg.setServerConnection(serverConnection);
      functionResponseMsg.setLastChunkAndNumParts(true, numParts);
      // functionResponseMsg.setLastChunk(true);
      functionResponseMsg.sendChunk(serverConnection);
    } else {
      chunkedResponseMsg.setMessageType(messageType);
      chunkedResponseMsg.setTransactionId(origMsg.getTransactionId());
      chunkedResponseMsg.sendHeader();
      if (e instanceof FunctionException
          && e.getCause() instanceof InternalFunctionInvocationTargetException) {
        chunkedResponseMsg.setNumberOfParts(3);
        chunkedResponseMsg.addObjPart(e);
        chunkedResponseMsg.addStringPart(BaseCommand.getExceptionTrace(e));
        InternalFunctionInvocationTargetException fe =
            (InternalFunctionInvocationTargetException) e.getCause();
        chunkedResponseMsg.addObjPart(fe.getFailedNodeSet());
        numParts = 3;
      } else {
        chunkedResponseMsg.setNumberOfParts(2);
        chunkedResponseMsg.addObjPart(e);
        chunkedResponseMsg.addStringPart(BaseCommand.getExceptionTrace(e));
        numParts = 2;
      }
      if (logger.isDebugEnabled()) {
        logger.debug("{}: Sending exception chunk: ", serverConnection.getName(), e);
      }
      chunkedResponseMsg.setServerConnection(serverConnection);
      chunkedResponseMsg.setLastChunkAndNumParts(true, numParts);
      chunkedResponseMsg.sendChunk(serverConnection);
    }
  }
}
/* Copyright 2004 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package xmlobject.schematypes.checkin;

import org.apache.xmlbeans.XmlObject;
import org.apache.xmlbeans.XmlOptions;
import org.apache.xmlbeans.XmlCursor;
import org.apache.xmlbeans.XmlBeans;
import org.apache.xmlbeans.XmlCalendar;
import junit.framework.Test;
import junit.framework.TestSuite;
import junit.framework.Assert;
import junit.framework.TestCase;
import java.util.Date;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.io.InputStream;
import org.openuri.xstypes.test.*;
import org.openuri.def.DefaultsDocument;
import org.apache.xmlbeans.impl.xb.xsdschema.SchemaDocument;
import javax.xml.namespace.QName;
import tools.util.*;

/**
 * Checkin tests for built-in schema types: defaults, hex/base64 binary,
 * QName, anyURI, and calendar round-tripping through a generated
 * {@link CustomerDocument}.
 */
public class SchemaTypesTests extends TestCase {

    public SchemaTypesTests(String name) {
        super(name);
    }

    public static Test suite() {
        return new TestSuite(SchemaTypesTests.class);
    }

    // Lazily-parsed fixture document shared by the read/write tests.
    private CustomerDocument doc;

    /** Parses the person.xml fixture on first use; subsequent calls are no-ops. */
    private void ensureDoc() throws Exception {
        if (doc != null) {
            return;
        }
        doc = (CustomerDocument) XmlObject.Factory.parse(
            JarUtil.getResourceFromJarasFile("xbean/xmlobject/person.xml"));
    }

    /** A freshly created element must report its schema-declared default value. */
    public void testDefaults() throws Throwable {
        DefaultsDocument defaultsDoc = DefaultsDocument.Factory.newInstance();
        DefaultsDocument.Defaults defaults = defaultsDoc.addNewDefaults();
        Assert.assertEquals(783, defaults.getCool()); // this is the default value
    }

    /** The compiled type remembers its source schema, and that source is retrievable. */
    public void testSourceName() throws Throwable {
        String sourceName = DefaultsDocument.type.getSourceName();
        Assert.assertEquals("defaults.xsd", sourceName);
        InputStream stream = XmlBeans.getContextTypeLoader().getSourceAsStream("defaults.xsd");
        SchemaDocument schemaDoc = SchemaDocument.Factory.parse(stream);
        Assert.assertTrue(schemaDoc.validate());
    }

    /** Reads every typed accessor from the fixture and checks the expected values. */
    public void testRead() throws Throwable {
        ensureDoc();
        // Move from the root to the root customer element
        Person customer = doc.getCustomer();
        Assert.assertEquals("Howdy", customer.getFirstname());
        Assert.assertEquals(4, customer.sizeOfNumberArray());
        Assert.assertEquals(436, customer.getNumberArray(0));
        Assert.assertEquals(123, customer.getNumberArray(1));
        Assert.assertEquals(44, customer.getNumberArray(2));
        Assert.assertEquals(933, customer.getNumberArray(3));
        Assert.assertEquals(2, customer.sizeOfBirthdayArray());
        Assert.assertEquals(new XmlCalendar("1998-08-26Z"), customer.getBirthdayArray(0));
        Assert.assertEquals(new XmlCalendar("2000-08-06-08:00"), customer.getBirthdayArray(1));

        Person.Gender.Enum gender = customer.getGender();
        Assert.assertEquals(Person.Gender.MALE, gender);

        Assert.assertEquals("EGIQTWYZJ", new String(customer.getHex()));
        Assert.assertEquals("This string is base64Binary encoded!",
            new String(customer.getBase64()));
        Assert.assertEquals("GGIQTWYGG", new String(customer.getHexAtt()));
        Assert.assertEquals("This string is base64Binary encoded!",
            new String(customer.getBase64Att()));

        Assert.assertEquals("{some_uri}localname", customer.getQnameAtt().toString());
        Assert.assertEquals("{http://openuri.org/xstypes/test}openuri_org_localname",
            customer.getQname().toString());

        //Assert.assertEquals("http://dmoz.org/World/Fran\u00e7ais/", person.getAnyuriAtt().toString());
        Assert.assertEquals("http://3space.org/space%20space/", customer.getAnyuri().toString());

        //RuntimeException: src/xmlstore/org/apache/xmlbeans/impl/store/Splay.java(1537): ns != null && ns.length() > 0 failed
        //Assert.assertEquals("JPEG", person.getNotationAtt().toString());
        //Assert.assertEquals("GIF", person.getNotation().toString());
    }

    /** Writes each typed setter and immediately reads it back for verification. */
    public void testWriteRead() throws Throwable {
        ensureDoc();
        // Move from the root to the root customer element
        Person customer = doc.getCustomer();

        customer.setFirstname("George");
        Assert.assertEquals("George", customer.getFirstname());

        customer.setHex("hex encoding".getBytes());
        Assert.assertEquals("hex encoding", new String(customer.getHex()));

        customer.setBase64("base64 encoded".getBytes());
        Assert.assertEquals("base64 encoded", new String(customer.getBase64()));

        customer.setHexAtt("hex encoding in attributes".getBytes());
        Assert.assertEquals("hex encoding in attributes", new String(customer.getHexAtt()));

        customer.setBase64Att("This string is base64Binary encoded!".getBytes());
        Assert.assertEquals("This string is base64Binary encoded!",
            new String(customer.getBase64Att()));

        customer.setAnyuri("a.c:7001");
        Assert.assertEquals("a.c:7001", customer.getAnyuri());

        customer.setAnyuriAtt("b.d:7002");
        Assert.assertEquals("b.d:7002", customer.getAnyuriAtt());

        customer.setQnameAtt(new QName("aaa", "bbb"));
        Assert.assertEquals("{aaa}bbb", customer.getQnameAtt().toString());

        customer.setQname(new QName("ddd", "eee"));
        Assert.assertEquals("{ddd}eee", customer.getQname().toString());

        //Exception: src/xmlstore/org/apache/xmlbeans/impl/store/Type.java(189): user == _user failed
        // person.setAnyuriAtt(URI.create("b.d:7002"));
        // Assert.assertEquals("b.d:7002", person.getAnyuriAtt().toString());

        //XmlNOTATION notation = (XmlNOTATION)Person.Notation.type.createNode();
        //notation.setValue("JPEG");
        //person.setNotation( notation );
        //Assert.assertEquals("JPEG", person.getNotation().toString());

        //XmlNOTATION notationAtt = (XmlNOTATION)Person.NotationAtt.type.createNode();
        //notationAtt.setValue("GIF");
        //person.setNotationAtt( notationAtt );
        //person.setNotationAtt(notation);
        //Assert.assertEquals("GIF", person.getNotationAtt().toString());
    }

    /** Verifies setters via the underlying store: cursor text must match what was written. */
    public void testStoreWrite() throws Throwable {
        ensureDoc();
        // Move from the root to the root customer element
        Person customer = doc.getCustomer();
        XmlObject obj;
        XmlCursor cursor;

        customer.setFirstname("George");
        obj = customer.xgetFirstname();
        cursor = obj.newCursor();
        Assert.assertEquals("George", cursor.getTextValue());

        customer.setQnameAtt(new QName("http://ggg.com", "hhh"));
        obj = customer.xgetQnameAtt();
        cursor = obj.newCursor();
        Assert.assertEquals("ggg:hhh", cursor.getTextValue());

        customer.setQname(new QName("http://ggg.com/gggAgain", "kkk"));
        obj = customer.xgetQname();
        cursor = obj.newCursor();
        Assert.assertEquals("ggg1:kkk", cursor.getTextValue());

        customer.setAnyuri("crossgain.com");
        obj = customer.xgetAnyuri();
        cursor = obj.newCursor();
        Assert.assertEquals("crossgain.com", cursor.getTextValue());

        customer.setAnyuriAtt("www.crossgain.com");
        obj = customer.xgetAnyuriAtt();
        cursor = obj.newCursor();
        Assert.assertEquals("www.crossgain.com", cursor.getTextValue());

        //person.setNotation("GIF");
        //xmlobj = person.getNotation();
        //xmlcurs = xmlobj.newXmlCursor();
        //Assert.assertEquals("GIF", xmlcurs.getText() );

        //person.setNotationAtt("JPEGu");
        //xmlobj = person.xgetNotationAtt();
        //xmlcurs = xmlobj.newXmlCursor();
        //Assert.assertEquals("JPEG", xmlcurs.getText() );
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * User: anna
 * Date: 15-Jan-2008
 */
package com.intellij.psi.search.scope.packageSet;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.regex.Pattern;

/**
 * A "file:" scope package set: matches files by a glob-like path pattern, optionally
 * restricted to a module name pattern or a module-group pattern of the form
 * {@code group:<groupGlob>[:<moduleGlob>]}. Globs use {@code *}; {@code //} in the
 * path pattern matches any number of intermediate directories.
 */
public class FilePatternPackageSet extends PatternBasedPackageSet {
  @NonNls public static final String SCOPE_FILE = "file";
  // Compiled from the module part of the pattern; null when no module restriction.
  private Pattern myModulePattern;
  // Compiled from the group part of a "group:" pattern; null when absent.
  private Pattern myModuleGroupPattern;
  // Original (uncompiled) path pattern text, kept for getText()/createCopy()/isOn().
  private final String myPathPattern;
  // Path pattern compiled by convertToRegexp(); null when filePattern was null.
  private final Pattern myFilePattern;
  // Original (uncompiled) module pattern text.
  private final String myModulePatternText;
  private static final Logger LOG = Logger.getInstance("com.intellij.psi.search.scope.packageSet.FilePatternPackageSet");

  /**
   * @param modulePattern module restriction glob, a {@code group:...} spec, or null/empty for none
   * @param filePattern   path glob relative to the content root (or project base dir)
   */
  public FilePatternPackageSet(@NonNls String modulePattern,
                               @NonNls String filePattern) {
    myPathPattern = filePattern;
    myModulePatternText = modulePattern;
    if (modulePattern == null || modulePattern.isEmpty()) {
      myModulePattern = null;
    }
    else {
      if (modulePattern.startsWith("group:")) {
        // "group:<groupGlob>[:<moduleGlob>]" — split at the second ':' if present.
        int idx = modulePattern.indexOf(':', 6);
        if (idx == -1) idx = modulePattern.length();
        myModuleGroupPattern = Pattern.compile(StringUtil.replace(modulePattern.substring(6, idx), "*", ".*"));
        if (idx < modulePattern.length() - 1) {
          myModulePattern = Pattern.compile(StringUtil.replace(modulePattern.substring(idx + 1), "*", ".*"));
        }
      }
      else {
        // Plain module glob: '*' becomes '.*'.
        myModulePattern = Pattern.compile(StringUtil.replace(modulePattern, "*", ".*"));
      }
    }
    myFilePattern = filePattern != null ? Pattern.compile(convertToRegexp(filePattern, '/')) : null;
  }

  @Override
  public boolean contains(VirtualFile file, @NotNull NamedScopesHolder holder) {
    return contains(file, holder.getProject(), holder);
  }

  /** A file is in this set when both its relative path and its module (if restricted) match. */
  @Override
  public boolean contains(VirtualFile file, @NotNull Project project, @Nullable NamedScopesHolder holder) {
    ProjectFileIndex fileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    return file != null && fileMatcher(file, fileIndex, holder != null ? holder.getProjectBaseDir() : project.getBaseDir()) &&
           matchesModule(myModuleGroupPattern, myModulePattern, file, fileIndex);
  }

  // Matches the file's content-root-relative path against the compiled path pattern.
  // NOTE(review): assumes myFilePattern is non-null here (i.e. the set was built with a
  // non-null filePattern) and that getRelativePath succeeded — the assertTrue logs otherwise.
  private boolean fileMatcher(VirtualFile virtualFile, ProjectFileIndex fileIndex, VirtualFile projectBaseDir){
    final String relativePath = getRelativePath(virtualFile, fileIndex, true, projectBaseDir);
    LOG.assertTrue(relativePath != null, "vFile: " + virtualFile + "; projectBaseDir: " + projectBaseDir + "; content File: "+fileIndex.getContentRootForFile(virtualFile));
    return myFilePattern.matcher(relativePath).matches();
  }

  /**
   * Checks the module restriction: true when the file's module name matches
   * {@code modulePattern}, or any node of its module-group path matches
   * {@code moduleGroupPattern}. With no restriction at all, everything matches.
   */
  public static boolean matchesModule(final Pattern moduleGroupPattern,
                                      final Pattern modulePattern,
                                      final VirtualFile file,
                                      final ProjectFileIndex fileIndex) {
    final Module module = fileIndex.getModuleForFile(file);
    if (module != null) {
      if (modulePattern != null && modulePattern.matcher(module.getName()).matches()) return true;
      if (moduleGroupPattern != null) {
        final String[] groupPath = ModuleManager.getInstance(module.getProject()).getModuleGroupPath(module);
        if (groupPath != null) {
          for (String node : groupPath) {
            if (moduleGroupPattern.matcher(node).matches()) return true;
          }
        }
      }
    }
    // No restriction configured => match; restriction configured but not satisfied => no match.
    return modulePattern == null && moduleGroupPattern == null;
  }

  //public for tests only
  /**
   * Translates a path glob into a regular expression. Rules implemented by the state
   * machine below: '.' is escaped; a single '*' becomes "[^/]*" (no separator crossing,
   * emitted as ".*" only in the double-'*' escape path); "**" becomes "[^/]*"; a single
   * '/' is escaped; "//" becomes "/(.*\/)?" i.e. any number of intermediate directories.
   * The two "isAfter" flags defer emission by one character so the doubled forms can be
   * recognized.
   */
  public static String convertToRegexp(String aspectsntx, char separator) {
    StringBuffer buf = new StringBuffer(aspectsntx.length());
    int cur = 0;
    boolean isAfterSeparator = false;  // pending single separator not yet emitted
    boolean isAfterAsterix = false;    // pending single '*' not yet emitted
    while (cur < aspectsntx.length()) {
      char curChar = aspectsntx.charAt(cur);
      // Flush a pending separator when the doubled form did not materialize.
      if (curChar != separator && isAfterSeparator) {
        buf.append("\\" + separator);
        isAfterSeparator = false;
      }
      // Flush a pending '*' as ".*" when the doubled form did not materialize.
      if (curChar != '*' && isAfterAsterix) {
        buf.append(".*");
        isAfterAsterix = false;
      }
      if (curChar == '*') {
        if (!isAfterAsterix){
          isAfterAsterix = true;
        }
        else {
          // "**" => any run of non-separator characters.
          buf.append("[^\\" + separator + "]*");
          isAfterAsterix = false;
        }
      }
      else if (curChar == separator) {
        if (isAfterSeparator) {
          // "//" => this separator plus optionally any intermediate directories.
          buf.append("\\" +separator+ "(.*\\" + separator + ")?");
          isAfterSeparator = false;
        }
        else {
          isAfterSeparator = true;
        }
      }
      else {
        if (curChar == '.') {
          buf.append("\\");
        }
        buf.append(curChar);
      }
      cur++;
    }
    // A trailing single '*' matches any non-separator suffix.
    if (isAfterAsterix){
      buf.append("[^\\" + separator + "]*");
    }
    return buf.toString();
  }

  @Override
  @NotNull
  public PackageSet createCopy() {
    return new FilePatternPackageSet(myModulePatternText, myPathPattern);
  }

  @Override
  public int getNodePriority() {
    return 0;
  }

  /** Rebuilds the textual form: {@code file[modulePattern]:pathPattern}. */
  @Override
  @NotNull
  public String getText() {
    @NonNls StringBuffer buf = new StringBuffer("file");

    if (myModulePattern != null || myModuleGroupPattern != null) {
      buf.append("[").append(myModulePatternText).append("]");
    }

    // buf always starts with "file", so the ':' is always appended.
    if (buf.length() > 0) {
      buf.append(':');
    }

    buf.append(myPathPattern);
    return buf.toString();
  }

  @Override
  public String getPattern() {
    return myPathPattern;
  }

  @Override
  public String getModulePattern() {
    return myModulePatternText;
  }

  @Override
  public boolean isOn(String oldQName) {
    return Comparing.strEqual(myPathPattern, oldQName);
  }

  /**
   * Returns the file's path relative to its content root, falling back to the
   * project-base-relative path (or absolute path) for module files outside a
   * content root, and to the library-classes-relative path otherwise.
   */
  @Nullable
  public static String getRelativePath(@NotNull VirtualFile virtualFile,
                                       @NotNull ProjectFileIndex index,
                                       final boolean useFQName,
                                       VirtualFile projectBaseDir) {
    final VirtualFile contentRootForFile = index.getContentRootForFile(virtualFile);
    if (contentRootForFile != null) {
      return VfsUtilCore.getRelativePath(virtualFile, contentRootForFile, '/');
    }
    final Module module = index.getModuleForFile(virtualFile);
    if (module != null) {
      if (projectBaseDir != null) {
        if (VfsUtilCore.isAncestor(projectBaseDir, virtualFile, false)){
          final String projectRelativePath = VfsUtilCore.getRelativePath(virtualFile, projectBaseDir, '/');
          // When not fully qualified, strip the first path component.
          return useFQName ? projectRelativePath : projectRelativePath.substring(projectRelativePath.indexOf('/') + 1);
        }
      }
      return virtualFile.getPath();
    }
    else {
      return getLibRelativePath(virtualFile, index);
    }
  }

  /** Builds "a/b/c/" by walking up while each ancestor is still inside library classes. */
  public static String getLibRelativePath(final VirtualFile virtualFile, final ProjectFileIndex index) {
    StringBuilder relativePath = new StringBuilder(100);
    VirtualFile directory = virtualFile;
    while (directory != null && index.isInLibraryClasses(directory)) {
      relativePath.insert(0, '/');
      relativePath.insert(0, directory.getName());
      directory = directory.getParent();
    }
    return relativePath.toString();
  }
}
/* * Copyright (c) 2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.internal.cluster.impl; import com.hazelcast.config.Config; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.LifecycleEvent; import com.hazelcast.core.LifecycleEvent.LifecycleState; import com.hazelcast.core.LifecycleListener; import com.hazelcast.instance.TestUtil; import com.hazelcast.nio.Address; import com.hazelcast.spi.properties.GroupProperty; import com.hazelcast.test.AssertTask; import com.hazelcast.test.HazelcastParallelParametersRunnerFactory; import com.hazelcast.test.HazelcastTestSupport; import com.hazelcast.test.TestHazelcastInstanceFactory; import com.hazelcast.test.annotation.QuickTest; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import static com.hazelcast.instance.TestUtil.terminateInstance; import static com.hazelcast.internal.cluster.impl.ClusterDataSerializerHook.EXPLICIT_SUSPICION; import static com.hazelcast.internal.cluster.impl.ClusterDataSerializerHook.FETCH_MEMBER_LIST_STATE; import static 
com.hazelcast.internal.cluster.impl.ClusterDataSerializerHook.F_ID;
import static com.hazelcast.internal.cluster.impl.ClusterDataSerializerHook.HEARTBEAT;
import static com.hazelcast.internal.cluster.impl.ClusterDataSerializerHook.HEARTBEAT_COMPLAINT;
import static com.hazelcast.internal.cluster.impl.ClusterDataSerializerHook.MEMBER_INFO_UPDATE;
import static com.hazelcast.internal.cluster.impl.MembershipUpdateTest.assertMemberViewsAreSame;
import static com.hazelcast.internal.cluster.impl.MembershipUpdateTest.getMemberMap;
import static com.hazelcast.spi.properties.GroupProperty.HEARTBEAT_INTERVAL_SECONDS;
import static com.hazelcast.spi.properties.GroupProperty.MAX_NO_HEARTBEAT_SECONDS;
import static com.hazelcast.spi.properties.GroupProperty.MEMBER_LIST_PUBLISH_INTERVAL_SECONDS;
import static com.hazelcast.spi.properties.GroupProperty.MERGE_FIRST_RUN_DELAY_SECONDS;
import static com.hazelcast.spi.properties.GroupProperty.MERGE_NEXT_RUN_DELAY_SECONDS;
import static com.hazelcast.test.PacketFiltersUtil.dropOperationsBetween;
import static com.hazelcast.test.PacketFiltersUtil.dropOperationsFrom;
import static com.hazelcast.test.PacketFiltersUtil.resetPacketFiltersFrom;
import static java.util.Arrays.asList;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

/**
 * Tests cluster-membership behavior when members leave: graceful shutdown,
 * hard termination (crash), heartbeat timeouts, suspicion handling, and
 * mastership-claim races after the master goes down.
 *
 * <p>Each scenario is run once per failure-detector implementation
 * ("deadline" and "phi-accrual"), selected via the {@code failureDetectorType}
 * parameter and applied in {@link #newHazelcastInstance(Config)}.
 *
 * <p>Many tests simulate network failures by dropping specific cluster
 * operations ({@code HEARTBEAT}, {@code MEMBER_INFO_UPDATE},
 * {@code FETCH_MEMBER_LIST_STATE}, ...) between members using
 * {@code PacketFiltersUtil}. Statement order inside the tests is significant:
 * filters must be installed before the triggering terminate/suspect call.
 */
@RunWith(Parameterized.class)
@Parameterized.UseParametersRunnerFactory(HazelcastParallelParametersRunnerFactory.class)
@Category({QuickTest.class})
public class MembershipFailureTest extends HazelcastTestSupport {

    @Parameterized.Parameters(name = "fd:{0}")
    public static Collection<Object> parameters() {
        return Arrays.asList(new Object[]{"deadline", "phi-accrual"});
    }

    // Failure-detector implementation under test; injected by the Parameterized runner.
    @Parameterized.Parameter
    public String failureDetectorType;

    private TestHazelcastInstanceFactory factory;

    @Before
    public void init() {
        factory = createHazelcastInstanceFactory();
    }

    @Test
    public void slave_shutdown() {
        slave_goesDown(false);
    }

    @Test
    public void slave_crash() {
        slave_goesDown(true);
    }

    /**
     * Takes a non-master member down (crash when {@code terminate} is true,
     * graceful shutdown otherwise) and verifies the remaining two members
     * converge on the same member list with the original master still in charge.
     */
    private void slave_goesDown(boolean terminate) {
        HazelcastInstance master = newHazelcastInstance();
        HazelcastInstance slave1 = newHazelcastInstance();
        HazelcastInstance slave2 = newHazelcastInstance();

        assertClusterSize(3, master, slave2);
        assertClusterSizeEventually(3, slave1);

        if (terminate) {
            terminateInstance(slave1);
        } else {
            slave1.shutdown();
        }

        assertClusterSizeEventually(2, master, slave2);
        assertMasterAddress(getAddress(master), master, slave2);
        assertMemberViewsAreSame(getMemberMap(master), getMemberMap(slave2));
    }

    @Test
    public void master_shutdown() {
        master_goesDown(false);
    }

    @Test
    public void master_crash() {
        master_goesDown(true);
    }

    /**
     * Takes the master down and verifies the oldest remaining member (slave1)
     * becomes the new master and both survivors share the same member view.
     */
    private void master_goesDown(boolean terminate) {
        HazelcastInstance master = newHazelcastInstance();
        HazelcastInstance slave1 = newHazelcastInstance();
        HazelcastInstance slave2 = newHazelcastInstance();

        assertClusterSize(3, master, slave2);
        assertClusterSizeEventually(3, slave1);

        if (terminate) {
            terminateInstance(master);
        } else {
            master.shutdown();
        }

        assertClusterSizeEventually(2, slave1, slave2);
        assertMasterAddress(getAddress(slave1), slave1, slave2);
        assertMemberViewsAreSame(getMemberMap(slave1), getMemberMap(slave2));
    }

    @Test
    public void masterAndMasterCandidate_crashSequentially() {
        masterAndMasterCandidate_crash(false);
    }

    @Test
    public void masterAndMasterCandidate_crashSimultaneously() {
        masterAndMasterCandidate_crash(true);
    }

    /**
     * Crashes both the master and the next-oldest member (the master candidate),
     * either one after the other or concurrently, and verifies the two remaining
     * members re-form a 2-node cluster under slave1.
     */
    private void masterAndMasterCandidate_crash(boolean simultaneousCrash) {
        HazelcastInstance master = newHazelcastInstance();
        HazelcastInstance masterCandidate = newHazelcastInstance();
        HazelcastInstance slave1 = newHazelcastInstance();
        HazelcastInstance slave2 = newHazelcastInstance();

        assertClusterSize(4, master, slave2);
        assertClusterSizeEventually(4, masterCandidate, slave1);

        if (simultaneousCrash) {
            terminateInstanceAsync(master);
            terminateInstanceAsync(masterCandidate);
        } else {
            terminateInstance(master);
            terminateInstance(masterCandidate);
        }

        assertClusterSizeEventually(2, slave1, slave2);
        assertMasterAddress(getAddress(slave1), slave1, slave2);
        assertMemberViewsAreSame(getMemberMap(slave1), getMemberMap(slave2));
    }

    // Terminates the given instance on a background thread (fire-and-forget).
    private static void terminateInstanceAsync(final HazelcastInstance master) {
        spawn(new Runnable() {
            @Override
            public void run() {
                terminateInstance(master);
            }
        });
    }

    /**
     * While the master candidate's mastership claim is blocked waiting on slave1
     * (its FETCH_MEMBER_LIST_STATE packets are dropped), slave1 crashes too.
     * The claim must still complete with the remaining members.
     */
    @Test
    public void slaveCrash_duringMastershipClaim() {
        HazelcastInstance master = newHazelcastInstance();
        HazelcastInstance masterCandidate = newHazelcastInstance();
        HazelcastInstance slave1 = newHazelcastInstance();
        HazelcastInstance slave2 = newHazelcastInstance();

        assertClusterSize(4, master, slave2);
        assertClusterSizeEventually(4, masterCandidate, slave1);

        // drop FETCH_MEMBER_LIST_STATE packets to block mastership claim process
        dropOperationsBetween(masterCandidate, slave1, F_ID, singletonList(FETCH_MEMBER_LIST_STATE));

        terminateInstance(master);

        final ClusterServiceImpl clusterService = getNode(masterCandidate).getClusterService();
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertTrue(clusterService.getClusterJoinManager().isMastershipClaimInProgress());
            }
        });

        sleepSeconds(3);
        terminateInstance(slave1);

        assertClusterSizeEventually(2, masterCandidate);
        assertClusterSizeEventually(2, slave2);
        assertMasterAddress(getAddress(masterCandidate), masterCandidate, slave2);
        assertMemberViewsAreSame(getMemberMap(masterCandidate), getMemberMap(slave2));
    }

    /**
     * The master candidate itself crashes mid-claim; the next member in line
     * (slave1) must take over as master.
     */
    @Test
    public void masterCandidateCrash_duringMastershipClaim() {
        HazelcastInstance master = newHazelcastInstance();
        HazelcastInstance masterCandidate = newHazelcastInstance();
        HazelcastInstance slave1 = newHazelcastInstance();
        HazelcastInstance slave2 = newHazelcastInstance();

        assertClusterSize(4, master, slave2);
        assertClusterSizeEventually(4, masterCandidate, slave1);

        // drop FETCH_MEMBER_LIST_STATE packets to block mastership claim process
        dropOperationsBetween(masterCandidate, slave1, F_ID, singletonList(FETCH_MEMBER_LIST_STATE));

        terminateInstance(master);

        final ClusterServiceImpl clusterService = getNode(masterCandidate).getClusterService();
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertTrue(clusterService.getClusterJoinManager().isMastershipClaimInProgress());
            }
        });

        sleepSeconds(3);
        terminateInstance(masterCandidate);

        assertClusterSizeEventually(2, slave1, slave2);
        assertMasterAddress(getAddress(slave1), slave1, slave2);
        assertMemberViewsAreSame(getMemberMap(slave1), getMemberMap(slave2));
    }

    /**
     * A slave that stops sending heartbeats is evicted by the rest of the
     * cluster once MAX_NO_HEARTBEAT_SECONDS (15s here) elapses.
     */
    @Test
    public void slave_heartbeat_timeout() {
        Config config = new Config().setProperty(MAX_NO_HEARTBEAT_SECONDS.getName(), "15")
                .setProperty(HEARTBEAT_INTERVAL_SECONDS.getName(), "1");
        HazelcastInstance master = newHazelcastInstance(config);
        HazelcastInstance slave1 = newHazelcastInstance(config);
        HazelcastInstance slave2 = newHazelcastInstance(config);

        assertClusterSize(3, master, slave2);
        assertClusterSizeEventually(3, slave1);

        dropOperationsFrom(slave2, F_ID, singletonList(HEARTBEAT));

        assertClusterSizeEventually(2, master, slave1);
        assertClusterSizeEventually(1, slave2);
    }

    /**
     * A master that stops heartbeating is evicted by the slaves; heartbeat
     * complaints from the slaves are also dropped so the master cannot learn
     * it is being suspected.
     */
    @Test
    public void master_heartbeat_timeout() {
        Config config = new Config().setProperty(MAX_NO_HEARTBEAT_SECONDS.getName(), "15")
                .setProperty(HEARTBEAT_INTERVAL_SECONDS.getName(), "1")
                .setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "3");
        HazelcastInstance master = newHazelcastInstance(config);
        HazelcastInstance slave1 = newHazelcastInstance(config);
        HazelcastInstance slave2 = newHazelcastInstance(config);

        assertClusterSize(3, master, slave2);
        assertClusterSizeEventually(3, slave1);

        dropOperationsFrom(master, F_ID, singletonList(HEARTBEAT));
        dropOperationsFrom(slave1, F_ID, singletonList(HEARTBEAT_COMPLAINT));
        dropOperationsFrom(slave2, F_ID, singletonList(HEARTBEAT_COMPLAINT));

        assertClusterSizeEventually(1, master);
        assertClusterSizeEventually(2, slave1, slave2);
    }

    /**
     * Once slave1 suspects the master, the suspicion must stick (heartbeats
     * from master to slave1 are dropped so it cannot be cleared) and slave1
     * ends up alone while master and slave2 continue as a pair.
     */
    @Test
    public void heartbeat_not_sent_to_suspected_member() {
        Config config = new Config().setProperty(MAX_NO_HEARTBEAT_SECONDS.getName(), "10")
                .setProperty(HEARTBEAT_INTERVAL_SECONDS.getName(), "1");
        HazelcastInstance master = newHazelcastInstance(config);
        HazelcastInstance slave1 = newHazelcastInstance(config);
        HazelcastInstance slave2 = newHazelcastInstance(config);

        assertClusterSize(3, master, slave2);
        assertClusterSizeEventually(3, slave1);

        // prevent heartbeat from master to slave to prevent suspect to be removed
        dropOperationsBetween(master, slave1, F_ID, singletonList(HEARTBEAT));
        suspectMember(slave1, master);

        assertClusterSizeEventually(2, master, slave2);
        assertClusterSizeEventually(1, slave1);
    }

    /**
     * A suspicion created by missing heartbeats is cleared again as soon as
     * heartbeats resume (packet filters reset).
     */
    @Test
    public void slave_heartbeat_removes_suspicion() {
        Config config = new Config().setProperty(MAX_NO_HEARTBEAT_SECONDS.getName(), "10")
                .setProperty(HEARTBEAT_INTERVAL_SECONDS.getName(), "1");
        HazelcastInstance master = newHazelcastInstance(config);
        HazelcastInstance slave1 = newHazelcastInstance(config);
        final HazelcastInstance slave2 = newHazelcastInstance(config);

        assertClusterSize(3, master, slave2);
        assertClusterSizeEventually(3, slave1);

        dropOperationsBetween(slave2, slave1, F_ID, singletonList(HEARTBEAT));

        final MembershipManager membershipManager = getNode(slave1).getClusterService().getMembershipManager();
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertTrue(membershipManager.isMemberSuspected(getAddress(slave2)));
            }
        });

        resetPacketFiltersFrom(slave2);

        assertTrueEventually(new AssertTask() {
            @Override
            public void run() throws Exception {
                assertFalse(membershipManager.isMemberSuspected(getAddress(slave2)));
            }
        });
    }

    /**
     * Splits a 4-node cluster into {master, slave1} and {slave2, slave3} by
     * mutual suspicion (heartbeat timeout disabled via an effectively infinite
     * MAX_NO_HEARTBEAT_SECONDS) and checks each half converges on its own
     * consistent member list.
     */
    @Test
    public void slave_receives_member_list_from_non_master() {
        String infiniteTimeout = Integer.toString(Integer.MAX_VALUE);
        Config config = new Config().setProperty(MAX_NO_HEARTBEAT_SECONDS.getName(), infiniteTimeout)
                .setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "5");
        HazelcastInstance master = newHazelcastInstance(config);
        HazelcastInstance slave1 = newHazelcastInstance(config);
        HazelcastInstance slave2 = newHazelcastInstance(config);
        HazelcastInstance slave3 = newHazelcastInstance(config);

        assertClusterSize(4, master, slave3);
        assertClusterSizeEventually(4, slave1, slave2);

        dropOperationsFrom(master, F_ID, singletonList(HEARTBEAT));
        dropOperationsFrom(slave1, F_ID, singletonList(HEARTBEAT));
        dropOperationsFrom(slave2, F_ID, singletonList(HEARTBEAT));
        dropOperationsFrom(slave3, F_ID, singletonList(HEARTBEAT));

        suspectMember(slave2, master);
        suspectMember(slave2, slave1);
        suspectMember(slave3, master);
        suspectMember(slave3, slave1);

        assertClusterSizeEventually(2, slave2, slave3);
        assertMemberViewsAreSame(getMemberMap(slave2), getMemberMap(slave3));

        assertClusterSizeEventually(2, master, slave1);
        assertMemberViewsAreSame(getMemberMap(master), getMemberMap(slave1));
    }

    /**
     * slave1 (the master candidate) is kept from learning about slave3 by
     * dropping MEMBER_INFO_UPDATE from the master. When the master dies,
     * slave1 must still discover the full member list during its claim.
     */
    @Test
    public void master_candidate_has_stale_member_list() {
        Config config = new Config().setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "5");
        HazelcastInstance master = newHazelcastInstance(config);
        HazelcastInstance slave1 = newHazelcastInstance(config);

        assertClusterSize(2, master);
        assertClusterSize(2, slave1);

        HazelcastInstance slave2 = newHazelcastInstance(config);

        assertClusterSize(3, master);
        assertClusterSizeEventually(3, slave1);
        assertClusterSize(3, slave2);

        dropOperationsBetween(master, slave1, F_ID, singletonList(MEMBER_INFO_UPDATE));

        HazelcastInstance slave3 = newHazelcastInstance(config);

        assertClusterSize(4, slave3);
        assertClusterSizeEventually(4, slave2);
        assertClusterSize(3, slave1);

        master.getLifecycleService().terminate();

        assertClusterSizeEventually(3, slave1, slave2, slave3);

        Address newMasterAddress = getAddress(slave1);
        assertMasterAddress(newMasterAddress, slave1, slave2, slave3);
    }

    /**
     * Stale member lists are staggered (slave1 knows 3 members, slave2 knows 4,
     * slave3 knows 5, ...) so the claiming member must fetch member-list state
     * recursively through progressively better-informed members.
     */
    @Test
    public void master_candidate_discovers_member_list_recursively() {
        Config config = new Config().setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "5");
        HazelcastInstance master = newHazelcastInstance(config);
        HazelcastInstance slave1 = newHazelcastInstance(config);
        HazelcastInstance slave2 = newHazelcastInstance(config);

        assertClusterSize(3, master, slave2);
        assertClusterSizeEventually(3, slave1);
        // master, slave1, slave2
        dropOperationsBetween(master, slave1, F_ID, singletonList(MEMBER_INFO_UPDATE));

        HazelcastInstance slave3 = newHazelcastInstance(config);
        // master, slave1, slave2, slave3

        assertClusterSizeEventually(4, slave3, slave2);
        assertClusterSize(3, slave1);

        dropOperationsBetween(master, asList(slave1, slave2), F_ID, singletonList(MEMBER_INFO_UPDATE));

        HazelcastInstance slave4 = newHazelcastInstance(config);
        // master, slave1, slave2, slave3, slave4

        assertClusterSizeEventually(5, slave4, slave3);
        assertClusterSizeEventually(4, slave2);
        assertClusterSize(3, slave1);

        dropOperationsBetween(master, asList(slave1, slave2, slave3), F_ID, singletonList(MEMBER_INFO_UPDATE));

        HazelcastInstance slave5 = newHazelcastInstance(config);
        // master, slave1, slave2, slave3, slave4, slave5

        assertClusterSize(6, slave5);
        assertClusterSizeEventually(6, slave4);
        assertClusterSizeEventually(5, slave3);
        assertClusterSizeEventually(4, slave2);
        assertClusterSize(3, slave1);

        master.getLifecycleService().terminate();

        assertClusterSizeEventually(5, slave1, slave2, slave3);

        Address newMasterAddress = getAddress(slave1);
        assertMasterAddress(newMasterAddress, slave2, slave3, slave4, slave5);
    }

    /**
     * slave1 never learns about slave2 (MEMBER_INFO_UPDATE dropped). After the
     * master dies, neither survivor can agree on a member list, so each ends
     * up as a singleton cluster claiming itself as master.
     */
    @Test
    public void master_candidate_and_new_member_splits_on_master_failure() {
        Config config = new Config().setProperty(MAX_NO_HEARTBEAT_SECONDS.getName(), "15")
                .setProperty(HEARTBEAT_INTERVAL_SECONDS.getName(), "1")
                .setProperty(MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "5");
        HazelcastInstance master = newHazelcastInstance(config);
        HazelcastInstance slave1 = newHazelcastInstance(config);

        assertClusterSize(2, master, slave1);

        dropOperationsBetween(master, slave1, F_ID, singletonList(MEMBER_INFO_UPDATE));

        HazelcastInstance slave2 = newHazelcastInstance(config);

        assertClusterSize(3, master);
        assertClusterSize(3, slave2);
        assertClusterSize(2, slave1);

        master.getLifecycleService().terminate();

        assertClusterSizeEventually(1, slave1);
        assertClusterSizeEventually(1, slave2);

        assertMasterAddress(getAddress(slave1), slave1);
        assertMasterAddress(getAddress(slave2), slave2);
    }

    /**
     * A member forced into a split (by suspecting everyone else) must later
     * merge back via the split-brain merge mechanism; verified with a
     * LifecycleListener waiting for the MERGED event.
     */
    @Test
    public void slave_splits_and_eventually_merges_back() {
        Config config = new Config();
        config.setProperty(MERGE_FIRST_RUN_DELAY_SECONDS.getName(), "15")
                .setProperty(MERGE_NEXT_RUN_DELAY_SECONDS.getName(), "5");
        final HazelcastInstance member1 = newHazelcastInstance(config);
        final HazelcastInstance member2 = newHazelcastInstance(config);
        final HazelcastInstance member3 = newHazelcastInstance(config);

        assertClusterSize(3, member1, member3);
        assertClusterSizeEventually(3, member2);

        final CountDownLatch mergeLatch = new CountDownLatch(1);
        member3.getLifecycleService().addLifecycleListener(new LifecycleListener() {
            @Override
            public void stateChanged(LifecycleEvent event) {
                if (event.getState() == LifecycleState.MERGED) {
                    mergeLatch.countDown();
                }
            }
        });

        suspectMember(member3, member1);
        suspectMember(member3, member2);
        assertClusterSizeEventually(1, member3);

        assertOpenEventually(mergeLatch);
        assertMemberViewsAreSame(getMemberMap(member1), getMemberMap(member2));
        assertMemberViewsAreSame(getMemberMap(member1), getMemberMap(member3));
    }

    @Test
    public void masterCandidate_canGracefullyShutdown_whenMasterShutdown() throws Exception {
        masterCandidate_canGracefullyShutdown_whenMasterGoesDown(false);
    }

    @Test
    public void masterCandidate_canGracefullyShutdown_whenMasterCrashes() throws Exception {
        masterCandidate_canGracefullyShutdown_whenMasterGoesDown(true);
    }

    /**
     * The master candidate starts a graceful shutdown; 2 seconds later the
     * master goes down as well. The candidate's shutdown must still complete
     * (shutdownF.get() returns) and the remaining slave becomes a singleton.
     * Migrations are slowed down so the shutdown overlaps the master failure.
     */
    private void masterCandidate_canGracefullyShutdown_whenMasterGoesDown(boolean terminate) throws Exception {
        Config config = new Config();
        // slow down the migrations
        config.setProperty(GroupProperty.PARTITION_MIGRATION_INTERVAL.getName(), "1");
        config.setProperty(GroupProperty.PARTITION_COUNT.getName(), "12");
        HazelcastInstance master = factory.newHazelcastInstance(config);
        final HazelcastInstance masterCandidate = factory.newHazelcastInstance(config);
        HazelcastInstance slave = factory.newHazelcastInstance(config);

        warmUpPartitions(master, masterCandidate, slave);

        Future shutdownF = spawn(new Runnable() {
            @Override
            public void run() {
                masterCandidate.shutdown();
            }
        });

        sleepSeconds(2);
        if (terminate) {
            terminateInstance(master);
        } else {
            master.shutdown();
        }

        shutdownF.get();
        assertClusterSizeEventually(1, slave);
    }

    /**
     * Two concurrent mastership claims: the elder claimant (member2) wins
     * first; the younger (member4 accepting member2) must retry rather than
     * diverge. member3, whose claim times out (10s), splits off alone.
     */
    @Test
    public void secondMastershipClaimByYounger_shouldRetry_when_firstMastershipClaimByElder_accepted() {
        Config config = new Config();
        config.setProperty(GroupProperty.MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "5");
        HazelcastInstance member1 = newHazelcastInstance(config);
        final HazelcastInstance member2 = newHazelcastInstance(config);
        HazelcastInstance member3 = newHazelcastInstance(new Config()
                .setProperty(GroupProperty.MASTERSHIP_CLAIM_TIMEOUT_SECONDS.getName(), "10"));
        final HazelcastInstance member4 = newHazelcastInstance(config);

        assertClusterSize(4, member1, member4);
        assertClusterSizeEventually(4, member2, member3);

        dropOperationsFrom(member1, F_ID, asList(MEMBER_INFO_UPDATE, HEARTBEAT));
        dropOperationsFrom(member2, F_ID, singletonList(FETCH_MEMBER_LIST_STATE));
        // If we allow explicit suspicions from member3, when member4 sends a heartbeat to member3
        // after member3 splits from the cluster, member3 will send an explicit suspicion to member4
        // and member4 will start its own mastership claim.
        dropOperationsFrom(member3, F_ID, asList(FETCH_MEMBER_LIST_STATE, EXPLICIT_SUSPICION));

        suspectMember(member2, member3);
        suspectMember(member3, member2);

        suspectMember(member3, member1);
        suspectMember(member2, member1);

        suspectMember(member4, member1);
        suspectMember(member4, member2);

        // member2 will complete mastership claim, but member4 won't learn new member list
        dropOperationsFrom(member2, F_ID, singletonList(MEMBER_INFO_UPDATE));
        // member4 should accept member2 as master during mastership claim
        assertMasterAddressEventually(getAddress(member2), member4);

        resetPacketFiltersFrom(member3);
        // member3 will be split when master claim timeouts
        assertClusterSizeEventually(1, member3);

        // member4 will learn member list
        resetPacketFiltersFrom(member2);
        assertClusterSizeEventually(2, member2, member4);
        assertMemberViewsAreSame(getMemberMap(member2), getMemberMap(member4));

        resetPacketFiltersFrom(member1);
        assertClusterSizeEventually(1, member1);
    }

    /**
     * Mirror of the previous test: the younger claimant (member3) completes
     * its claim first, so the elder's (member2's) later claim must fail and
     * member2 splits off alone.
     */
    @Test
    public void secondMastershipClaimByElder_shouldFail_when_firstMastershipClaimByYounger_accepted() {
        Config config = new Config();
        config.setProperty(GroupProperty.MEMBER_LIST_PUBLISH_INTERVAL_SECONDS.getName(), "5");
        HazelcastInstance member1 = newHazelcastInstance(config);
        final HazelcastInstance member2 = newHazelcastInstance(config);
        final HazelcastInstance member3 = newHazelcastInstance(config);
        final HazelcastInstance member4 = newHazelcastInstance(config);

        assertClusterSize(4, member1, member4);
        assertClusterSizeEventually(4, member2, member3);

        dropOperationsFrom(member1, F_ID, asList(MEMBER_INFO_UPDATE, HEARTBEAT));
        dropOperationsFrom(member2, F_ID, asList(FETCH_MEMBER_LIST_STATE, HEARTBEAT));
        dropOperationsFrom(member3, F_ID, singletonList(FETCH_MEMBER_LIST_STATE));

        suspectMember(member2, member3);
        suspectMember(member3, member2);

        suspectMember(member3, member1);
        suspectMember(member2, member1);

        suspectMember(member4, member1);
        suspectMember(member4, member2);

        // member3 will complete mastership claim, but member4 won't learn new member list
        dropOperationsFrom(member3, F_ID, singletonList(MEMBER_INFO_UPDATE));
        // member4 should accept member3 as master during mastership claim
        assertMasterAddressEventually(getAddress(member3), member4);

        resetPacketFiltersFrom(member2);
        // member2 will be split when master claim timeouts
        assertClusterSizeEventually(1, member2);

        // member4 will learn member list
        resetPacketFiltersFrom(member3);
        assertClusterSizeEventually(2, member3, member4);
        assertMemberViewsAreSame(getMemberMap(member3), getMemberMap(member4));

        resetPacketFiltersFrom(member1);
        assertClusterSizeEventually(1, member1);
    }

    /**
     * Churn test: repeatedly starts 4 members and terminates 3 at random,
     * concurrently, then checks all survivors agree on size and member view.
     */
    @Test
    public void test_whenNodesStartedTerminatedConcurrently() throws InterruptedException {
        newHazelcastInstance();

        for (int i = 0; i < 3; i++) {
            startInstancesConcurrently(4);
            terminateRandomInstancesConcurrently(3);

            HazelcastInstance[] instances = getAllHazelcastInstances().toArray(new HazelcastInstance[0]);
            assertEquals(i + 2, instances.length);

            for (HazelcastInstance instance : instances) {
                assertClusterSizeEventually(instances.length, instance);
                assertMemberViewsAreSame(getMemberMap(instances[0]), getMemberMap(instance));
            }
        }
    }

    // Starts `count` instances on separate threads; waits up to 2 minutes for all to come up.
    private void startInstancesConcurrently(int count) throws InterruptedException {
        final CountDownLatch latch = new CountDownLatch(count);
        for (int i = 0; i < count; i++) {
            new Thread() {
                public void run() {
                    newHazelcastInstance();
                    latch.countDown();
                }
            }.start();
        }
        assertTrue(latch.await(2, TimeUnit.MINUTES));
    }

    // Terminates `count` randomly chosen instances on separate threads; waits up to 2 minutes.
    private void terminateRandomInstancesConcurrently(int count) throws InterruptedException {
        List<HazelcastInstance> instances = new ArrayList<HazelcastInstance>(getAllHazelcastInstances());
        assertThat(instances.size(), greaterThanOrEqualTo(count));

        Collections.shuffle(instances);
        instances = instances.subList(0, count);

        final CountDownLatch latch = new CountDownLatch(count);
        for (final HazelcastInstance hz : instances) {
            new Thread() {
                public void run() {
                    TestUtil.terminateInstance(hz);
                    latch.countDown();
                }
            }.start();
        }
        assertTrue(latch.await(2, TimeUnit.MINUTES));
    }

    HazelcastInstance newHazelcastInstance() {
        return newHazelcastInstance(new Config());
    }

    // All instances are created through here so the parameterized failure detector is applied.
    HazelcastInstance newHazelcastInstance(Config config) {
        config.setProperty(GroupProperty.HEARTBEAT_FAILURE_DETECTOR_TYPE.getName(), failureDetectorType);
        return factory.newHazelcastInstance(config);
    }

    Collection<HazelcastInstance> getAllHazelcastInstances() {
        return factory.getAllHazelcastInstances();
    }
}
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.auth.ldap;

import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.DirContext;

/**
 * Describes how accounts and groups are modeled by a particular LDAP server
 * flavor: the search patterns and attribute names Gerrit should use.
 *
 * <p>{@link #guessType(DirContext)} sniffs the directory's root DSE to decide
 * between Active Directory, FreeIPA, and the generic RFC 2307 schema.
 */
abstract class LdapType {
  /** Shared fallback instance for plain RFC 2307 directories. */
  static final LdapType RFC_2307 = new Rfc2307();

  /**
   * Inspects the server's root attributes and returns the matching schema
   * description. Active Directory is recognized by its capability OIDs,
   * FreeIPA by its enrollment extension OID; anything else is treated as
   * RFC 2307.
   */
  static LdapType guessType(DirContext ctx) throws NamingException {
    Attributes rootAttributes = ctx.getAttributes("");

    Attribute capabilities = rootAttributes.get("supportedCapabilities");
    boolean activeDirectory = capabilities != null
        && (capabilities.contains("1.2.840.113556.1.4.800")
            || capabilities.contains("1.2.840.113556.1.4.1851"));
    if (activeDirectory) {
      return new ActiveDirectory();
    }

    Attribute extensions = rootAttributes.get("supportedExtension");
    if (extensions != null && extensions.contains("2.16.840.1.113730.3.8.10.1")) {
      return new FreeIPA();
    }

    return RFC_2307;
  }

  abstract String groupPattern();

  abstract String groupMemberPattern();

  abstract String groupName();

  abstract String accountFullName();

  abstract String accountEmailAddress();

  abstract String accountSshUserName();

  abstract String accountMemberField();

  abstract boolean accountMemberExpandGroups();

  abstract String accountPattern();

  /** Generic POSIX-style directory following RFC 2307. */
  private static class Rfc2307 extends LdapType {
    @Override
    String accountPattern() {
      return "(uid=${username})";
    }

    @Override
    String accountFullName() {
      return "displayName";
    }

    @Override
    String accountEmailAddress() {
      return "mail";
    }

    @Override
    String accountSshUserName() {
      return "uid";
    }

    @Override
    String accountMemberField() {
      return null; // Not defined in RFC 2307
    }

    @Override
    boolean accountMemberExpandGroups() {
      return true;
    }

    @Override
    String groupPattern() {
      return "(cn=${groupname})";
    }

    @Override
    String groupMemberPattern() {
      return "(|(memberUid=${username})(gidNumber=${gidNumber}))";
    }

    @Override
    String groupName() {
      return "cn";
    }
  }

  /** Microsoft Active Directory; membership is carried on the account entry. */
  private static class ActiveDirectory extends LdapType {
    @Override
    String accountPattern() {
      return "(&(objectClass=user)(sAMAccountName=${username}))";
    }

    @Override
    String accountFullName() {
      return "${givenName} ${sn}";
    }

    @Override
    String accountEmailAddress() {
      return "mail";
    }

    @Override
    String accountSshUserName() {
      return "${sAMAccountName.toLowerCase}";
    }

    @Override
    String accountMemberField() {
      return "memberOf";
    }

    @Override
    boolean accountMemberExpandGroups() {
      return true;
    }

    @Override
    String groupPattern() {
      return "(&(objectClass=group)(cn=${groupname}))";
    }

    @Override
    String groupMemberPattern() {
      return null; // Active Directory uses memberOf in the account
    }

    @Override
    String groupName() {
      return "cn";
    }
  }

  /** FreeIPA; like AD it stores membership on the account via memberOf. */
  private static class FreeIPA extends LdapType {
    @Override
    String accountPattern() {
      return "(uid=${username})";
    }

    @Override
    String accountFullName() {
      return "displayName";
    }

    @Override
    String accountEmailAddress() {
      return "mail";
    }

    @Override
    String accountSshUserName() {
      return "uid";
    }

    @Override
    String accountMemberField() {
      return "memberOf";
    }

    @Override
    boolean accountMemberExpandGroups() {
      return false;
    }

    @Override
    String groupPattern() {
      return "(cn=${groupname})";
    }

    @Override
    String groupMemberPattern() {
      return null; // FreeIPA uses memberOf in the account
    }

    @Override
    String groupName() {
      return "cn";
    }
  }
}
package com.log4think.slidingpuzzle.view;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Point;
import android.graphics.PointF;
import android.os.Handler;
import android.support.v4.view.MotionEventCompat;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;

import com.log4think.slidingpuzzle.R;
import com.log4think.slidingpuzzle.utils.Log;
import com.log4think.slidingpuzzle.utils.Utils;

/**
 * Sliding-puzzle board: a square grid of {@link CellView}s, one of which is the
 * empty slot. Handles dragging a row/column segment toward the empty cell,
 * shuffling, and animated restore along the recorded move history
 * ({@code puzzlePath}).
 *
 * @author liujinyu <simon.jinyu.liu@gmail.com>
 */
public class BoardView extends ViewGroup implements View.OnTouchListener {
  // Retained for logging; not referenced in this chunk.
  private static final String TAG = Log.tag(BoardView.class);

  private static final int MAX_SHUFFLE_STEPS = 50;
  private static final int CELL_ANIMATION_DURATION_SHORT = 100;

  // Pixel size of one cell; both forced equal in onMeasure to keep cells square.
  private int childWidth, childHeight;
  private int cellPadding;
  private int colCount, rowCount;
  private List<CellView> cellViews;

  // Drag state: direction toward the empty cell, last touch point (raw coords),
  // and the chain of cells between the touched cell and the empty cell.
  private Direction capturedDirection;
  private PointF lastDragPoint;
  private List<CellView> capturedViews;
  private CellView emptyView;
  // Pointer id being tracked for the current drag; INVALID_POINTER_ID when idle.
  private int activePointerId;
  // History of empty-cell moves; replayed backwards by restore().
  private List<Direction> puzzlePath;

  // handler posts UI mutations back to the main thread from the restore worker.
  private Handler handler = new Handler();
  private ExecutorService executorService = Executors.newSingleThreadExecutor();

  public BoardView(Context context) {
    super(context);
    init();
  }

  public BoardView(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
  }

  public BoardView(Context context, AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    init();
  }

  private void init() {
    childWidth = 0;
    childHeight = 0;
    colCount = 1;
    rowCount = 1;
    activePointerId = MotionEvent.INVALID_POINTER_ID;

    cellViews = new ArrayList<CellView>();
    capturedViews = new ArrayList<CellView>();
    puzzlePath = new ArrayList<Direction>();
  }

  /** Sets the grid dimensions and rebuilds the board from the source image. */
  public void setBoardSize(int colCount, int rowCount) {
    this.colCount = colCount;
    this.rowCount = rowCount;
    reset();
  }

  /**
   * Rebuilds all cell views: slices the source bitmap into colCount x rowCount
   * tiles, assigns grid coordinates, and marks the last cell as the empty slot.
   */
  private void reset() {
    // load image
    List<Bitmap> slices = Utils.sliceBitmap(getContext(), R.drawable.globe, colCount, rowCount);

    // generate sliced cell views
    cellViews.clear();
    for (int i = 0; i < slices.size(); i++) {
      CellView view = new CellView(getContext());
      view.setOnTouchListener(this);
      view.setIndex(i);
      view.setCoord(i % colCount, i / colCount);
      view.setImageBitmap(slices.get(i));
      cellViews.add(view);
    }

    // set the last view to empty space
    if (cellViews.size() > 0) {
      emptyView = cellViews.get(cellViews.size() - 1);
      emptyView.setEmpty(true);
    }
    slices.clear();

    // add views to UI
    removeAllViews();
    for (View cell : cellViews) {
      addView(cell);
    }
  }

  @Override
  protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    int rw = MeasureSpec.getSize(widthMeasureSpec);
    int rh = MeasureSpec.getSize(heightMeasureSpec);

    if (colCount * rowCount == 0) {
      super.onMeasure(widthMeasureSpec, heightMeasureSpec);
      return;
    }

    childWidth = (rw - getPaddingLeft() - getPaddingRight()) / colCount;
    childHeight = (rh - getPaddingTop() - getPaddingBottom()) / rowCount;

    // make the board to square
    childWidth = Math.min(childWidth, childHeight);
    childHeight = Math.min(childWidth, childHeight);

    // re-calculate dimension
    int vw = childWidth * colCount + getPaddingLeft() + getPaddingRight();
    int vh = childHeight * rowCount + getPaddingTop() + getPaddingBottom();
    setMeasuredDimension(vw, vh);
  }

  @Override
  protected void onLayout(boolean changed, int l, int t, int r, int b) {
    // Position every cell according to its (col, row) grid coordinate.
    for (CellView view : cellViews) {
      view.setPadding(cellPadding, cellPadding, cellPadding, cellPadding);
      Point p = calculateCellViewPosition(view);
      view.layout(p.x, p.y, p.x + childWidth, p.y + childHeight);
    }
  }

  private PointF getRawXY(View v, MotionEvent event) {
    return getRawXY(v, event, MotionEventCompat.getActionIndex(event));
  }

  /**
   * Returns the touch point in screen coordinates (view-local coordinates
   * offset by the view's on-screen location), or null when the pointer index
   * is out of range.
   */
  private PointF getRawXY(View v, MotionEvent event, int pointerIndex) {
    final int location[] = {0, 0};
    v.getLocationOnScreen(location);
    if (pointerIndex < MotionEventCompat.getPointerCount(event)) {
      final float x = MotionEventCompat.getX(event, pointerIndex) + location[0];
      final float y = MotionEventCompat.getY(event, pointerIndex) + location[1];
      return new PointF(x, y);
    } else {
      return null;
    }
  }

  @Override
  public boolean onInterceptTouchEvent(MotionEvent ev) {
    // While disabled (e.g. during restore), steal touches from the cells.
    return !isEnabled();
  }

  /**
   * the touch event handler of cell view
   *
   * @param v cell view
   * @param event
   * @return
   */
  @Override
  public boolean onTouch(View v, MotionEvent event) {
    CellView view = (CellView) v;
    // NOTE(review): '&&' binds tighter than '||', so this rejects only when
    // (disabled AND empty) or off-axis; likely intended
    // (!isEnabled() || view.isEmpty() || !view.isInSameAxis(emptyView)) — confirm.
    if (!isEnabled() && view.isEmpty() || !view.isInSameAxis(emptyView)) {
      return false;
    }

    final int action = MotionEventCompat.getActionMasked(event);
    switch (action) {
      case MotionEvent.ACTION_DOWN: {
        // Begin a drag only when no other pointer is already being tracked.
        if (activePointerId == MotionEvent.INVALID_POINTER_ID) {
          capturedDirection = getEmptyCellDirection(view);
          capturedViews = getCellsToEmptyView(view);
          lastDragPoint = getRawXY(v, event);
          activePointerId = MotionEventCompat.getPointerId(event, 0);
        }
        break;
      }

      case MotionEvent.ACTION_CANCEL:
      case MotionEvent.ACTION_UP: {
        if (activePointerId == MotionEvent.INVALID_POINTER_ID) {
          break;
        }
        activePointerId = MotionEvent.INVALID_POINTER_ID;

        // Commit the move when dragged past the halfway point, or when the
        // gesture was effectively a tap (< 5 px); otherwise snap back.
        if (getMovedDelta() > childWidth / 2
            || getMovedDelta() > childHeight / 2 // moved half way
            || getMovedDelta() < 5 // click
        ) {
          // move empty cell to touched cell
          Direction direction = getNegativeDirection(capturedDirection);
          for (int i = 0; i < capturedViews.size(); i++) {
            moveEmptyCell(direction);
            puzzlePath.add(direction);
          }
          // move all other captured cells to new place
          // moveCells(capturedViews, capturedDirection);
        } else {
          // move cell views to original place
          animateMoveCells(capturedViews, CELL_ANIMATION_DURATION_SHORT);
        }

        capturedDirection = null;
        capturedViews = null;
        lastDragPoint = null;
        break;
      }

      case MotionEvent.ACTION_MOVE: {
        if (activePointerId == MotionEvent.INVALID_POINTER_ID) {
          break;
        }
        final int pointerIndex = MotionEventCompat.findPointerIndex(event, activePointerId);
        PointF p = null;
        try {
          p = getRawXY(v, event, pointerIndex);
        } catch (Exception e) {}
        if (p == null)
          break;

        // Drag the captured chain by the finger delta since the last event.
        if (lastDragPoint != null) {
          moveCapturedCells(p.x - lastDragPoint.x, p.y - lastDragPoint.y);
        }
        lastDragPoint = p;
        break;
      }

      case MotionEvent.ACTION_POINTER_UP: {
        final int pointerIndex = MotionEventCompat.getActionIndex(event);
        final int pointerId = MotionEventCompat.getPointerId(event, pointerIndex);
        if (pointerId == activePointerId) {
          // choose another pointer
          final int newPointerIndex = pointerIndex == 0 ? 1 : 0;
          if (newPointerIndex < MotionEventCompat.getPointerCount(event)) {
            lastDragPoint = getRawXY(v, event, newPointerIndex);
            activePointerId = MotionEventCompat.getPointerId(event, newPointerIndex);
          }
        }
        break;
      }
    }
    return true;
  }

  /**
   * @param view
   * @return calculate the position of the view
   */
  private Point calculateCellViewPosition(CellView view) {
    return new Point(getPaddingLeft() + view.getCol() * childWidth,
        getPaddingTop() + view.getRow() * childHeight);
  }

  /**
   * move the cells by animate
   *
   * @param views
   * @param duration
   */
  private void animateMoveCells(List<CellView> views, int duration) {
    if (views != null) {
      for (CellView view : views) {
        animateMoveCell(view, duration);
      }
    }
  }

  /**
   * move the cells by animate
   *
   * @param view
   * @param duration
   */
  private void animateMoveCell(CellView view, int duration) {
    Point p = calculateCellViewPosition(view);
    view.animate().x(p.x).y(p.y).setDuration(duration);
  }

  /**
   * move the captured cells with the touch event
   *
   * @param dx
   * @param dy
   */
  private void moveCapturedCells(float dx, float dy) {
    if (capturedViews == null || capturedViews.size() == 0) {
      return;
    }

    // Clamp movement to the drag axis and to at most one cell of travel,
    // using the first captured cell (the one under the finger) as reference.
    CellView cellView = capturedViews.get(0);
    Point p = calculateCellViewPosition(cellView);
    if (capturedDirection.x != 0) {
      dy = 0;
      if (!Utils.isValueInRange(cellView.getX() + dx, p.x, p.x + capturedDirection.x * childWidth)) {
        return;
      }
    } else if (capturedDirection.y != 0) {
      dx = 0;
      if (!Utils.isValueInRange(cellView.getY() + dy, p.y, p.y + capturedDirection.y * childHeight)) {
        return;
      }
    }

    // set the cells position
    for (CellView view : capturedViews) {
      view.setX(view.getX() + dx);
      view.setY(view.getY() + dy);
    }
  }

  // Linear scan for the cell flagged empty; null when none exists.
  private CellView getEmptyView() {
    for (CellView view : cellViews) {
      if (view.isEmpty()) {
        return view;
      }
    }
    return null;
  }

  // Looks up the cell occupying (col, row); null when out of bounds or vacant.
  private CellView getView(int col, int row) {
    if (col < 0 || col > colCount - 1 || row < 0 || row > rowCount - 1) {
      return null;
    }
    for (CellView view : cellViews) {
      if (view.getCol() == col && view.getRow() == row) {
        return view;
      }
    }
    return null;
  }

  /**
   * @param view
   * @return all cells between specific view and empty view
   */
  private List<CellView> getCellsToEmptyView(CellView view) {
    List<CellView> result = new ArrayList<CellView>();
    // Walk from the touched cell toward the empty cell, exclusive of the
    // empty cell itself; order starts at the touched cell.
    if (emptyView.isAbove(view)) {
      for (int i = view.getRow(); i > emptyView.getRow(); i--) {
        result.add(getView(view.getCol(), i));
      }
    } else if (emptyView.isBelow(view)) {
      for (int i = view.getRow(); i < emptyView.getRow(); i++) {
        result.add(getView(view.getCol(), i));
      }
    } else if (emptyView.isToLeftOf(view)) {
      for (int i = view.getCol(); i > emptyView.getCol(); i--) {
        result.add(getView(i, view.getRow()));
      }
    } else if (emptyView.isToRightOf(view)) {
      for (int i = view.getCol(); i < emptyView.getCol(); i++) {
        result.add(getView(i, view.getRow()));
      }
    }
    return result;
  }

  // Unit direction pointing from the given cell toward the empty cell;
  // (0, 0) when they are not axis-aligned neighbors in any direction.
  private Direction getEmptyCellDirection(CellView cellView) {
    if (emptyView.isAbove(cellView)) {
      return new Direction(0, -1);
    } else if (emptyView.isBelow(cellView)) {
      return new Direction(0, 1);
    } else if (emptyView.isToLeftOf(cellView)) {
      return new Direction(-1, 0);
    } else if (emptyView.isToRightOf(cellView)) {
      return new Direction(1, 0);
    } else {
      return new Direction(0, 0);
    }
  }

  // Pixel distance the first captured cell has been dragged from its grid slot
  // (x-axis distance when nonzero, else y-axis distance).
  private int getMovedDelta() {
    if (capturedViews == null || capturedViews.size() == 0) {
      return 0;
    }

    CellView view = capturedViews.get(0);
    Point p = calculateCellViewPosition(view);
    int deltaX = (int) Math.abs(p.x - view.getX());
    int deltaY = (int) Math.abs(p.y - view.getY());
    if (deltaX != 0) {
      return deltaX;
    } else {
      return deltaY;
    }
  }

  // Uniformly random unit direction (down, right, up, or left).
  private Direction getRandomDirection() {
    switch (Utils.randInt(4)) {
      case 0:
        return new Direction(0, 1);
      case 1:
        return new Direction(1, 0);
      case 2:
        return new Direction(0, -1);
      default:
        return new Direction(-1, 0);
    }
  }

  private Direction getNegativeDirection(Direction direction) {
    return new Direction(-direction.x, -direction.y);
  }

  // Shifts the grid coordinates of each given cell by one step in `direction`.
  private void moveCells(List<CellView> cellViews, Direction direction) {
    if (cellViews != null) {
      for (CellView view : cellViews) {
        view.setCoord(view.getCol() + direction.x, view.getRow() + direction.y);
      }
    }
  }

  /**
   * Swaps the empty cell with its neighbor one step away in {@code direction},
   * animating the displaced cell into the vacated slot. Returns the displaced
   * cell, or null when the move would leave the board (or there is no empty
   * cell). Notifies {@code onMovedListener} on success.
   */
  public CellView moveEmptyCell(Direction direction) {
    if (emptyView == null) {
      return null;
    }

    int targetCol = emptyView.getCol() + direction.x;
    int targetRow = emptyView.getRow() + direction.y;
    CellView view = getView(targetCol, targetRow);
    if (view != null) {
      view.setCoord(emptyView.getCol(), emptyView.getRow());
      emptyView.setCoord(targetCol, targetRow);

      // Empty cell jumps instantly; the displaced cell animates over.
      Point p = calculateCellViewPosition(emptyView);
      emptyView.layout(p.x, p.y, p.x + childWidth, p.y + childHeight);
      animateMoveCell(view, CELL_ANIMATION_DURATION_SHORT);

      if (onMovedListener != null) {
        onMovedListener.onMoved();
      }
    }
    return view;
  }

  public void setCellPadding(int cellPadding) {
    this.cellPadding = cellPadding;
  }

  /**
   * Shuffle the puzzle cells
   */
  public void shuffle() {
    if (!isEnabled() || colCount == 0 || rowCount == 0 || emptyView == null) {
      return;
    }

    setEnabled(false);

    // Never immediately undo the previous move: track the inverse of the
    // last applied direction and re-roll when the random pick matches it.
    Direction lastNegativeDirection = puzzlePath.isEmpty() ?
        new Direction(0, 0) : getNegativeDirection(puzzlePath.get(puzzlePath.size() - 1));
    int steps = Utils.randInt(MAX_SHUFFLE_STEPS) + 10;
    while (steps > 0) {
      Direction direction = getRandomDirection();
      while (direction.equals(lastNegativeDirection)) {
        direction = getRandomDirection();
      }

      // Only count moves that actually succeeded (stayed on the board).
      CellView view = moveEmptyCell(direction);
      if (view != null) {
        puzzlePath.add(direction);
        lastNegativeDirection = getNegativeDirection(direction);
        steps--;
      }
    }

    setEnabled(true);
  }

  /**
   * Replays {@code puzzlePath} backwards on a worker thread, posting each
   * inverse move to the UI thread at 300 ms intervals; the board is disabled
   * for the duration and re-enabled when done.
   */
  public void restore() {
    if (!isEnabled()) {
      return;
    }

    setEnabled(false);
    executorService.execute(new Runnable() {
      @Override
      public void run() {
        for (int i = puzzlePath.size() - 1; i >= 0; i--) {
          final Direction direction = getNegativeDirection(puzzlePath.get(i));
          puzzlePath.remove(i);
          handler.post(new Runnable() {
            @Override
            public void run() {
              moveEmptyCell(direction);
            }
          });

          try {
            Thread.sleep(300);
          } catch (InterruptedException e) {
            e.printStackTrace();
          }
        }

        // finish
        puzzlePath.clear();
        handler.post(new Runnable() {
          @Override
          public void run() {
            setEnabled(true);
          }
        });
      }
    });
  }

  public int getPuzzleSteps() {
    return puzzlePath.size();
  }

  /**
   * a class used to describe the movement direction
   * x: -1 left, +1 right
   * y: -1 top, +1 bottom
   */
  public class Direction {
    public int x, y;

    public Direction(int x, int y) {
      // Components are normalized to {-1, 0, 1}.
      this.x = Utils.signum(x);
      this.y = Utils.signum(y);
    }

    // NOTE(review): equals is overridden without hashCode; fine for the
    // List.equals-style comparisons used here, but unsafe as a hash key.
    @Override
    public boolean equals(Object o) {
      if (this == o)
        return true;
      if (o instanceof Direction) {
        Direction d = (Direction) o;
        return this.x == d.x && this.y == d.y;
      } else {
        return false;
      }
    }
  }

  // Callback fired after each successful empty-cell move (user or programmatic).
  public interface OnMovedListener {
    void onMoved();
  }

  public void setOnMovedListener(OnMovedListener onMovedListener) {
    this.onMovedListener = onMovedListener;
  }

  private OnMovedListener onMovedListener;
}
/* Copyright 2015 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.google.security.zynamics.binnavi.Gui.GraphWindows.NodeTaggingTree; import com.google.common.base.Preconditions; import com.google.security.zynamics.binnavi.Gui.HotKeys; import com.google.security.zynamics.binnavi.Gui.Actions.CActionProxy; import com.google.security.zynamics.binnavi.Gui.DragAndDrop.CDefaultTransferHandler; import com.google.security.zynamics.binnavi.Gui.DragAndDrop.IDropHandler; import com.google.security.zynamics.binnavi.Gui.GraphWindows.NodeTaggingTree.Actions.CAddRootTagNodeAction; import com.google.security.zynamics.binnavi.Gui.GraphWindows.NodeTaggingTree.DragDrop.CTagSortingHandler; import com.google.security.zynamics.binnavi.Gui.GraphWindows.NodeTaggingTree.Implementations.CTagFunctions; import com.google.security.zynamics.binnavi.Gui.GraphWindows.NodeTaggingTree.Nodes.CTagTreeNode; import com.google.security.zynamics.binnavi.Gui.GraphWindows.NodeTaggingTree.Nodes.CTaggedGraphNodeNode; import com.google.security.zynamics.binnavi.Gui.GraphWindows.NodeTaggingTree.Nodes.CTaggedGraphNodesContainerNode; import com.google.security.zynamics.binnavi.Gui.GraphWindows.NodeTaggingTree.Nodes.ITagTreeNode; import com.google.security.zynamics.binnavi.Help.CHelpFunctions; import com.google.security.zynamics.binnavi.Help.IHelpInformation; import com.google.security.zynamics.binnavi.Help.IHelpProvider; import com.google.security.zynamics.binnavi.Tagging.ITagManager; import 
com.google.security.zynamics.binnavi.yfileswrap.Gui.GraphWindows.NodeTaggingTree.Nodes.CRootTagTreeNode; import com.google.security.zynamics.binnavi.yfileswrap.zygraph.NaviNode; import com.google.security.zynamics.binnavi.yfileswrap.zygraph.ZyGraph; import com.google.security.zynamics.zylib.gui.SwingInvoker; import com.google.security.zynamics.zylib.gui.dndtree.DNDTree; import com.google.security.zynamics.zylib.gui.jtree.TreeHelpers; import com.google.security.zynamics.zylib.gui.zygraph.IZyGraphSelectionListener; import com.google.security.zynamics.zylib.gui.zygraph.IZyGraphVisibilityListener; import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.functions.MoveFunctions; import com.google.security.zynamics.zylib.yfileswrap.gui.zygraph.functions.ZoomFunctions; import java.awt.dnd.DnDConstants; import java.awt.event.ActionEvent; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Enumeration; import java.util.List; import java.util.Objects; import javax.swing.AbstractAction; import javax.swing.InputMap; import javax.swing.JComponent; import javax.swing.JFrame; import javax.swing.JPopupMenu; import javax.swing.event.TreeModelEvent; import javax.swing.event.TreeModelListener; import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.DefaultTreeSelectionModel; import javax.swing.tree.TreePath; import javax.swing.tree.TreeSelectionModel; /** * Tree where the available node tags are shown in graph windows. */ public final class CTagsTree extends DNDTree implements IHelpProvider { /** * Used for serialization. */ private static final long serialVersionUID = -1074808285623025354L; /** * Parent window used for dialogs. */ private final JFrame m_parent; /** * Graph shown in the window. */ private final ZyGraph m_graph; /** * Provides tag information. */ private final ITagManager m_tagManager; /** * Model of the tags tree. 
*/ private final CTagsTreeModel m_model; /** * Root node of the tags tree. This is an invisible dummy node. */ private final CRootTagTreeNode m_rootNode; /** * Handles clicks on the tree. */ private final InternalMouseListener m_mouseListener = new InternalMouseListener(); /** * Updates the tree GUI on selection changes in the graph. */ private final InternalGraphSelectionListener m_graphSelectionListener = new InternalGraphSelectionListener(); /** * Updates the tree GUI on visibility changes in the graph. */ private final InternalGraphVisibilityListener m_graphVisibilityListener = new InternalGraphVisibilityListener(); /** * The last selection path that can actually be selected. */ private TreePath m_lastValidSelectionPath = null; /** * The last selected node that can actually be selected. */ private CTagTreeNode m_lastValidSelectedNode = null; /** * Creates a new tree object. * * @param parent Parent window used for dialogs. * @param graph Graph shown in the window. * @param manager Provides tag information. 
*/ public CTagsTree(final JFrame parent, final ZyGraph graph, final ITagManager manager) { m_parent = Preconditions.checkNotNull(parent, "IE02308: Perent argument can not be null"); m_graph = Preconditions.checkNotNull(graph, "IE01776: Graph can not be null"); m_tagManager = Preconditions.checkNotNull(manager, "IE01777: Manager argument can not be null"); m_model = new CTagsTreeModel(this); setModel(m_model); getModel().addTreeModelListener(new InternalModelListener()); addMouseListener(m_mouseListener); m_graph.addListener(m_graphSelectionListener); m_graph.addListener(m_graphVisibilityListener); setRootVisible(false); m_rootNode = new CRootTagTreeNode(parent, this, graph, m_tagManager); m_model.setRoot(m_rootNode); setCellRenderer(new CTagTreeCellRenderer()); // ATTENTION: UNDER NO CIRCUMSTANCES MOVE THIS LINE // ABOVE THE SETROOT LINE m_model.nodeStructureChanged(m_rootNode); final List<IDropHandler> handlers = new ArrayList<IDropHandler>(); handlers.add(new CTagSortingHandler()); new CDefaultTransferHandler(this, DnDConstants.ACTION_COPY_OR_MOVE, handlers); final DefaultTreeSelectionModel selectionModel = new DefaultTreeSelectionModel(); selectionModel.setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION); setSelectionModel(selectionModel); final InputMap windowImap = getInputMap(JComponent.WHEN_FOCUSED); windowImap.put(HotKeys.DELETE_HK.getKeyStroke(), "DELETE"); getActionMap().put("DELETE", CActionProxy.proxy(new DeleteAction())); } /** * Shows the context menu for a given mouse event. * * @param event The mouse event that triggered the popup menu. 
*/ private void showPopupMenu(final MouseEvent event) { final ITagTreeNode selectedNode = (ITagTreeNode) TreeHelpers.getNodeAt(this, event.getX(), event.getY()); if (selectedNode == null) { // Show the default menu final JPopupMenu popupMenu = new JPopupMenu(); popupMenu.add(CActionProxy.proxy(new CAddRootTagNodeAction(m_parent, m_tagManager, m_rootNode .getTag()))); popupMenu.show(this, event.getX(), event.getY()); } else { final JPopupMenu menu = selectedNode.getPopupMenu(); if (menu != null) { menu.show(this, event.getX(), event.getY()); } } } /** * Frees allocated resources. */ public void dispose() { m_rootNode.dispose(); removeMouseListener(m_mouseListener); m_graph.removeListener(m_graphSelectionListener); m_graph.removeListener(m_graphVisibilityListener); } @Override public IHelpInformation getHelpInformation() { return new IHelpInformation() { @Override public String getText() { return "This tree is used to configure and assign node tags. " + "Once you have assigned tags to nodes you can use this " + "tree to quickly select all nodes tagged with given tags."; } @Override public URL getUrl() { return CHelpFunctions.urlify(CHelpFunctions.MAIN_WINDOW_FILE); } }; } @Override public CTagsTreeModel getModel() { return m_model; } /** * Action class used to delete the selected tag. */ private class DeleteAction extends AbstractAction { @Override public void actionPerformed(final ActionEvent event) { final Object component = getLastSelectedPathComponent(); if (component instanceof CTagTreeNode) { CTagFunctions.deleteTag(m_parent, m_tagManager, ((CTagTreeNode) component).getTag()); } } } /** * Updates the tree GUI on selection changes in the graph. */ private class InternalGraphSelectionListener implements IZyGraphSelectionListener { @Override public void selectionChanged() { } } /** * Updates the tree GUI on visibility changes in the graph. 
*/ private class InternalGraphVisibilityListener implements IZyGraphVisibilityListener { @Override public void nodeDeleted() { } @Override public void visibilityChanged() { } } /** * Makes sure to update the tree if the model changed. */ private class InternalModelListener implements TreeModelListener { /** * Updates the tree. */ private void update() { if (m_lastValidSelectedNode == null) { validate(); } else { final Integer tagId = (Integer) m_lastValidSelectedNode.getUserObject(); final Enumeration<?> nodes = m_rootNode.breadthFirstEnumeration(); while (nodes.hasMoreElements()) { final DefaultMutableTreeNode node = (DefaultMutableTreeNode) nodes.nextElement(); if (Objects.equals(node.getUserObject(), tagId)) { new SwingInvoker() { @Override protected void operation() { m_lastValidSelectionPath = new TreePath(getModel().getPathToRoot(node)); getSelectionModel().setSelectionPath(m_lastValidSelectionPath); } }; return; } } } } @Override public void treeNodesChanged(final TreeModelEvent event) { update(); } @Override public void treeNodesInserted(final TreeModelEvent event) { update(); } @Override public void treeNodesRemoved(final TreeModelEvent event) { update(); } @Override public void treeStructureChanged(final TreeModelEvent event) { update(); } } /** * Handles clicks on the tree. 
*/ private class InternalMouseListener extends MouseAdapter { @Override public void mousePressed(final MouseEvent event) { if (event.isPopupTrigger()) { showPopupMenu(event); } else { final int y = event.getY(); final int x = event.getX(); final TreePath path = getPathForLocation(x, y); if (path == null) { return; } final Object treenode = path.getLastPathComponent(); if (event.getButton() == 1) { if (treenode instanceof CTagTreeNode) { m_lastValidSelectedNode = (CTagTreeNode) treenode; m_lastValidSelectionPath = path; // avoids flickering ((CTagTreeCellRenderer) getCellRenderer()).setSelectedNode(m_lastValidSelectedNode); } else if (treenode instanceof CTaggedGraphNodesContainerNode) { final CTaggedGraphNodesContainerNode containerNode = (CTaggedGraphNodesContainerNode) treenode; final Collection<NaviNode> nodes = containerNode.getGraphNodes(); boolean select = false; int countunselected = 0; int countinvisible = 0; for (final NaviNode nn : nodes) { if (!nn.getRawNode().isSelected()) { select = true; countunselected++; } if (!nn.getRawNode().isVisible()) { countinvisible++; } } if (((countinvisible == countunselected) || !select) && !m_graph.getSettings().getProximitySettings().getProximityBrowsingFrozen()) { m_graph.selectNodes(nodes, select); } else { final Collection<NaviNode> visiblenodes = new ArrayList<NaviNode>(); for (final NaviNode nn : nodes) { if (nn.isVisible()) { visiblenodes.add(nn); } } m_graph.selectNodes(visiblenodes, select); } } else if (treenode instanceof CTaggedGraphNodeNode) { final CTaggedGraphNodeNode graphNode = (CTaggedGraphNodeNode) treenode; final boolean graphNodeSelected = graphNode.getGraphNode().getRawNode().isSelected(); if (!(m_graph.getSettings().getProximitySettings().getProximityBrowsingFrozen() && !graphNode .getGraphNode().isVisible())) { m_graph.selectNode(graphNode.getGraphNode(), !graphNodeSelected); } } new SwingInvoker() { @Override protected void operation() { 
getSelectionModel().setSelectionPath(m_lastValidSelectionPath); } }.invokeLater(); } } } @Override public void mouseReleased(final MouseEvent event) { if (event.isPopupTrigger()) { showPopupMenu(event); } final int y = event.getY(); final int x = event.getX(); final TreePath path = getPathForLocation(x, y); if (path == null) { return; } final Object treenode = path.getLastPathComponent(); if ((event.getButton() == 3) && (treenode instanceof CTaggedGraphNodeNode)) { final CTaggedGraphNodeNode treeNode = (CTaggedGraphNodeNode) treenode; final NaviNode graphNode = treeNode.getGraphNode(); if (graphNode.isVisible()) { if (event.getClickCount() == 1) { MoveFunctions.centerNode(m_graph, graphNode); } else if (event.getClickCount() == 2) { ZoomFunctions.zoomToNode(m_graph, graphNode); } } } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.security; import static org.apache.solr.security.AuditEvent.EventType.ANONYMOUS; import static org.apache.solr.security.AuditEvent.EventType.ERROR; import com.fasterxml.jackson.annotation.JsonIgnore; import java.lang.invoke.MethodHandles; import java.security.Principal; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Pattern; import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.servlet.ServletUtils; import org.apache.solr.servlet.SolrRequestParsers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.MDC; /** * Audit event that takes request and auth context as input to be able to audit log custom things. 
* This interface may change in next release and is marked experimental * * @since 8.1.0 * @lucene.experimental */ public class AuditEvent { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); private String baseUrl; private String nodeName; private String message; private Level level; private Date date; private String username; private String session; private String clientIp; private List<String> collections; private Map<String, Object> context; private Map<String, String> headers; private Map<String, List<String>> solrParams = new HashMap<>(); private String solrHost; private int solrPort; private String solrIp; private String resource; private String httpMethod; private String httpQueryString; private EventType eventType; private AuthorizationResponse autResponse; private RequestType requestType; private double qTime = -1; private int status = -1; private Throwable exception; /* Predefined event types. Custom types can be made through constructor */ public enum EventType { AUTHENTICATED("Authenticated", "User successfully authenticated", Level.INFO, -1), REJECTED("Rejected", "Authentication request rejected", Level.WARN, 401), ANONYMOUS("Anonymous", "Request proceeds with unknown user", Level.INFO, -1), ANONYMOUS_REJECTED("AnonymousRejected", "Request from unknown user rejected", Level.WARN, 401), AUTHORIZED("Authorized", "Authorization succeeded", Level.INFO, -1), UNAUTHORIZED("Unauthorized", "Authorization failed", Level.WARN, 403), COMPLETED("Completed", "Request completed", Level.INFO, 200), ERROR("Error", "Request was not executed due to an error", Level.ERROR, 500); public final String message; public String explanation; public final Level level; public int status; EventType(String message, String explanation, Level level, int status) { this.message = message; this.explanation = explanation; this.level = level; this.status = status; } } /** * Empty event, must be filled by user using setters. 
Message and Loglevel will be initialized * from EventType but can be overridden with setters afterwards. * * @param eventType a predefined or custom EventType */ public AuditEvent(EventType eventType) { this.date = new Date(); this.eventType = eventType; this.status = eventType.status; this.level = eventType.level; this.message = eventType.message; } public AuditEvent(EventType eventType, HttpServletRequest httpRequest) { this(eventType, null, httpRequest); } // Constructor for testing and deserialization only protected AuditEvent() {} /** * Event based on an HttpServletRequest, typically used during authentication. Solr will fill in * details such as ip, http method etc from the request, and username if Principal exists on the * request. * * @param eventType a predefined or custom EventType * @param httpRequest the request to initialize from */ public AuditEvent(EventType eventType, Throwable exception, HttpServletRequest httpRequest) { this(eventType); this.solrHost = httpRequest.getLocalName(); this.solrPort = httpRequest.getLocalPort(); this.solrIp = httpRequest.getLocalAddr(); this.clientIp = httpRequest.getRemoteAddr(); this.httpMethod = httpRequest.getMethod(); this.httpQueryString = httpRequest.getQueryString(); this.headers = getHeadersFromRequest(httpRequest); this.baseUrl = httpRequest.getRequestURL().toString(); this.nodeName = MDC.get(ZkStateReader.NODE_NAME_PROP); SolrRequestParsers.parseQueryString(httpQueryString) .forEach( sp -> { this.solrParams.put(sp.getKey(), Arrays.asList(sp.getValue())); }); setResource(ServletUtils.getPathAfterContext(httpRequest)); setRequestType(findRequestType()); if (exception != null) setException(exception); Principal principal = httpRequest.getUserPrincipal(); if (principal != null) { this.username = httpRequest.getUserPrincipal().getName(); } else if (eventType.equals(EventType.AUTHENTICATED)) { this.eventType = ANONYMOUS; this.message = ANONYMOUS.message; this.level = ANONYMOUS.level; log.debug( "Audit event type 
changed from AUTHENTICATED to ANONYMOUS since no Principal found on request"); } } /** * Event based on request and AuthorizationContext. Solr will fill in details such as collections, * ip, http method etc from the context. * * @param eventType a predefined or custom EventType * @param httpRequest the request to initialize from * @param authorizationContext the context to initialize from */ public AuditEvent( EventType eventType, HttpServletRequest httpRequest, AuthorizationContext authorizationContext) { this(eventType, httpRequest); this.collections = authorizationContext.getCollectionRequests().stream() .map(r -> r.collectionName) .collect(Collectors.toList()); setResource(authorizationContext.getResource()); this.requestType = RequestType.convertType(authorizationContext.getRequestType()); if (authorizationContext.getParams() != null) { authorizationContext .getParams() .forEach( p -> { this.solrParams.put(p.getKey(), Arrays.asList(p.getValue())); }); } } /** * Event to log completed requests. Takes time and status. Solr will fill in details such as * collections, ip, http method etc from the HTTP request and context. 
* * @param eventType a predefined or custom EventType * @param httpRequest the request to initialize from * @param authorizationContext the context to initialize from * @param qTime query time * @param exception exception from query response, or null if OK */ public AuditEvent( EventType eventType, HttpServletRequest httpRequest, AuthorizationContext authorizationContext, double qTime, Throwable exception) { this(eventType, httpRequest, authorizationContext); setQTime(qTime); setException(exception); } private HashMap<String, String> getHeadersFromRequest(HttpServletRequest httpRequest) { HashMap<String, String> h = new HashMap<>(); Enumeration<String> headersEnum = httpRequest.getHeaderNames(); while (headersEnum != null && headersEnum.hasMoreElements()) { String name = headersEnum.nextElement(); h.put(name, httpRequest.getHeader(name)); } return h; } public enum Level { INFO, // Used for normal successful events WARN, // Used when a user is blocked etc ERROR // Used when there is an exception or error during auth / authz } public enum RequestType { ADMIN, SEARCH, UPDATE, STREAMING, UNKNOWN; static RequestType convertType(AuthorizationContext.RequestType ctxReqType) { switch (ctxReqType) { case ADMIN: return RequestType.ADMIN; case READ: return RequestType.SEARCH; case WRITE: return RequestType.UPDATE; default: return RequestType.UNKNOWN; } } } /** The human readable message about this event */ public String getMessage() { return message; } /** * Level of this event. 
Can be INFO, WARN or ERROR * * @return {@link Level} enum */ public Level getLevel() { return level; } /** Date that the event happened */ public Date getDate() { return date; } /** Username of logged in user, or null if no authenticated user */ public String getUsername() { return username; } /** Session identifier */ public String getSession() { return session; } /** IP address of the client doing the request */ public String getClientIp() { return clientIp; } /** A general purpose context map with potential extra information about the event */ public Map<String, Object> getContext() { return context; } /** List of collection names involved in request */ public List<String> getCollections() { return collections; } /** * Identifies the resource being operated on. This is not the same as URL path. For queries the * resource is relative to collection name, e.g. /select or /update. For other events the resource * may be /api/node/health or /admin/collection */ public String getResource() { return resource; } /** The HTTP method. E.g. GET, POST, PUT */ public String getHttpMethod() { return httpMethod; } /** Query part of URL or null if query part */ public String getHttpQueryString() { return httpQueryString; } /** * EventType tells the outcome of the event such as REJECTED, UNAUTHORIZED or ERROR * * @return {@link EventType} enum */ public EventType getEventType() { return eventType; } /** Host name of the Solr node logging the event */ public String getSolrHost() { return solrHost; } /** IP address of the Solr node logging the event */ public String getSolrIp() { return solrIp; } /** Port number of the Solr node logging the event */ public int getSolrPort() { return solrPort; } /** Map of all HTTP request headers belonging to the request */ public Map<String, String> getHeaders() { return headers; } /** Map of all Solr request parameters attached to the request. 
Pulled from url */ public Map<String, List<String>> getSolrParams() { return solrParams; } /** * Gets first value of a certain Solr request parameter * * @param key name of request parameter to retrieve * @return String value of the first value, regardless of number of valies */ public String getSolrParamAsString(String key) { List<String> v = getSolrParams().get(key); if (v != null && v.size() > 0) { return String.valueOf((v).get(0)); } return null; } /** The authorization response object from authorization plugin, or null authz has not happened */ public AuthorizationResponse getAutResponse() { return autResponse; } /** Node name of Solr node, on the internal format host:port_context, e.g. 10.0.0.1:8983_solr */ public String getNodeName() { return nodeName; } /** * Determines the type of request. Can be ADMIN, SEARCH, UPDATE, STREAMING, UNKNOWN * * @return {@link RequestType} enum */ public RequestType getRequestType() { return requestType; } /** HTTP status code of event, i.e. 200 = OK, 401 = unauthorized */ public int getStatus() { return status; } /** Request time in milliseconds for completed requests */ public double getQTime() { return qTime; } /** In case of ERROR event, find the exception causing the error */ public Throwable getException() { return exception; } /** * Get baseUrl as StringBuffer for back compat with previous version * * @deprecated Please use {@link #getBaseUrl()} instead * @return StringBuffer of the base url without query part */ @Deprecated @JsonIgnore public StringBuffer getRequestUrl() { return new StringBuffer(baseUrl); } /** * Full URL of the original request. This is {@link #baseUrl} + "?" + {@link #httpQueryString}. * Returns null if not set */ public String getUrl() { if (baseUrl == null) return null; return baseUrl + (httpQueryString != null ? "?" 
+ httpQueryString : ""); } /** First part of URL of the request, but not including request parameters, or null if not set */ public String getBaseUrl() { return baseUrl; } // Setters, builder style public AuditEvent setBaseUrl(String baseUrl) { this.baseUrl = baseUrl; return this; } public AuditEvent setSession(String session) { this.session = session; return this; } public AuditEvent setClientIp(String clientIp) { this.clientIp = clientIp; return this; } public AuditEvent setContext(Map<String, Object> context) { this.context = context; return this; } public AuditEvent setContextEntry(String key, Object value) { this.context.put(key, value); return this; } public AuditEvent setMessage(String message) { this.message = message; return this; } public AuditEvent setLevel(Level level) { this.level = level; return this; } public AuditEvent setDate(Date date) { this.date = date; return this; } public AuditEvent setUsername(String username) { this.username = username; return this; } public AuditEvent setCollections(List<String> collections) { this.collections = collections; return this; } public AuditEvent setResource(String resource) { this.resource = normalizeResourcePath(resource); return this; } public AuditEvent setHttpMethod(String httpMethod) { this.httpMethod = httpMethod; return this; } public AuditEvent setHttpQueryString(String httpQueryString) { this.httpQueryString = httpQueryString; return this; } public AuditEvent setSolrHost(String solrHost) { this.solrHost = solrHost; return this; } public AuditEvent setSolrPort(int solrPort) { this.solrPort = solrPort; return this; } public AuditEvent setSolrIp(String solrIp) { this.solrIp = solrIp; return this; } public AuditEvent setHeaders(Map<String, String> headers) { this.headers = headers; return this; } public AuditEvent setSolrParams(Map<String, List<String>> solrParams) { this.solrParams = solrParams; return this; } public AuditEvent setAutResponse(AuthorizationResponse autResponse) { this.autResponse = 
autResponse; return this; } public AuditEvent setRequestType(RequestType requestType) { this.requestType = requestType; return this; } public AuditEvent setQTime(double qTime) { this.qTime = qTime; return this; } public AuditEvent setStatus(int status) { this.status = status; return this; } public AuditEvent setException(Throwable exception) { this.exception = exception; if (exception != null) { this.eventType = ERROR; this.level = ERROR.level; this.message = ERROR.message; if (exception instanceof SolrException) status = ((SolrException) exception).code(); } return this; } private RequestType findRequestType() { if (resource == null) return RequestType.UNKNOWN; if (SEARCH_PATH_PATTERNS.stream().anyMatch(p -> p.matcher(resource).matches())) return RequestType.SEARCH; if (INDEXING_PATH_PATTERNS.stream().anyMatch(p -> p.matcher(resource).matches())) return RequestType.UPDATE; if (STREAMING_PATH_PATTERNS.stream().anyMatch(p -> p.matcher(resource).matches())) return RequestType.STREAMING; if (ADMIN_PATH_PATTERNS.stream().anyMatch(p -> p.matcher(resource).matches())) return RequestType.ADMIN; return RequestType.UNKNOWN; } protected String normalizeResourcePath(String resourcePath) { if (resourcePath == null) return ""; return resourcePath.replaceFirst("^/____v2", "/api"); } private static final List<String> ADMIN_PATH_REGEXES = Arrays.asList( "^/admin/.*", "^/api/(c|collections)$", "^/api/(c|collections)/[^/]+/config$", "^/api/(c|collections)/[^/]+/schema$", "^/api/(c|collections)/[^/]+/shards.*", "^/api/cores.*$", "^/api/node.*$", "^/api/cluster.*$"); private static final List<String> STREAMING_PATH_REGEXES = Collections.singletonList(".*/stream.*"); private static final List<String> INDEXING_PATH_REGEXES = Collections.singletonList(".*/update.*"); private static final List<String> SEARCH_PATH_REGEXES = Arrays.asList(".*/select.*", ".*/query.*"); private static final List<Pattern> ADMIN_PATH_PATTERNS = 
ADMIN_PATH_REGEXES.stream().map(Pattern::compile).collect(Collectors.toList()); private static final List<Pattern> STREAMING_PATH_PATTERNS = STREAMING_PATH_REGEXES.stream().map(Pattern::compile).collect(Collectors.toList()); private static final List<Pattern> INDEXING_PATH_PATTERNS = INDEXING_PATH_REGEXES.stream().map(Pattern::compile).collect(Collectors.toList()); private static final List<Pattern> SEARCH_PATH_PATTERNS = SEARCH_PATH_REGEXES.stream().map(Pattern::compile).collect(Collectors.toList()); }
/*************************GO-LICENSE-START*********************************
 * Copyright 2014 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *************************GO-LICENSE-END***********************************/

package com.thoughtworks.go.plugin.infra.monitor;

import java.io.File;
import java.util.Random;

import com.googlecode.junit.ext.checkers.OSChecker;
import com.thoughtworks.go.util.SystemEnvironment;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;

import static com.thoughtworks.go.util.FileUtil.recreateDirectory;
import static com.thoughtworks.go.util.SystemEnvironment.PLUGIN_EXTERNAL_PROVIDED_PATH;
import static com.thoughtworks.go.util.SystemEnvironment.PLUGIN_LOCATION_MONITOR_INTERVAL_IN_SECONDS;
import static com.thoughtworks.go.util.SystemEnvironment.PLUGIN_GO_PROVIDED_PATH;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

/**
 * Tests that {@link DefaultPluginJarLocationMonitor} notifies its
 * {@link PluginJarChangeListener} about plugin jars added to, updated in, and
 * removed from the external plugin directory, and that bundled-plugin events
 * are always delivered ahead of external-plugin events.
 *
 * <p>Every test exits early on Windows ({@code WINDOWS.satisfy()}) — the
 * file-monitoring behaviour exercised here is not verified on that platform.
 * Helper methods such as {@code copyPluginToThePluginDirectory},
 * {@code waitAMoment} and {@code pluginFileDetails} come from the abstract
 * base class.
 */
public class DefaultExternalPluginJarLocationMonitorTest extends AbstractDefaultPluginJarLocationMonitorTest {
    private static final OSChecker WINDOWS = new OSChecker(OSChecker.WINDOWS);
    // Random suffix keeps the scratch directories of concurrent/repeated runs from colliding.
    private static final Random RANDOM = new Random();

    private File PLUGIN_BUNDLED_DIR;
    private File PLUGIN_EXTERNAL_DIR;
    private DefaultPluginJarLocationMonitor monitor;
    private PluginJarChangeListener changeListener;
    private SystemEnvironment systemEnvironment;

    /**
     * Creates fresh bundled/external plugin scratch directories, stubs the
     * {@link SystemEnvironment} to point at them (1s poll interval), and
     * initializes the monitor under test.
     */
    @Before
    public void setUp() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        String pluginBundledDirName = "./tmp-bundled-DEPJLMT" + RANDOM.nextInt();
        PLUGIN_BUNDLED_DIR = new File(pluginBundledDirName);
        recreateDirectory(PLUGIN_BUNDLED_DIR);

        String pluginExternalDirName = "./tmp-external-DEPJLMT" + RANDOM.nextInt();
        PLUGIN_EXTERNAL_DIR = new File(pluginExternalDirName);
        recreateDirectory(PLUGIN_EXTERNAL_DIR);

        systemEnvironment = mock(SystemEnvironment.class);
        when(systemEnvironment.get(PLUGIN_LOCATION_MONITOR_INTERVAL_IN_SECONDS)).thenReturn(1);
        when(systemEnvironment.get(PLUGIN_GO_PROVIDED_PATH)).thenReturn(pluginBundledDirName);
        when(systemEnvironment.get(PLUGIN_EXTERNAL_PROVIDED_PATH)).thenReturn(pluginExternalDirName);

        changeListener = mock(PluginJarChangeListener.class);
        monitor = new DefaultPluginJarLocationMonitor(systemEnvironment);
        monitor.initialize();
    }

    /** Stops the monitor and removes the scratch directories. */
    @After
    public void tearDown() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.stop();
        FileUtils.deleteQuietly(PLUGIN_BUNDLED_DIR);
        FileUtils.deleteQuietly(PLUGIN_EXTERNAL_DIR);
    }

    @Test
    public void shouldCreateExternalPluginDirectoryIfItDoesNotExist() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        // deleteQuietly (rather than a bare File.delete() with an ignored return value)
        // guarantees the directory is really gone before we exercise initialize().
        FileUtils.deleteQuietly(PLUGIN_EXTERNAL_DIR);
        new DefaultPluginJarLocationMonitor(systemEnvironment).initialize();
        assertThat(PLUGIN_EXTERNAL_DIR.exists(), is(true));
    }

    @Test
    public void shouldThrowUpWhenExternalPluginDirectoryCreationFails() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        // "/xyz" is not creatable by an unprivileged test run, so initialize() must fail.
        when(systemEnvironment.get(PLUGIN_EXTERNAL_PROVIDED_PATH)).thenReturn("/xyz");
        try {
            new DefaultPluginJarLocationMonitor(systemEnvironment).initialize();
            fail("should have failed for missing external plugin folder");
        } catch (RuntimeException e) {
            assertThat(e.getMessage(), is("Failed to create external plugins folder in location /xyz"));
        }
    }

    @Test
    public void shouldDetectNewlyAddedPluginJar() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.addPluginJarChangeListener(changeListener);
        monitor.start();

        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-plugin-2.jar");
        waitAMoment();

        verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-plugin-2.jar", false));
        verifyNoMoreInteractions(changeListener);
    }

    @Test
    public void shouldDetectOnlyJarsAsNewPlugins() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.addPluginJarChangeListener(changeListener);
        monitor.start();

        // A non-jar file in the plugin directory must not produce any event.
        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-plugin.something-other-than-jar.zip");
        waitAMoment();

        verifyNoMoreInteractions(changeListener);
    }

    @Test
    public void shouldDetectRemovedPluginJar() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.addPluginJarChangeListener(changeListener);
        monitor.start();
        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-plugin-2.jar");
        waitAMoment();
        verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-plugin-2.jar", false));

        FileUtils.deleteQuietly(new File(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-plugin-2.jar"));
        waitAMoment();

        verify(changeListener).pluginJarRemoved(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-plugin-2.jar", false));
        verifyNoMoreInteractions(changeListener);
    }

    @Test
    public void shouldNotifyListenerOfMultiplePluginFilesAdded() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.addPluginJarChangeListener(changeListener);
        monitor.start();

        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar");
        waitAMoment();
        verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar", false));

        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-2.jar");
        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-3.jar");
        waitAMoment();

        verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-2.jar", false));
        verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-3.jar", false));
        verifyNoMoreInteractions(changeListener);
    }

    @Test
    public void shouldNotifyListenerOfMultiplePluginFilesRemoved() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.addPluginJarChangeListener(changeListener);
        monitor.start();

        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar");
        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-2.jar");
        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-3.jar");
        waitAMoment();
        verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar", false));
        verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-2.jar", false));
        verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-3.jar", false));

        FileUtils.deleteQuietly(new File(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar"));
        FileUtils.deleteQuietly(new File(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-2.jar"));
        waitAMoment();

        verify(changeListener).pluginJarRemoved(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar", false));
        verify(changeListener).pluginJarRemoved(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-2.jar", false));
        verifyNoMoreInteractions(changeListener);
    }

    @Test
    public void shouldNotifyRemoveEventBeforeAddEventInCaseOfFileRename() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.addPluginJarChangeListener(changeListener);
        monitor.start();
        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar");
        waitAMoment();
        PluginFileDetails orgExternalFile = pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar", false);
        verify(changeListener).pluginJarAdded(orgExternalFile);

        // A rename must surface as remove-of-old followed by add-of-new, in that order.
        PluginFileDetails newExternalFile = pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1-new.jar", false);
        FileUtils.moveFile(orgExternalFile.file(), newExternalFile.file());
        waitAMoment();

        InOrder inOrder = inOrder(changeListener);
        inOrder.verify(changeListener).pluginJarRemoved(orgExternalFile);
        inOrder.verify(changeListener).pluginJarAdded(newExternalFile);
        verifyNoMoreInteractions(changeListener);
    }

    @Test
    public void shouldNotifyListenersOfUpdatesToPluginJars() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.addPluginJarChangeListener(changeListener);
        monitor.start();
        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin.jar");
        waitAMoment();
        verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin.jar", false));

        updateFileContents(new File(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin.jar"));
        waitAMoment();

        verify(changeListener).pluginJarUpdated(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin.jar", false));
        verifyNoMoreInteractions(changeListener);
    }

    @Test
    public void shouldAlwaysHandleBundledPluginsAheadOfExternalPlugins() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.addPluginJarChangeListener(changeListener);
        monitor.start();

        // Added: bundled before external.
        copyPluginToThePluginDirectory(PLUGIN_BUNDLED_DIR, "descriptor-aware-test-bundled-plugin-1.jar");
        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar");
        waitAMoment();
        InOrder jarAddedOrder = inOrder(changeListener);
        jarAddedOrder.verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_BUNDLED_DIR, "descriptor-aware-test-bundled-plugin-1.jar", true));
        jarAddedOrder.verify(changeListener).pluginJarAdded(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar", false));

        // Updated: bundled before external.
        updateFileContents(new File(PLUGIN_BUNDLED_DIR, "descriptor-aware-test-bundled-plugin-1.jar"));
        updateFileContents(new File(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar"));
        waitAMoment();
        InOrder jarUpdatedOrder = inOrder(changeListener);
        jarUpdatedOrder.verify(changeListener).pluginJarUpdated(pluginFileDetails(PLUGIN_BUNDLED_DIR, "descriptor-aware-test-bundled-plugin-1.jar", true));
        jarUpdatedOrder.verify(changeListener).pluginJarUpdated(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar", false));

        // Removed: bundled before external.
        FileUtils.deleteQuietly(new File(PLUGIN_BUNDLED_DIR, "descriptor-aware-test-bundled-plugin-1.jar"));
        FileUtils.deleteQuietly(new File(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar"));
        waitAMoment();
        InOrder jarRemovedOrder = inOrder(changeListener);
        jarRemovedOrder.verify(changeListener).pluginJarRemoved(pluginFileDetails(PLUGIN_BUNDLED_DIR, "descriptor-aware-test-bundled-plugin-1.jar", true));
        jarRemovedOrder.verify(changeListener).pluginJarRemoved(pluginFileDetails(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar", false));
        verifyNoMoreInteractions(changeListener);
    }

    @Test
    public void shouldSpecifyIfPluginIsBundledOrExternalWhenAdded() throws Exception {
        if (WINDOWS.satisfy()) {
            return;
        }
        monitor.addPluginJarChangeListener(changeListener);
        monitor.start();
        copyPluginToThePluginDirectory(PLUGIN_BUNDLED_DIR, "descriptor-aware-test-bundled-plugin-1.jar");
        copyPluginToThePluginDirectory(PLUGIN_EXTERNAL_DIR, "descriptor-aware-test-external-plugin-1.jar");

        // forClass(...) is the supported Mockito factory; the ArgumentCaptor
        // constructor the test previously used is deprecated.
        ArgumentCaptor<PluginFileDetails> pluginFileDetailsArgumentCaptor = ArgumentCaptor.forClass(PluginFileDetails.class);
        waitAMoment();

        verify(changeListener, times(2)).pluginJarAdded(pluginFileDetailsArgumentCaptor.capture());
        // Bundled event is delivered first, so index 0 is bundled and index 1 is external.
        assertThat(pluginFileDetailsArgumentCaptor.getAllValues().get(0).isBundledPlugin(), is(true));
        assertThat(pluginFileDetailsArgumentCaptor.getAllValues().get(1).isBundledPlugin(), is(false));
        verifyNoMoreInteractions(changeListener);
    }
}
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.coprocessor;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationProtos;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestProtos;
import org.apache.hadoop.hbase.ipc.protobuf.generated.TestRpcServiceProtos;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;

/**
 * TestEndpoint: test cases to verify coprocessor Endpoint.
 *
 * <p>Starts a 2-node mini-cluster with a 3-region table (split at
 * {@code rowSeperator1} and {@code rowSeperator2}) and exercises region and
 * master coprocessor endpoints: aggregation, echo round-trips, null results,
 * and error propagation.
 */
@Category({CoprocessorTests.class, MediumTests.class})
public class TestCoprocessorEndpoint {
  private static final Log LOG = LogFactory.getLog(TestCoprocessorEndpoint.class);

  private static final TableName TEST_TABLE = TableName.valueOf("TestCoprocessorEndpoint");
  private static final byte[] TEST_FAMILY = Bytes.toBytes("TestFamily");
  private static final byte[] TEST_QUALIFIER = Bytes.toBytes("TestQualifier");
  private static byte[] ROW = Bytes.toBytes("testRow");

  // 20 rows split into 3 regions: [0,5), [5,12), [12,20).
  private static final int ROWSIZE = 20;
  private static final int rowSeperator1 = 5;
  private static final int rowSeperator2 = 12;
  private static byte[][] ROWS = makeN(ROW, ROWSIZE);

  private static HBaseTestingUtility util = new HBaseTestingUtility();

  /**
   * Starts the mini-cluster with the aggregation and echo coprocessors loaded,
   * creates the pre-split test table, and writes one integer cell per row
   * (row i holds value i, so the aggregation tests can predict sums).
   */
  @BeforeClass
  public static void setupBeforeClass() throws Exception {
    // Configure which coprocessors should be loaded on regions and on the master.
    Configuration conf = util.getConfiguration();
    conf.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 5000);
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
        org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName(),
        ProtobufCoprocessorService.class.getName());
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY,
        ProtobufCoprocessorService.class.getName());
    util.startMiniCluster(2);
    Admin admin = util.getAdmin();
    HTableDescriptor desc = new HTableDescriptor(TEST_TABLE);
    desc.addFamily(new HColumnDescriptor(TEST_FAMILY));
    // Pre-split so the endpoint calls fan out across three regions.
    admin.createTable(desc, new byte[][]{ROWS[rowSeperator1], ROWS[rowSeperator2]});
    util.waitUntilAllRegionsAssigned(TEST_TABLE);

    Table table = util.getConnection().getTable(TEST_TABLE);
    for (int i = 0; i < ROWSIZE; i++) {
      Put put = new Put(ROWS[i]);
      put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
      table.put(put);
    }
    table.close();
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    util.shutdownMiniCluster();
  }

  /**
   * Invokes the ColumnAggregation sum endpoint on every region overlapping
   * [start, end] and returns the per-region sums keyed by region name.
   */
  private Map<byte[], Long> sum(final Table table, final byte[] family,
      final byte[] qualifier, final byte[] start, final byte[] end)
      throws ServiceException, Throwable {
    return table.coprocessorService(ColumnAggregationProtos.ColumnAggregationService.class,
        start, end,
        new Batch.Call<ColumnAggregationProtos.ColumnAggregationService, Long>() {
          @Override
          public Long call(ColumnAggregationProtos.ColumnAggregationService instance)
              throws IOException {
            // BlockingRpcCallback turns the async endpoint call into a synchronous get().
            CoprocessorRpcUtils.BlockingRpcCallback<ColumnAggregationProtos.SumResponse> rpcCallback =
                new CoprocessorRpcUtils.BlockingRpcCallback<>();
            ColumnAggregationProtos.SumRequest.Builder builder =
                ColumnAggregationProtos.SumRequest.newBuilder();
            builder.setFamily(ByteStringer.wrap(family));
            if (qualifier != null && qualifier.length > 0) {
              builder.setQualifier(ByteStringer.wrap(qualifier));
            }
            instance.sum(null, builder.build(), rpcCallback);
            return rpcCallback.get().getSum();
          }
        });
  }

  /** Verifies per-region sums over the whole table and over a sub-range. */
  @Test
  public void testAggregation() throws Throwable {
    Table table = util.getConnection().getTable(TEST_TABLE);
    Map<byte[], Long> results = sum(table, TEST_FAMILY, TEST_QUALIFIER,
        ROWS[0], ROWS[ROWS.length - 1]);
    int sumResult = 0;
    int expectedResult = 0;
    for (Map.Entry<byte[], Long> e : results.entrySet()) {
      LOG.info("Got value " + e.getValue() + " for region " + Bytes.toStringBinary(e.getKey()));
      sumResult += e.getValue();
    }
    // Row i stores value i, so the full-table sum is 0 + 1 + ... + 19.
    for (int i = 0; i < ROWSIZE; i++) {
      expectedResult += i;
    }
    assertEquals("Invalid result", expectedResult, sumResult);

    results.clear();

    // scan: for region 2 and region 3
    results = sum(table, TEST_FAMILY, TEST_QUALIFIER,
        ROWS[rowSeperator1], ROWS[ROWS.length - 1]);
    sumResult = 0;
    expectedResult = 0;
    for (Map.Entry<byte[], Long> e : results.entrySet()) {
      LOG.info("Got value " + e.getValue() + " for region " + Bytes.toStringBinary(e.getKey()));
      sumResult += e.getValue();
    }
    for (int i = rowSeperator1; i < ROWSIZE; i++) {
      expectedResult += i;
    }
    assertEquals("Invalid result", expectedResult, sumResult);
    table.close();
  }

  /**
   * Round-trips an echo request through every region, then through only the
   * last two regions, checking the per-region callback results each time.
   */
  @Test
  public void testCoprocessorService() throws Throwable {
    Table table = util.getConnection().getTable(TEST_TABLE);

    List<HRegionLocation> regions;
    try (RegionLocator rl = util.getConnection().getRegionLocator(TEST_TABLE)) {
      regions = rl.getAllRegionLocations();
    }
    final TestProtos.EchoRequestProto request =
        TestProtos.EchoRequestProto.newBuilder().setMessage("hello").build();
    // Synchronized because Batch.Callback#update may be invoked from multiple threads.
    final Map<byte[], String> results = Collections.synchronizedMap(
        new TreeMap<byte[], String>(Bytes.BYTES_COMPARATOR));
    try {
      // scan: for all regions
      final RpcController controller = new ServerRpcController();
      table.coprocessorService(TestRpcServiceProtos.TestProtobufRpcProto.class,
          ROWS[0], ROWS[ROWS.length - 1],
          new Batch.Call<TestRpcServiceProtos.TestProtobufRpcProto, TestProtos.EchoResponseProto>() {
            public TestProtos.EchoResponseProto call(TestRpcServiceProtos.TestProtobufRpcProto instance)
                throws IOException {
              LOG.debug("Default response is " + TestProtos.EchoRequestProto.getDefaultInstance());
              CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto> callback =
                  new CoprocessorRpcUtils.BlockingRpcCallback<>();
              instance.echo(controller, request, callback);
              TestProtos.EchoResponseProto response = callback.get();
              LOG.debug("Batch.Call returning result " + response);
              return response;
            }
          },
          new Batch.Callback<TestProtos.EchoResponseProto>() {
            public void update(byte[] region, byte[] row, TestProtos.EchoResponseProto result) {
              assertNotNull(result);
              assertEquals("hello", result.getMessage());
              results.put(region, result.getMessage());
            }
          }
      );
      for (Map.Entry<byte[], String> e : results.entrySet()) {
        LOG.info("Got value " + e.getValue() + " for region " + Bytes.toStringBinary(e.getKey()));
      }
      // All three regions must have answered.
      assertEquals(3, results.size());
      for (HRegionLocation info : regions) {
        LOG.info("Region info is " + info.getRegionInfo().getRegionNameAsString());
        assertTrue(results.containsKey(info.getRegionInfo().getRegionName()));
      }
      results.clear();

      // scan: for region 2 and region 3
      table.coprocessorService(TestRpcServiceProtos.TestProtobufRpcProto.class,
          ROWS[rowSeperator1], ROWS[ROWS.length - 1],
          new Batch.Call<TestRpcServiceProtos.TestProtobufRpcProto, TestProtos.EchoResponseProto>() {
            public TestProtos.EchoResponseProto call(TestRpcServiceProtos.TestProtobufRpcProto instance)
                throws IOException {
              LOG.debug("Default response is " + TestProtos.EchoRequestProto.getDefaultInstance());
              CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto> callback =
                  new CoprocessorRpcUtils.BlockingRpcCallback<>();
              instance.echo(controller, request, callback);
              TestProtos.EchoResponseProto response = callback.get();
              LOG.debug("Batch.Call returning result " + response);
              return response;
            }
          },
          new Batch.Callback<TestProtos.EchoResponseProto>() {
            public void update(byte[] region, byte[] row, TestProtos.EchoResponseProto result) {
              assertNotNull(result);
              assertEquals("hello", result.getMessage());
              results.put(region, result.getMessage());
            }
          }
      );
      for (Map.Entry<byte[], String> e : results.entrySet()) {
        LOG.info("Got value " + e.getValue() + " for region " + Bytes.toStringBinary(e.getKey()));
      }
      // Only the two regions at/after rowSeperator1 are in range.
      assertEquals(2, results.size());
    } finally {
      table.close();
    }
  }

  /** Verifies that a Batch.Call returning null is recorded as a null result per region. */
  @Test
  public void testCoprocessorServiceNullResponse() throws Throwable {
    Table table = util.getConnection().getTable(TEST_TABLE);
    List<HRegionLocation> regions;
    try (RegionLocator rl = util.getConnection().getRegionLocator(TEST_TABLE)) {
      regions = rl.getAllRegionLocations();
    }

    final TestProtos.EchoRequestProto request =
        TestProtos.EchoRequestProto.newBuilder().setMessage("hello").build();
    try {
      // scan: for all regions
      final RpcController controller = new ServerRpcController();
      // test that null results are supported
      Map<byte[], String> results =
          table.coprocessorService(TestRpcServiceProtos.TestProtobufRpcProto.class,
              ROWS[0], ROWS[ROWS.length - 1],
              new Batch.Call<TestRpcServiceProtos.TestProtobufRpcProto, String>() {
                public String call(TestRpcServiceProtos.TestProtobufRpcProto instance)
                    throws IOException {
                  CoprocessorRpcUtils.BlockingRpcCallback<TestProtos.EchoResponseProto> callback =
                      new CoprocessorRpcUtils.BlockingRpcCallback<>();
                  instance.echo(controller, request, callback);
                  TestProtos.EchoResponseProto response = callback.get();
                  LOG.debug("Batch.Call got result " + response);
                  // Deliberately discard the response to exercise the null-result path.
                  return null;
                }
              }
          );
      for (Map.Entry<byte[], String> e : results.entrySet()) {
        LOG.info("Got value " + e.getValue() + " for region " + Bytes.toStringBinary(e.getKey()));
      }
      assertEquals(3, results.size());
      for (HRegionLocation region : regions) {
        HRegionInfo info = region.getRegionInfo();
        LOG.info("Region info is " + info.getRegionNameAsString());
        assertTrue(results.containsKey(info.getRegionName()));
        assertNull(results.get(info.getRegionName()));
      }
    } finally {
      table.close();
    }
  }

  /** Echo through the master coprocessor endpoint (blocking stub). */
  @Test
  public void testMasterCoprocessorService() throws Throwable {
    Admin admin = util.getAdmin();
    final TestProtos.EchoRequestProto request =
        TestProtos.EchoRequestProto.newBuilder().setMessage("hello").build();
    TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface service =
        TestRpcServiceProtos.TestProtobufRpcProto.newBlockingStub(admin.coprocessorService());
    assertEquals("hello", service.echo(null, request).getMessage());
  }

  /** A region endpoint that throws must surface as a ServiceException on the client. */
  @Test
  public void testCoprocessorError() throws Exception {
    Configuration configuration = new Configuration(util.getConfiguration());
    // Make it not retry forever
    configuration.setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 1);
    // NOTE(review): `configuration` is built with retries=1 but the table below comes
    // from util.getConnection(), which uses the shared configuration — confirm whether
    // the reduced retry setting is actually meant to apply here.
    Table table = util.getConnection().getTable(TEST_TABLE);

    try {
      CoprocessorRpcChannel protocol = table.coprocessorService(ROWS[0]);

      TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface service =
          TestRpcServiceProtos.TestProtobufRpcProto.newBlockingStub(protocol);

      service.error(null, TestProtos.EmptyRequestProto.getDefaultInstance());
      fail("Should have thrown an exception");
    } catch (ServiceException e) {
      // expected
    } finally {
      table.close();
    }
  }

  /** A master endpoint that throws must surface as a ServiceException on the client. */
  @Test
  public void testMasterCoprocessorError() throws Throwable {
    Admin admin = util.getAdmin();
    TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface service =
        TestRpcServiceProtos.TestProtobufRpcProto.newBlockingStub(admin.coprocessorService());
    try {
      service.error(null, TestProtos.EmptyRequestProto.getDefaultInstance());
      fail("Should have thrown an exception");
    } catch (ServiceException e) {
      // expected
    }
  }

  /** Builds n row keys of the form base + "00", base + "01", ... (zero-padded, sortable). */
  private static byte[][] makeN(byte[] base, int n) {
    byte[][] ret = new byte[n][];
    for (int i = 0; i < n; i++) {
      ret[i] = Bytes.add(base, Bytes.toBytes(String.format("%02d", i)));
    }
    return ret;
  }
}
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.query.groupby.epinephelinae;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.collect.Lists;
import io.druid.collections.ResourceHolder;
import io.druid.common.guava.SettableSupplier;
import io.druid.data.input.Row;
import io.druid.java.util.common.Pair;
import io.druid.java.util.common.guava.Accumulator;
import io.druid.java.util.common.guava.BaseSequence;
import io.druid.java.util.common.guava.CloseQuietly;
import io.druid.java.util.common.guava.FilteredSequence;
import io.druid.java.util.common.guava.Sequence;
import io.druid.query.Query;
import io.druid.query.ResourceLimitExceededException;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.filter.Filter;
import io.druid.query.filter.ValueMatcher;
import io.druid.query.groupby.GroupByQuery;
import io.druid.query.groupby.GroupByQueryConfig;
import io.druid.query.groupby.RowBasedColumnSelectorFactory;
import io.druid.query.groupby.epinephelinae.RowBasedGrouperHelper.RowBasedKey;
import io.druid.query.groupby.resource.GroupByQueryResource;
import io.druid.segment.column.ValueType;
import io.druid.segment.filter.BooleanValueMatcher;
import io.druid.segment.filter.Filters;
import org.joda.time.DateTime;
import org.joda.time.Interval;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.UUID;

/**
 * Re-groups an input {@link Sequence} of rows for a groupBy query: rows are
 * filtered by the query's intervals and dim filter, accumulated into a
 * {@link Grouper} backed by merge buffers from the {@link GroupByQueryResource}
 * (with limited temporary disk storage for spilling), and returned as a new
 * row sequence.
 */
public class GroupByRowProcessor
{
  /**
   * Builds the processed sequence. Work is deferred: filtering, aggregation
   * and resource acquisition all happen when the returned sequence's iterator
   * is created, and all acquired resources are released (in reverse
   * acquisition order) when iteration finishes or setup fails.
   *
   * @param queryParam        the groupBy query (cast internally to {@link GroupByQuery})
   * @param rows              input rows to re-group
   * @param rowSignature      column name → type mapping used to build column selectors
   * @param config            base groupBy config; per-query overrides are applied
   * @param resource          provider of merge buffers for the grouper
   * @param spillMapper       mapper used when the grouper spills to disk
   * @param processingTmpDir  parent directory for this query's spill files
   * @return a lazily-evaluated sequence of re-grouped rows
   */
  public static Sequence<Row> process(
      final Query queryParam,
      final Sequence<Row> rows,
      final Map<String, ValueType> rowSignature,
      final GroupByQueryConfig config,
      final GroupByQueryResource resource,
      final ObjectMapper spillMapper,
      final String processingTmpDir
  )
  {
    final GroupByQuery query = (GroupByQuery) queryParam;
    final GroupByQueryConfig querySpecificConfig = config.withOverrides(query);

    final AggregatorFactory[] aggregatorFactories = new AggregatorFactory[query.getAggregatorSpecs().size()];
    for (int i = 0; i < query.getAggregatorSpecs().size(); i++) {
      aggregatorFactories[i] = query.getAggregatorSpecs().get(i);
    }

    // Per-invocation spill directory; UUID avoids collisions between concurrent queries.
    final File temporaryStorageDirectory = new File(
        processingTmpDir,
        String.format("druid-groupBy-%s_%s", UUID.randomUUID(), query.getId())
    );

    final List<Interval> queryIntervals = query.getIntervals();
    final Filter filter = Filters.convertToCNFFromQueryContext(
        query,
        Filters.toFilter(query.getDimFilter())
    );
    // rowSupplier feeds the current row to the filter's column selectors.
    final SettableSupplier<Row> rowSupplier = new SettableSupplier<>();
    final RowBasedColumnSelectorFactory columnSelectorFactory = RowBasedColumnSelectorFactory.create(
        rowSupplier,
        rowSignature
    );
    // No filter means every row matches.
    final ValueMatcher filterMatcher = filter == null
                                       ? BooleanValueMatcher.of(true)
                                       : filter.makeMatcher(columnSelectorFactory);

    // Keep only rows whose timestamp falls in one of the query intervals AND
    // that pass the dim filter. The row must be published via rowSupplier
    // before filterMatcher.matches() reads it.
    final FilteredSequence<Row> filteredSequence = new FilteredSequence<>(
        rows,
        new Predicate<Row>()
        {
          @Override
          public boolean apply(Row input)
          {
            boolean inInterval = false;
            DateTime rowTime = input.getTimestamp();
            for (Interval queryInterval : queryIntervals) {
              if (queryInterval.contains(rowTime)) {
                inInterval = true;
                break;
              }
            }
            if (!inInterval) {
              return false;
            }
            rowSupplier.set(input);
            return filterMatcher.matches();
          }
        }
    );

    return new BaseSequence<>(
        new BaseSequence.IteratorMaker<Row, CloseableGrouperIterator<RowBasedKey, Row>>()
        {
          @Override
          public CloseableGrouperIterator<RowBasedKey, Row> make()
          {
            // This contains all closeable objects which are closed when the returned iterator iterates all the elements,
            // or an exceptions is thrown. The objects are closed in their reverse order.
            final List<Closeable> closeOnExit = Lists.newArrayList();

            try {
              final LimitedTemporaryStorage temporaryStorage = new LimitedTemporaryStorage(
                  temporaryStorageDirectory,
                  querySpecificConfig.getMaxOnDiskStorage()
              );

              closeOnExit.add(temporaryStorage);

              Pair<Grouper<RowBasedKey>, Accumulator<AggregateResult, Row>> pair = RowBasedGrouperHelper.createGrouperAccumulatorPair(
                  query,
                  true,
                  rowSignature,
                  querySpecificConfig,
                  new Supplier<ByteBuffer>()
                  {
                    @Override
                    public ByteBuffer get()
                    {
                      // Merge buffers are acquired lazily, as the grouper asks for them,
                      // and registered for release alongside the other resources.
                      final ResourceHolder<ByteBuffer> mergeBufferHolder = resource.getMergeBuffer();
                      closeOnExit.add(mergeBufferHolder);
                      return mergeBufferHolder.get();
                    }
                  },
                  -1,  // see createGrouperAccumulatorPair for this parameter's meaning (not visible here)
                  temporaryStorage,
                  spillMapper,
                  aggregatorFactories
              );
              final Grouper<RowBasedKey> grouper = pair.lhs;
              final Accumulator<AggregateResult, Row> accumulator = pair.rhs;
              closeOnExit.add(grouper);

              // Drain the filtered input into the grouper; a not-ok result means a
              // resource limit (e.g. max on-disk storage) was exceeded.
              final AggregateResult retVal = filteredSequence.accumulate(AggregateResult.ok(), accumulator);
              if (!retVal.isOk()) {
                throw new ResourceLimitExceededException(retVal.getReason());
              }

              return RowBasedGrouperHelper.makeGrouperIterator(
                  grouper,
                  query,
                  new Closeable()
                  {
                    @Override
                    public void close() throws IOException
                    {
                      // Normal-path cleanup: release in reverse acquisition order.
                      for (Closeable closeable : Lists.reverse(closeOnExit)) {
                        CloseQuietly.close(closeable);
                      }
                    }
                  }
              );
            }
            catch (Throwable e) {
              // Exception caught while setting up the iterator; release resources.
              for (Closeable closeable : Lists.reverse(closeOnExit)) {
                CloseQuietly.close(closeable);
              }
              throw e;
            }
          }

          @Override
          public void cleanup(CloseableGrouperIterator<RowBasedKey, Row> iterFromMake)
          {
            iterFromMake.close();
          }
        }
    );
  }
}
package com.easd.tools.iriomote;

/*
 * #%L
 * resttest
 * %%
 * Copyright (C) 2014 EA Systems Dresden GmbH
 * %%
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 * #L%
 */

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.WebResource.Builder;
import com.sun.jersey.api.client.filter.GZIPContentEncodingFilter;
import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.MediaType;

/**
 * Small Swing front-end for issuing REST GET/POST requests via the Jersey
 * client and inspecting the raw and pretty-printed responses. Optional
 * HTTP Basic authentication and GZip content encoding can be toggled via
 * checkboxes before a request is sent.
 *
 * @author mmauksch
 */
public class GUI extends javax.swing.JFrame {

    // Mirrors of the two checkbox states; consulted when building a request.
    private boolean authCBChecked;
    private boolean gzipCBchecked;

    /**
     * Creates new form GUI.
     */
    public GUI() {
        authCBChecked = false;
        gzipCBchecked = false;
        initComponents();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        tpMain = new javax.swing.JTabbedPane();
        jPanel1 = new javax.swing.JPanel();
        btTextPost = new javax.swing.JButton();
        jScrollPane1 = new javax.swing.JScrollPane();
        taPostText = new javax.swing.JTextArea();
        jPanel2 = new javax.swing.JPanel();
        jLabel2 = new javax.swing.JLabel();
        jPanel3 = new javax.swing.JPanel();
        tpResult = new javax.swing.JTabbedPane();
        jPanel4 = new javax.swing.JPanel();
        jLabel5 = new javax.swing.JLabel();
        lbResponse = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        lbSize = new javax.swing.JLabel();
        jScrollPane2 = new javax.swing.JScrollPane();
        taPrettyResponse = new javax.swing.JTextArea();
        jPanel5 = new javax.swing.JPanel();
        jScrollPane4 = new javax.swing.JScrollPane();
        taRawRequest = new javax.swing.JTextArea();
        jPanel6 = new javax.swing.JPanel();
        jScrollPane3 = new javax.swing.JScrollPane();
        taResponseRaw = new javax.swing.JTextArea();
        jLabel1 = new javax.swing.JLabel();
        tfAddress = new javax.swing.JTextField();
        btGet = new javax.swing.JButton();
        cbAuth = new javax.swing.JCheckBox();
        jLabel3 = new javax.swing.JLabel();
        tfUser = new javax.swing.JTextField();
        jLabel4 = new javax.swing.JLabel();
        tfPassword = new javax.swing.JPasswordField();
        cbGZip = new javax.swing.JCheckBox();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);

        tpMain.setName(""); // NOI18N

        btTextPost.setText("Post");
        btTextPost.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btTextPostActionPerformed(evt);
            }
        });

        taPostText.setColumns(20);
        taPostText.setRows(5);
        jScrollPane1.setViewportView(taPostText);

        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 604, Short.MAX_VALUE)
                    .addComponent(btTextPost, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addContainerGap())
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 340, Short.MAX_VALUE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(btTextPost)
                .addContainerGap())
        );

        tpMain.addTab("Post(Text)", jPanel1);

        jLabel2.setText("Not implemented yet");

        javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
        jPanel2.setLayout(jPanel2Layout);
        jPanel2Layout.setHorizontalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addGap(234, 234, 234)
                .addComponent(jLabel2)
                .addContainerGap(246, Short.MAX_VALUE))
        );
        jPanel2Layout.setVerticalGroup(
            jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel2Layout.createSequentialGroup()
                .addGap(84, 84, 84)
                .addComponent(jLabel2)
                .addContainerGap(296, Short.MAX_VALUE))
        );

        tpMain.addTab("Post(Binary)", jPanel2);

        jLabel5.setText("Response: ");

        lbResponse.setText("none");

        jLabel6.setText("Size: ");

        lbSize.setText("none");

        taPrettyResponse.setEditable(false);
        taPrettyResponse.setColumns(20);
        taPrettyResponse.setRows(5);
        jScrollPane2.setViewportView(taPrettyResponse);

        javax.swing.GroupLayout jPanel4Layout = new javax.swing.GroupLayout(jPanel4);
        jPanel4.setLayout(jPanel4Layout);
        jPanel4Layout.setHorizontalGroup(
            jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel4Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 599, Short.MAX_VALUE)
                    .addGroup(jPanel4Layout.createSequentialGroup()
                        .addComponent(jLabel5)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(lbResponse)
                        .addGap(18, 18, 18)
                        .addComponent(jLabel6)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(lbSize)
                        .addGap(0, 0, Short.MAX_VALUE)))
                .addContainerGap())
        );
        jPanel4Layout.setVerticalGroup(
            jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel4Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel5)
                    .addComponent(lbResponse)
                    .addComponent(jLabel6)
                    .addComponent(lbSize))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 323, Short.MAX_VALUE)
                .addContainerGap())
        );

        tpResult.addTab("Pretty", jPanel4);

        taRawRequest.setEditable(false);
        taRawRequest.setColumns(20);
        taRawRequest.setRows(5);
        taRawRequest.setText("not implemented yet");
        jScrollPane4.setViewportView(taRawRequest);

        javax.swing.GroupLayout jPanel5Layout = new javax.swing.GroupLayout(jPanel5);
        jPanel5.setLayout(jPanel5Layout);
        jPanel5Layout.setHorizontalGroup(
            jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel5Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jScrollPane4, javax.swing.GroupLayout.DEFAULT_SIZE, 599, Short.MAX_VALUE)
                .addContainerGap())
        );
        jPanel5Layout.setVerticalGroup(
            jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel5Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jScrollPane4, javax.swing.GroupLayout.DEFAULT_SIZE, 344, Short.MAX_VALUE)
                .addContainerGap())
        );

        tpResult.addTab("Raw (Request)", jPanel5);

        taResponseRaw.setEditable(false);
        taResponseRaw.setColumns(20);
        taResponseRaw.setRows(5);
        jScrollPane3.setViewportView(taResponseRaw);

        javax.swing.GroupLayout jPanel6Layout = new javax.swing.GroupLayout(jPanel6);
        jPanel6.setLayout(jPanel6Layout);
        jPanel6Layout.setHorizontalGroup(
            jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel6Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 599, Short.MAX_VALUE)
                .addContainerGap())
        );
        jPanel6Layout.setVerticalGroup(
            jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel6Layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 344, Short.MAX_VALUE)
                .addContainerGap())
        );

        tpResult.addTab("Raw (Response)", jPanel6);

        javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
        jPanel3.setLayout(jPanel3Layout);
        jPanel3Layout.setHorizontalGroup(
            jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(tpResult, javax.swing.GroupLayout.Alignment.TRAILING)
        );
        jPanel3Layout.setVerticalGroup(
            jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addComponent(tpResult)
        );

        tpMain.addTab("Result", jPanel3);

        jLabel1.setText("Address: ");

        tfAddress.setText("http://rest.example.org");

        btGet.setText("Get");
        btGet.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btGetActionPerformed(evt);
            }
        });

        cbAuth.setText("HTTPAuth?");
        cbAuth.addItemListener(new java.awt.event.ItemListener() {
            public void itemStateChanged(java.awt.event.ItemEvent evt) {
                cbAuthItemStateChanged(evt);
            }
        });

        jLabel3.setText("Name: ");

        tfUser.setText("User");

        jLabel4.setText("Password: ");

        tfPassword.setText("Password");

        cbGZip.setText("GZip?");
        cbGZip.addItemListener(new java.awt.event.ItemListener() {
            public void itemStateChanged(java.awt.event.ItemEvent evt) {
                cbGZipItemStateChanged(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(tpMain)
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                        .addGap(0, 0, Short.MAX_VALUE)
                        .addComponent(jLabel1)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(tfAddress, javax.swing.GroupLayout.PREFERRED_SIZE, 475, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addGap(18, 18, 18)
                        .addComponent(btGet))
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(cbAuth)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(jLabel3)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(tfUser, javax.swing.GroupLayout.PREFERRED_SIZE, 99, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(jLabel4)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(tfPassword, javax.swing.GroupLayout.PREFERRED_SIZE, 130, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                        .addComponent(cbGZip)
                        .addGap(0, 0, Short.MAX_VALUE)))
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(jLabel1)
                    .addComponent(tfAddress, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(btGet))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(cbAuth)
                    .addComponent(jLabel3)
                    .addComponent(jLabel4)
                    .addComponent(tfUser, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(tfPassword, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(cbGZip))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addComponent(tpMain)
                .addContainerGap())
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Builds a Jersey request builder for the address currently entered in
     * the UI, applying the HTTP Basic auth and GZip filters when the
     * corresponding checkboxes are selected.
     *
     * @return a builder with content type {@code application/json} preset
     */
    private Builder configureBuilder() {
        Client clientToUse;
        //Creates a client that doesn't check the validity of ssl certifcates.
        //Should only be used in a trusted environment!
        clientToUse = Client.create(ClientHelper.configureClient());
        //Create standard client:
        //clientToUse = Client.create();
        if(authCBChecked) {
            // FIX: JPasswordField.getText() is deprecated; read the password
            // via getPassword() instead (same characters, supported API).
            clientToUse.addFilter(new HTTPBasicAuthFilter(
                    tfUser.getText(), new String(tfPassword.getPassword())));
        }
        if(gzipCBchecked) {
            clientToUse.addFilter(new GZIPContentEncodingFilter(true));
        }
        WebResource web = clientToUse.resource(tfAddress.getText());
        WebResource.Builder builder = web.type(MediaType.APPLICATION_JSON);
        //builder.accept(MediaType.APPLICATION_JSON);
        return builder;
    }

    /**
     * Renders a response into the result tab: status code and length in the
     * labels, headers plus entity in the raw view, entity alone in the pretty
     * view. Finally switches the UI to the result tab.
     *
     * @param response the Jersey client response to display
     */
    private void handleClientResponse(ClientResponse response) {
        StringBuilder rawBuilder = new StringBuilder();
        lbResponse.setText(Integer.toString(response.getStatus()));
        lbSize.setText(Integer.toString(response.getLength()));
        // Header lines formatted as "Name: v1, v2;" — multiple values are
        // comma-separated, the last one terminated with ";".
        for(Map.Entry<String, List<String>> entry : response.getHeaders().entrySet()) {
            rawBuilder.append(entry.getKey());
            rawBuilder.append(": ");
            for(int i = 0; i < entry.getValue().size(); i++) {
                rawBuilder.append(entry.getValue().get(i));
                rawBuilder.append((i==entry.getValue().size()-1)?";\n":", ");
            }
        }
        String entity = response.getEntity(String.class);
        rawBuilder.append(entity);
        taResponseRaw.setText(rawBuilder.toString());
        taPrettyResponse.setText(entity);
        tpMain.setSelectedIndex(2);
        tpResult.setSelectedIndex(0);
    }

    private void btTextPostActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btTextPostActionPerformed
        Builder builder = configureBuilder();
        handleClientResponse(builder.post(ClientResponse.class, taPostText.getText()));
    }//GEN-LAST:event_btTextPostActionPerformed

    private void btGetActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btGetActionPerformed
        Builder builder = configureBuilder();
        handleClientResponse(builder.get(ClientResponse.class));
    }//GEN-LAST:event_btGetActionPerformed

    private void cbAuthItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_cbAuthItemStateChanged
        if(evt.getSource() == cbAuth) {
            // FIX: mirror the actual checkbox state instead of blindly
            // toggling the flag — keeps the mirror correct even if an event
            // is ever delivered twice or missed.
            authCBChecked = cbAuth.isSelected();
        }
    }//GEN-LAST:event_cbAuthItemStateChanged

    private void cbGZipItemStateChanged(java.awt.event.ItemEvent evt) {//GEN-FIRST:event_cbGZipItemStateChanged
        if(evt.getSource() == cbGZip) {
            // FIX: same as cbAuth — read the state rather than toggling.
            gzipCBchecked = cbGZip.isSelected();
        }
    }//GEN-LAST:event_cbGZipItemStateChanged

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton btGet;
    private javax.swing.JButton btTextPost;
    private javax.swing.JCheckBox cbAuth;
    private javax.swing.JCheckBox cbGZip;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JPanel jPanel3;
    private javax.swing.JPanel jPanel4;
    private javax.swing.JPanel jPanel5;
    private javax.swing.JPanel jPanel6;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JScrollPane jScrollPane3;
    private javax.swing.JScrollPane jScrollPane4;
    private javax.swing.JLabel lbResponse;
    private javax.swing.JLabel lbSize;
    private javax.swing.JTextArea taPostText;
    private javax.swing.JTextArea taPrettyResponse;
    private javax.swing.JTextArea taRawRequest;
    private javax.swing.JTextArea taResponseRaw;
    private javax.swing.JTextField tfAddress;
    private javax.swing.JPasswordField tfPassword;
    private javax.swing.JTextField tfUser;
    private javax.swing.JTabbedPane tpMain;
    private javax.swing.JTabbedPane tpResult;
    // End of variables declaration//GEN-END:variables
}
/** * NOTE: This copyright does *not* cover user programs that use HQ * program services by normal system calls through the application * program interfaces provided as part of the Hyperic Plug-in Development * Kit or the Hyperic Client Development Kit - this is merely considered * normal use of the program, and does *not* fall under the heading of * "derived work". * * Copyright (C) [2009-2010], VMware, Inc. * This file is part of HQ. * * HQ is free software; you can redistribute it and/or modify * it under the terms version 2 of the GNU General Public License as * published by the Free Software Foundation. This program is distributed * in the hope that it will be useful, but WITHOUT ANY WARRANTY; without * even the implied warranty of MERCHANTABILITY or FITNESS FOR A * PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA. 
* */ package org.hyperic.hq.measurement.server.mbean; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.Properties; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hibernate.SessionFactory; import org.hibernate.engine.SessionFactoryImplementor; import org.hyperic.hibernate.dialect.HQDialect; import org.hyperic.hq.common.shared.HQConstants; import org.hyperic.hq.common.shared.ServerConfigManager; import org.hyperic.hq.measurement.MeasurementConstants; import org.hyperic.hq.measurement.server.session.DataPoint; import org.hyperic.hq.measurement.server.session.Measurement; import org.hyperic.hq.measurement.server.session.MeasurementUnionStatementBuilder; import org.hyperic.hq.measurement.shared.DataManager; import org.hyperic.hq.measurement.shared.MeasurementManager; import org.hyperic.hq.product.MetricValue; import org.hyperic.util.StringUtil; import org.hyperic.util.TimeUtil; import org.hyperic.util.jdbc.DBUtil; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jmx.export.annotation.ManagedOperation; import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.stereotype.Service; /** * MBean used for testing purposes. When the populate() method is invoked, * we will look up all measurements that are currently scheduled, filling in * the detailed measurement data to simulate an environment that has been * running for as long as the 'keep detailed metric data' setting. 
* * */ @ManagedResource("hyperic.jmx:type=Service,name=DataPopulator") @Service public class DataPopulatorService implements DataPopulatorServiceMBean { private final Log log = LogFactory.getLog(DataPopulatorService.class); private DBUtil dbUtil; private MeasurementManager measurementManager; private DataManager dataManager; private ServerConfigManager serverConfigManager; private SessionFactory sessionFactory; @Autowired public DataPopulatorService(DBUtil dbUtil, MeasurementManager measurementManager, DataManager dataManager, ServerConfigManager serverConfigManager, SessionFactory sessionFactory) { this.dbUtil = dbUtil; this.measurementManager = measurementManager; this.dataManager = dataManager; this.serverConfigManager = serverConfigManager; this.sessionFactory = sessionFactory; } /** * */ @ManagedOperation public void stop() {} /** * */ @ManagedOperation public void start() {} /** * */ @ManagedOperation public void populate() throws Exception { populate(Long.MAX_VALUE); } /** * */ @ManagedOperation public void populate(long max) throws Exception { long detailedPurgeInterval = getDetailedPurgeInterval(); String cats[] = MeasurementConstants.VALID_CATEGORIES; long start = System.currentTimeMillis(); long num = 0; log.info("Starting data populatation at " + TimeUtil.toString(start)); List<Measurement> measurements = new ArrayList<Measurement>(); for (int i = 0; i < cats.length; i++) { log.info("Loading " + cats[i] + " measurements."); List<Measurement> meas = measurementManager.findMeasurementsByCategory(cats[i]); measurements.addAll(meas); } log.info("Loaded " + measurements.size() + " measurements"); List<DataPoint> dps = new ArrayList<DataPoint>(); max = (max < measurements.size()) ? 
max : measurements.size(); for (int i = 0; i < max; i++ ) { Measurement m = measurements.get(i); log.info("Loaded last data point for " + m.getId()); dps.add(getLastDataPoint(m.getId())); } for (int i = 0; i < dps.size(); i++) { Measurement m = measurements.get(i); DataPoint dp = dps.get(i); if (dp == null) { continue; // No data for this metric id. } List<DataPoint> data = genData(m, dp, detailedPurgeInterval); log.info("Inserting " + data.size() + " data points"); dataManager.addData(data); num += data.size(); } long duration = System.currentTimeMillis() - start; double rate = num / (duration/1000); log.info("Inserted " + num + " metrics in " + StringUtil.formatDuration(duration) + " (" + rate + " per second)"); } private DataPoint getLastDataPoint(Integer mid) throws Exception { String table = MeasurementUnionStatementBuilder.getUnionStatement( getDetailedPurgeInterval(), mid.intValue(), (HQDialect) ((SessionFactoryImplementor) sessionFactory) .getDialect()); final String SQL = "SELECT timestamp, value FROM " + table + " WHERE measurement_id = ? 
AND timestamp = " + "(SELECT min(timestamp) FROM " + table + " WHERE measurement_id = ?)"; Connection conn = null; PreparedStatement stmt = null; ResultSet rs = null; try { conn = dbUtil.getConnection(); stmt = conn.prepareStatement(SQL); stmt.setInt(1, mid.intValue()); stmt.setInt(2, mid.intValue()); rs = stmt.executeQuery(); if (!rs.next()) { log.info("No metric data found for " + mid); return null; } MetricValue mv = new MetricValue(); mv.setTimestamp(rs.getLong(1)); mv.setValue(rs.getDouble(2)); return new DataPoint(mid, mv); } catch (SQLException e) { log.error("Error querying last data points", e); throw e; } finally { DBUtil.closeConnection(log, conn); } } private long getDetailedPurgeInterval() throws Exception { Properties conf = serverConfigManager.getConfig(); String purgeRawString = conf.getProperty(HQConstants.DataPurgeRaw); return Long.parseLong(purgeRawString); } private List<DataPoint> genData(Measurement dm, DataPoint dp, long range) { ArrayList<DataPoint> data = new ArrayList<DataPoint>(); long last = dp.getMetricValue().getTimestamp(); long end = System.currentTimeMillis() - range; double value = dp.getMetricValue().getValue(); while (last > end) { last = last - dm.getInterval(); MetricValue v = new MetricValue(value, last); DataPoint d = new DataPoint(dm.getId(), v); data.add(d); } return data; } }
package org.motechproject.config.service;

import org.motechproject.config.core.domain.BootstrapConfig;
import org.motechproject.config.core.domain.ConfigSource;
import org.motechproject.config.domain.ModulePropertiesRecord;
import org.motechproject.server.config.domain.MotechSettings;
import org.motechproject.server.config.domain.SettingsRecord;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.core.io.Resource;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * <p>Central configuration service that monitors and manages configurations.</p>
 */
public interface ConfigurationService {
    String SETTINGS_CACHE_NAME = "MotechSettings";

    /**
     * <p>Loads bootstrap config that is used to start up the Motech server.</p>
     * <p>
     * The bootstrap configuration is loaded in the following order:
     * <ol>
     * <li>
     * Load the configuration from <code>bootstrap.properties</code> from the config
     * directory specified by the environment variable <code>MOTECH_CONFIG_DIR</code>.
     * <code>bootstrap.properties</code> contains the following properties:
     * <pre>
     *     sql.url (Mandatory)
     *     sql.driver (Mandatory)
     *     sql.username (If required)
     *     sql.password (If required)
     *     config.source (Optional. Defaults to 'UI')
     * </pre>
     * An example <code>bootstrap.properties</code> is given below:
     * <pre>
     *     sql.url=jdbc:mysql://localhost:3306/
     *     sql.driver=com.mysql.jdbc.Driver
     *     sql.username=motech
     *     sql.password=motech
     *     config.source=FILE
     * </pre>
     * </li>
     * <li>
     * If <code>MOTECH_CONFIG_DIR</code> environment variable is <b>not</b> set, load the specific
     * configuration values from the following environment variables:
     * <pre>
     *      MOTECH_SQL_URL (Mandatory)
     *      MOTECH_SQL_DRIVER (Mandatory)
     *      MOTECH_SQL_USERNAME (If required)
     *      MOTECH_SQL_PASSWORD (If required)
     *      MOTECH_CONFIG_SOURCE (Optional. Defaults to 'UI')
     * </pre>
     * </li>
     * <li>
     * If the <code>MOTECH_SQL_URL</code> environment variable is not set, load the configuration from
     * <code>bootstrap.properties</code> from the default MOTECH config directory specified in the file
     * <code>config-locations.properties</code>.
     * </li>
     * </ol>
     * </p>
     *
     * @return Bootstrap configuration
     * @throws org.motechproject.config.core.MotechConfigurationException if bootstrap configuration cannot be loaded.
     */
    BootstrapConfig loadBootstrapConfig();

    /**
     * <p>
     * Saves the given <code>BootstrapConfig</code> in the <code>bootstrap.properties</code> file located in
     * default MOTECH config location. The default motech config location is specified in the file
     * <code>config-locations.properties</code>.
     * </p>
     *
     * @param bootstrapConfig Bootstrap configuration.
     * @throws org.motechproject.config.core.MotechConfigurationException if bootstrap configuration cannot be saved.
     */
    void save(BootstrapConfig bootstrapConfig);

    MotechSettings getPlatformSettings();

    /**
     * Saves given platform settings to the settings service. Available platform settings are language, login mode,
     * provider name, provider URL, server URL, status message timeout, and upload size.
     *
     * @param settings the settings to be saved
     */
    @CacheEvict(value = SETTINGS_CACHE_NAME, allEntries = true)
    void savePlatformSettings(Properties settings);

    /**
     * Sets given value for the platform setting with given key.
     *
     * @param key the setting name
     * @param value the value to be set
     */
    @CacheEvict(value = SETTINGS_CACHE_NAME, allEntries = true)
    void setPlatformSetting(String key, String value);

    /**
     * Removes all cached MOTECH settings.
     */
    @CacheEvict(value = SETTINGS_CACHE_NAME, allEntries = true)
    void evictMotechSettingsCache();

    /**
     * Saves given MOTECH settings to the settings service.
     *
     * @param settings the settings to be saved
     */
    @CacheEvict(value = SETTINGS_CACHE_NAME, allEntries = true)
    void savePlatformSettings(MotechSettings settings);

    /**
     * <p>
     * Uses the current configuration and the default one to find changed properties and then connects them with
     * annotations. Moreover, creates a file with the non-default configurations and packs it into a zip file.
     * </p>
     *
     * @param propertyFile name of exported file
     * @param fileName name of the zip file to create
     * @return FileInputStream that contains zip file
     * @throws IOException if the zip file cannot be created or written
     */
    FileInputStream createZipWithConfigFiles(String propertyFile, String fileName) throws IOException;

    /**
     * <p>
     * This method allows to check whether MOTECH is currently running in the FILE or UI mode
     * </p>
     *
     * @return Current Config Source
     */
    ConfigSource getConfigSource();

    /**
     * <p>
     * Retrieves merged properties, given default set. Depending on the ConfigSource, it will either
     * merge default properties with the properties from DB or get properties from file.
     * </p>
     *
     * @param bundle The bundle we wish to retrieve properties for
     * @param filename Resource filename
     * @param defaultProperties Default properties of the bundle
     * @return Merged properties of the certain bundle
     * @throws IOException if bundle properties cannot be read from file
     */
    Properties getBundleProperties(String bundle, String filename, Properties defaultProperties) throws IOException;

    /**
     * <p>
     * Depending on the config source, it will either store properties in the DB or file.
     * Only properties that are different from the default ones are stored. If the properties
     * database record or file doesn't exist yet for the given bundle, it will be created.
     * </p>
     *
     * @param bundle Symbolic name of updated bundle
     * @param version Version of updated bundle
     * @param filename Resource filename
     * @param newProperties New properties to store
     * @param defaultProperties Default properties of the bundle
     * @throws IOException if bundle properties cannot be retrieved from file
     */
    void addOrUpdateProperties(String bundle, String version, String filename, Properties newProperties,
                               Properties defaultProperties) throws IOException;

    /**
     * <p>
     * Works similar to <code>addOrUpdateProperties</code> but instead of just adding / updating properties,
     * checks the database for any deprecated properties and removes them, to ensure that only current ones
     * are available.
     * </p>
     *
     * @param bundle Symbolic name of updated bundle
     * @param version Version of updated bundle
     * @param filename Resource filename
     * @param newProperties New properties to store
     * @param defaultProperties Default properties of the bundle
     * @throws IOException if bundle properties cannot be retrieved from file
     */
    void updatePropertiesAfterReinstallation(String bundle, String version, String filename,
                                             Properties defaultProperties, Properties newProperties) throws IOException;

    /**
     * <p>
     * Removes properties for given bundle.
     * </p>
     * @param bundle The bundle we wish to remove properties for
     */
    void removeAllBundleProperties(String bundle);

    /**
     * Adds, updates, or deletes configurations in FILE mode only.
     * Files are classified as either raw config or properties based on the extension of the file.
     *
     * @param files Files to read configuration from.
     */
    void processExistingConfigs(List<File> files);

    /**
     * Saves both property and raw configurations in FILE mode only.
     * Files are classified as either raw config or properties based on the extension of the file.
     *
     * @param file File to read configuration from.
     */
    void addOrUpdate(File file);

    /**
     * <p>
     * Retrieves all the bundle properties and returns them as Map, where key is the
     * filename.
     * </p>
     *
     * @param bundle The bundle we wish to retrieve properties for
     * @param defaultProperties Default properties of the bundle
     * @return Properties mapped by filename
     * @throws IOException if any of the bundle properties file cannot be read
     */
    Map<String, Properties> getAllBundleProperties(String bundle, Map<String, Properties> defaultProperties)
            throws IOException;

    /**
     * <p>
     * Allows persisting of raw json properties either in the database or file, depending on the selected
     * ConfigSource mode.
     * </p>
     *
     * @param bundle Bundle we wish to save properties for
     * @param version Version of the bundle
     * @param filename Resource filename
     * @param rawData Raw JSON data to persist
     * @throws IOException if the raw data cannot be persisted
     */
    void saveRawConfig(final String bundle, final String version, final String filename, final InputStream rawData)
            throws IOException;

    /**
     * <p>
     * Allows to retrieve raw JSON data either from the database or file, depending on the specified
     * ConfigSource mode.
     * </p>
     *
     * @param bundle Bundle we wish to retrieve raw data for
     * @param filename Resource filename
     * @param resource Resource file containing default rawConfig, in case no other has been found
     * @return Raw JSON data as InputStream
     * @throws IOException if the raw data cannot be read
     */
    InputStream getRawConfig(String bundle, String filename, Resource resource) throws IOException;

    /**
     * <p>
     * Allows to check if raw data has been registered for specified bundle
     * </p>
     *
     * @param bundle Bundle symbolic name
     * @param filename Resource filename
     * @return True if raw data exists for given parameters, false otherwise
     */
    boolean rawConfigExists(String bundle, String filename);

    /**
     * <p>
     * Depending on the selected ConfigSource mode, this method looks for registered bundle properties
     * and returns a list of files it has found
     * </p>
     *
     * @return List of files with registered properties
     */
    List<String> retrieveRegisteredBundleNames();

    /**
     * <p>
     * Depending on the selected ConfigSource mode, this method looks for all registered raw data
     * properties within the specified bundle.
     * </p>
     *
     * @param bundle Bundle we wish to perform look for
     * @return List of filenames that register raw config for specified bundle
     */
    List<String> listRawConfigNames(String bundle);

    /**
     * <p>
     * Checks if given bundle registers certain property file
     * </p>
     *
     * @param bundle Bundle we wish to perform check for
     * @param filename Resource filename
     * @return True if properties exist, false otherwise
     */
    boolean registersProperties(String bundle, String filename);

    /**
     * Adds a new config location and restarts the monitor.
     *
     * @param newConfigLocation New config location
     */
    void updateConfigLocation(String newConfigLocation);

    /**
     * Deletes the db records corresponding to the bundle with given bundle symbolic name.
     *
     * @param bundle symbolic name of the bundle whose records should be deleted
     */
    void deleteByBundle(String bundle);

    /**
     * Deletes the db record corresponding to the bundle and filename.
     *
     * @param bundle symbolic name of the bundle
     * @param filename Resource filename
     */
    void deleteByBundleAndFileName(String bundle, String filename);

    /**
     * Loads the default config for MOTECH from the resource file.
     *
     * @return default settings
     */
    SettingsRecord loadDefaultConfig();

    /**
     * Loads current MOTECH configuration
     *
     * @return current MOTECH settings
     */
    SettingsRecord loadConfig();

    /**
     * Checks whether the set MOTECH configuration requires the configuration files to be present
     *
     * @return true if files are required, false otherwise
     */
    boolean requiresConfigurationFiles();

    /**
     * Bulk add or update method for the Bundle Properties records. Iterates through
     * the passed records and either adds them, if they are not present, or updates otherwise.
     *
     * @param records a list of properties records
     */
    void addOrUpdateBundleRecords(List<ModulePropertiesRecord> records);

    /**
     * Removes given bundle properties records
     *
     * @param records a list of properties records to remove
     */
    void removeBundleRecords(List<ModulePropertiesRecord> records);

    /**
     * A convenient method for adding or updating the properties, which determines on its
     * own whether the record should be added or updated
     *
     * @param record a record to store
     */
    void addOrUpdateBundleRecord(ModulePropertiesRecord record);
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/binaryauthorization/v1beta1/service.proto

package com.google.cloud.binaryauthorization.v1beta1;

/**
 *
 *
 * <pre>
 * Request message for [BinauthzManagementService.UpdateAttestor][].
 * </pre>
 *
 * Protobuf type {@code google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest}
 */
public final class UpdateAttestorRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest)
    UpdateAttestorRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateAttestorRequest.newBuilder() to construct.
  private UpdateAttestorRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateAttestorRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateAttestorRequest();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tag/value pairs until end of input
  // (tag 0). Unrecognized fields are preserved in unknownFields rather than
  // dropped, so round-tripping a message from a newer schema is lossless.
  private UpdateAttestorRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              // Field 1 (attestor): if a value was already parsed (repeated
              // occurrence on the wire), merge the new message into it.
              com.google.cloud.binaryauthorization.v1beta1.Attestor.Builder subBuilder = null;
              if (attestor_ != null) {
                subBuilder = attestor_.toBuilder();
              }
              attestor_ =
                  input.readMessage(
                      com.google.cloud.binaryauthorization.v1beta1.Attestor.parser(),
                      extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(attestor_);
                attestor_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always finalize unknown fields, even on a parse failure, so the
      // partially-built message attached to the exception is consistent.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.binaryauthorization.v1beta1.BinaryAuthorizationServiceProto
        .internal_static_google_cloud_binaryauthorization_v1beta1_UpdateAttestorRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.binaryauthorization.v1beta1.BinaryAuthorizationServiceProto
        .internal_static_google_cloud_binaryauthorization_v1beta1_UpdateAttestorRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest.class,
            com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest.Builder.class);
  }

  public static final int ATTESTOR_FIELD_NUMBER = 1;
  private com.google.cloud.binaryauthorization.v1beta1.Attestor attestor_;
  /**
   *
   *
   * <pre>
   * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
   * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
   * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
   * </pre>
   *
   * <code>
   * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the attestor field is set.
   */
  @java.lang.Override
  public boolean hasAttestor() {
    return attestor_ != null;
  }
  /**
   *
   *
   * <pre>
   * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
   * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
   * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
   * </pre>
   *
   * <code>
   * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The attestor. Never null: falls back to the default instance when unset.
   */
  @java.lang.Override
  public com.google.cloud.binaryauthorization.v1beta1.Attestor getAttestor() {
    return attestor_ == null
        ? com.google.cloud.binaryauthorization.v1beta1.Attestor.getDefaultInstance()
        : attestor_;
  }
  /**
   *
   *
   * <pre>
   * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
   * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
   * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
   * </pre>
   *
   * <code>
   * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.binaryauthorization.v1beta1.AttestorOrBuilder getAttestorOrBuilder() {
    return getAttestor();
  }

  // Memoized initialization check: -1 = not computed yet, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (attestor_ != null) {
      output.writeMessage(1, getAttestor());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (attestor_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getAttestor());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest other =
        (com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest) obj;

    if (hasAttestor() != other.hasAttestor()) return false;
    if (hasAttestor()) {
      if (!getAttestor().equals(other.getAttestor())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for memoizedHashCode.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasAttestor()) {
      hash = (37 * hash) + ATTESTOR_FIELD_NUMBER;
      hash = (53 * hash) + getAttestor().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for [BinauthzManagementService.UpdateAttestor][].
   * </pre>
   *
   * Protobuf type {@code google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest)
      com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.binaryauthorization.v1beta1.BinaryAuthorizationServiceProto
          .internal_static_google_cloud_binaryauthorization_v1beta1_UpdateAttestorRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.binaryauthorization.v1beta1.BinaryAuthorizationServiceProto
          .internal_static_google_cloud_binaryauthorization_v1beta1_UpdateAttestorRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest.class,
              com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (attestorBuilder_ == null) {
        attestor_ = null;
      } else {
        attestor_ = null;
        attestorBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.binaryauthorization.v1beta1.BinaryAuthorizationServiceProto
          .internal_static_google_cloud_binaryauthorization_v1beta1_UpdateAttestorRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest
        getDefaultInstanceForType() {
      return com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest build() {
      com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest buildPartial() {
      com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest result =
          new com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest(this);
      if (attestorBuilder_ == null) {
        result.attestor_ = attestor_;
      } else {
        result.attestor_ = attestorBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest) {
        return mergeFrom(
            (com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest
              .getDefaultInstance()) return this;
      if (other.hasAttestor()) {
        mergeAttestor(other.getAttestor());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was successfully parsed; the finally block merges it in.
        parsedMessage =
            (com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private com.google.cloud.binaryauthorization.v1beta1.Attestor attestor_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.binaryauthorization.v1beta1.Attestor,
            com.google.cloud.binaryauthorization.v1beta1.Attestor.Builder,
            com.google.cloud.binaryauthorization.v1beta1.AttestorOrBuilder>
        attestorBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
     * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
     * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
     * </pre>
     *
     * <code>
     * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the attestor field is set.
     */
    public boolean hasAttestor() {
      return attestorBuilder_ != null || attestor_ != null;
    }
    /**
     *
     *
     * <pre>
     * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
     * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
     * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
     * </pre>
     *
     * <code>
     * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The attestor. Never null: falls back to the default instance when unset.
     */
    public com.google.cloud.binaryauthorization.v1beta1.Attestor getAttestor() {
      if (attestorBuilder_ == null) {
        return attestor_ == null
            ? com.google.cloud.binaryauthorization.v1beta1.Attestor.getDefaultInstance()
            : attestor_;
      } else {
        return attestorBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
     * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
     * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
     * </pre>
     *
     * <code>
     * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setAttestor(com.google.cloud.binaryauthorization.v1beta1.Attestor value) {
      if (attestorBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        attestor_ = value;
        onChanged();
      } else {
        attestorBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
     * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
     * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
     * </pre>
     *
     * <code>
     * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setAttestor(
        com.google.cloud.binaryauthorization.v1beta1.Attestor.Builder builderForValue) {
      if (attestorBuilder_ == null) {
        attestor_ = builderForValue.build();
        onChanged();
      } else {
        attestorBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
     * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
     * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
     * </pre>
     *
     * <code>
     * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeAttestor(com.google.cloud.binaryauthorization.v1beta1.Attestor value) {
      if (attestorBuilder_ == null) {
        if (attestor_ != null) {
          attestor_ =
              com.google.cloud.binaryauthorization.v1beta1.Attestor.newBuilder(attestor_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          attestor_ = value;
        }
        onChanged();
      } else {
        attestorBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
     * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
     * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
     * </pre>
     *
     * <code>
     * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearAttestor() {
      if (attestorBuilder_ == null) {
        attestor_ = null;
        onChanged();
      } else {
        attestor_ = null;
        attestorBuilder_ = null;
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
     * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
     * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
     * </pre>
     *
     * <code>
     * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.binaryauthorization.v1beta1.Attestor.Builder getAttestorBuilder() {
      onChanged();
      return getAttestorFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
     * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
     * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
     * </pre>
     *
     * <code>
     * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.binaryauthorization.v1beta1.AttestorOrBuilder getAttestorOrBuilder() {
      if (attestorBuilder_ != null) {
        return attestorBuilder_.getMessageOrBuilder();
      } else {
        return attestor_ == null
            ? com.google.cloud.binaryauthorization.v1beta1.Attestor.getDefaultInstance()
            : attestor_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The updated [attestor][google.cloud.binaryauthorization.v1beta1.Attestor] value. The service will
     * overwrite the [attestor name][google.cloud.binaryauthorization.v1beta1.Attestor.name] field with the resource name
     * in the request URL, in the format `projects/&#42;&#47;attestors/&#42;`.
     * </pre>
     *
     * <code>
     * .google.cloud.binaryauthorization.v1beta1.Attestor attestor = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.binaryauthorization.v1beta1.Attestor,
            com.google.cloud.binaryauthorization.v1beta1.Attestor.Builder,
            com.google.cloud.binaryauthorization.v1beta1.AttestorOrBuilder>
        getAttestorFieldBuilder() {
      // Lazily switches from the plain attestor_ field to builder-backed
      // storage; after this, attestorBuilder_ is the single source of truth.
      if (attestorBuilder_ == null) {
        attestorBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.binaryauthorization.v1beta1.Attestor,
                com.google.cloud.binaryauthorization.v1beta1.Attestor.Builder,
                com.google.cloud.binaryauthorization.v1beta1.AttestorOrBuilder>(
                getAttestor(), getParentForChildren(), isClean());
        attestor_ = null;
      }
      return attestorBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest)
  private static final com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest();
  }

  public static com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<UpdateAttestorRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateAttestorRequest>() {
        @java.lang.Override
        public UpdateAttestorRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new UpdateAttestorRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<UpdateAttestorRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateAttestorRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.binaryauthorization.v1beta1.UpdateAttestorRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: BceLeaveHall.proto

// NOTE(review): protoc-generated outer class for the field-less BceLeaveHall
// message. Kept byte-identical (comments only); regenerate from
// BceLeaveHall.proto instead of editing by hand.
package com.xinqihd.sns.gameserver.proto;

public final class XinqiBceLeaveHall {
  // Not instantiable: pure namespace/container for the generated message class.
  private XinqiBceLeaveHall() {}

  // BceLeaveHall.proto declares no extensions, so this is a no-op.
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }

  // Read-only accessor view of a BceLeaveHall; the message declares no fields,
  // so the interface adds nothing beyond MessageOrBuilder.
  public interface BceLeaveHallOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }

  public static final class BceLeaveHall extends
      com.google.protobuf.GeneratedMessage
      implements BceLeaveHallOrBuilder {
    // Use BceLeaveHall.newBuilder() to construct.
    private BceLeaveHall(Builder builder) {
      super(builder);
    }
    private BceLeaveHall(boolean noInit) {}

    // Shared singleton default instance (assigned in the static block below).
    private static final BceLeaveHall defaultInstance;
    public static BceLeaveHall getDefaultInstance() {
      return defaultInstance;
    }

    public BceLeaveHall getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.internal_static_com_xinqihd_sns_gameserver_proto_BceLeaveHall_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.internal_static_com_xinqihd_sns_gameserver_proto_BceLeaveHall_fieldAccessorTable;
    }

    // No fields to initialize for this message.
    private void initFields() {
    }

    // Memoized tri-state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    // Serialization writes only the unknown-field set (no declared fields).
    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means "not computed yet".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // ---- Standard generated parse entry points (delegate to the Builder) ----
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    // Delimited variants return null when the stream is already at EOF.
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    // ---- Builder factory methods ----
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }

    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHallOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.internal_static_com_xinqihd_sns_gameserver_proto_BceLeaveHall_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.internal_static_com_xinqihd_sns_gameserver_proto_BceLeaveHall_fieldAccessorTable;
      }

      // Construct using com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }

      // No nested builders to force-initialize (no message/group fields).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }

      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall.getDescriptor();
      }

      public com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall getDefaultInstanceForType() {
        return com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall.getDefaultInstance();
      }

      public com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall build() {
        com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Like build(), but surfaces missing required fields as a parse failure.
      private com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall buildPartial() {
        com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall result = new com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall) {
          return mergeFrom((com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Only unknown fields can be merged: the message has no declared fields.
      public Builder mergeFrom(com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall other) {
        if (other == com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      // Wire parsing: every tag falls into the unknown-field set; tag 0 = end of stream.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
          }
        }
      }

      // @@protoc_insertion_point(builder_scope:com.xinqihd.sns.gameserver.proto.BceLeaveHall)
    }

    static {
      defaultInstance = new BceLeaveHall(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:com.xinqihd.sns.gameserver.proto.BceLeaveHall)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_com_xinqihd_sns_gameserver_proto_BceLeaveHall_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_com_xinqihd_sns_gameserver_proto_BceLeaveHall_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for BceLeaveHall.proto (do not alter).
    java.lang.String[] descriptorData = {
      "\n\022BceLeaveHall.proto\022 com.xinqihd.sns.ga" +
      "meserver.proto\"\016\n\014BceLeaveHallB\023B\021XinqiB" +
      "ceLeaveHall"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_com_xinqihd_sns_gameserver_proto_BceLeaveHall_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_com_xinqihd_sns_gameserver_proto_BceLeaveHall_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_com_xinqihd_sns_gameserver_proto_BceLeaveHall_descriptor,
              new java.lang.String[] { },
              com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall.class,
              com.xinqihd.sns.gameserver.proto.XinqiBceLeaveHall.BceLeaveHall.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.ode.utils;

import org.apache.axis2.Constants;
import org.apache.axis2.util.JavaUtils;
import org.apache.axis2.client.Options;
import org.apache.axis2.transport.http.HTTPConstants;
import org.apache.axis2.transport.http.HttpTransportProperties;
import org.apache.axis2.transport.jms.JMSConstants;
import org.apache.commons.httpclient.Header;
import org.apache.commons.httpclient.HttpVersion;
import org.apache.commons.httpclient.ProtocolException;
import org.apache.commons.httpclient.params.DefaultHttpParams;
import org.apache.commons.httpclient.params.HttpConnectionParams;
import org.apache.commons.httpclient.params.HttpMethodParams;
import org.apache.commons.httpclient.params.HttpParams;
import org.apache.commons.httpclient.params.HttpClientParams;
import org.apache.commons.httpclient.params.HostParams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.util.ArrayList;
import java.util.Map;
import java.util.Collection;

/**
 * Names of the generic endpoint properties understood by ODE, together with
 * translators that map those properties onto Axis2 client {@link Options}
 * ({@link Axis2}) and commons-httpclient {@link HttpParams}
 * ({@link HttpClient}).
 *
 * @author <a href="mailto:midon@intalio.com">Alexis Midon</a>
 */
public class Properties {

    /**
     * Property used to define how long (in milliseconds) the message will wait for a response.
     * Default value is {@link #DEFAULT_MEX_TIMEOUT}
     */
    public static final String PROP_MEX_TIMEOUT = "mex.timeout";
    /** Default value of {@link #PROP_MEX_TIMEOUT}: 30 seconds. */
    public static final int DEFAULT_MEX_TIMEOUT = 30 * 1000;

    public static final String PROP_HTTP_CONNECTION_TIMEOUT = HttpConnectionParams.CONNECTION_TIMEOUT;
    public static final String PROP_HTTP_SOCKET_TIMEOUT = HttpMethodParams.SO_TIMEOUT;
    public static final String PROP_HTTP_PROTOCOL_VERSION = HttpMethodParams.PROTOCOL_VERSION;
    /** Prefix marking properties that become default HTTP request headers. */
    public static final String PROP_HTTP_HEADER_PREFIX = "http.default-headers.";
    /** Prefix reserved for HTTP proxy configuration properties. */
    public static final String PROP_HTTP_PROXY_PREFIX = "http.proxy.";
    public static final String PROP_HTTP_PROXY_HOST = PROP_HTTP_PROXY_PREFIX + "host";
    public static final String PROP_HTTP_PROXY_PORT = PROP_HTTP_PROXY_PREFIX + "port";
    public static final String PROP_HTTP_PROXY_DOMAIN = PROP_HTTP_PROXY_PREFIX + "domain";
    public static final String PROP_HTTP_PROXY_USER = PROP_HTTP_PROXY_PREFIX + "user";
    public static final String PROP_HTTP_PROXY_PASSWORD = PROP_HTTP_PROXY_PREFIX + "password";

    /**
     * @deprecated use org.apache.commons.httpclient.params.HttpMethodParams#HTTP_CONTENT_CHARSET (="http.protocol.content-charset")
     */
    public static final String PROP_HTTP_PROTOCOL_ENCODING = "http.protocol.encoding";

    /**
     * Property to override the location set in soap:address or http:address
     */
    public static final String PROP_ADDRESS = "address";

    // Httpclient specific
    public static final String PROP_HTTP_MAX_REDIRECTS = HttpClientParams.MAX_REDIRECTS;

    // Axis2-specific
    public static final String PROP_HTTP_REQUEST_CHUNK = "http.request.chunk";
    public static final String PROP_HTTP_REQUEST_GZIP = "http.request.gzip";
    public static final String PROP_HTTP_ACCEPT_GZIP = "http.accept.gzip";
    public static final String PROP_SECURITY_POLICY = "security.policy.file";
    public static final String PROP_JMS_REPLY_DESTINATION = "jms.reply.destination";
    public static final String PROP_JMS_REPLY_TIMEOUT = "jms.reply.timeout";

    protected static final Log log = LogFactory.getLog(Properties.class);

    /**
     * Extracts proxy settings and default HTTP headers from the given property map.
     * <p>
     * Keys starting with {@link #PROP_HTTP_HEADER_PREFIX} become {@link Header}s
     * (the header name is the key suffix); keys starting with
     * {@link #PROP_HTTP_PROXY_PREFIX} populate a
     * {@link HttpTransportProperties.ProxyProperties}. A proxy with a null or
     * empty host is discarded.
     *
     * @param properties the endpoint properties to scan
     * @return a two-element array: index 0 is the ProxyProperties (or null),
     *         index 1 is the ArrayList of Headers (or null)
     */
    public static Object[] getProxyAndHeaders(Map<String, String> properties) {
        ArrayList<Header> headers = null; // /!\ Axis2 requires an ArrayList (not a List implementation)
        HttpTransportProperties.ProxyProperties proxy = null;
        for (Map.Entry<String, String> e : properties.entrySet()) {
            final String k = e.getKey();
            final String v = e.getValue();
            if (k.startsWith(PROP_HTTP_HEADER_PREFIX)) {
                if (headers == null) headers = new ArrayList<Header>();
                // extract the header name
                String name = k.substring(PROP_HTTP_HEADER_PREFIX.length());
                headers.add(new Header(name, v));
            } else if (k.startsWith(PROP_HTTP_PROXY_PREFIX)) {
                if (proxy == null) proxy = new HttpTransportProperties.ProxyProperties();
                if (PROP_HTTP_PROXY_HOST.equals(k)) proxy.setProxyName(v);
                else if (PROP_HTTP_PROXY_PORT.equals(k)) proxy.setProxyPort(Integer.parseInt(v));
                else if (PROP_HTTP_PROXY_DOMAIN.equals(k)) proxy.setDomain(v);
                else if (PROP_HTTP_PROXY_USER.equals(k)) proxy.setUserName(v);
                else if (PROP_HTTP_PROXY_PASSWORD.equals(k)) proxy.setPassWord(v);
                else if (log.isWarnEnabled())
                    // fixed typo: "refix" -> "prefix"
                    log.warn("Unknown proxy properties [" + k + "]. " + PROP_HTTP_PROXY_PREFIX + " is a prefix reserved for proxy properties.");
            }
        }
        if (proxy != null) {
            String host = proxy.getProxyHostName();
            if (host == null || host.length() == 0) {
                // disable the proxy if the host is null or empty
                proxy = null;
                if (log.isDebugEnabled())
                    log.debug("Proxy host is null. Proxy will not be taken into account.");
            }
        }
        return new Object[]{proxy, headers};
    }

    /**
     * Translates generic endpoint properties into Axis2 client {@link Options}.
     */
    public static class Axis2 {

        /**
         * Same as {@link #translate(Map, Options)} with a fresh {@link Options}.
         */
        public static Options translate(Map<String, String> properties) {
            return translate(properties, new Options());
        }

        /**
         * Copies all properties into the given {@link Options} verbatim, then
         * overrides the well-known ones with their typed Axis2 equivalents.
         * {@code HTTPConstants.REUSE_HTTP_CLIENT} is always forced to "false".
         *
         * @param properties the endpoint properties to translate
         * @param options the Options instance to populate
         * @return the populated {@code options} instance
         */
        public static Options translate(Map<String, String> properties, Options options) {
            if (log.isDebugEnabled()) log.debug("Translating Properties for Axis2");
            if (properties.isEmpty()) return options;

            // First set any default values to make sure they can be overwritten
            // set the default encoding for HttpClient (HttpClient uses ISO-8859-1 by default)
            options.setProperty(Constants.Configuration.CHARACTER_SET_ENCODING, "UTF-8");

            /* then add all property pairs so that new properties (with string value)
             * are automatically handled (i.e no translation needed)
             */
            for (Map.Entry<String, String> e : properties.entrySet()) {
                options.setProperty(e.getKey(), e.getValue());
            }
            if (properties.containsKey(PROP_HTTP_CONNECTION_TIMEOUT)) {
                final String value = properties.get(PROP_HTTP_CONNECTION_TIMEOUT);
                try {
                    options.setProperty(HTTPConstants.CONNECTION_TIMEOUT, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_CONNECTION_TIMEOUT + "=" + value + "]. Integer expected. Property will be skipped.");
                }
            }
            if (properties.containsKey(PROP_HTTP_SOCKET_TIMEOUT)) {
                final String value = properties.get(PROP_HTTP_SOCKET_TIMEOUT);
                try {
                    options.setProperty(HTTPConstants.SO_TIMEOUT, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_SOCKET_TIMEOUT + "=" + value + "]. Integer expected. Property will be skipped.");
                }
            }
            if (properties.containsKey(PROP_HTTP_PROTOCOL_ENCODING)) {
                if (log.isWarnEnabled())
                    log.warn("Deprecated property: http.protocol.encoding. Use http.protocol.content-charset");
                options.setProperty(Constants.Configuration.CHARACTER_SET_ENCODING, properties.get(PROP_HTTP_PROTOCOL_ENCODING));
            }
            if (properties.containsKey(HttpMethodParams.HTTP_CONTENT_CHARSET)) {
                options.setProperty(Constants.Configuration.CHARACTER_SET_ENCODING, properties.get(HttpMethodParams.HTTP_CONTENT_CHARSET));
            }
            if (properties.containsKey(PROP_HTTP_PROTOCOL_VERSION)) {
                options.setProperty(HTTPConstants.HTTP_PROTOCOL_VERSION, properties.get(PROP_HTTP_PROTOCOL_VERSION));
            }
            if (properties.containsKey(PROP_HTTP_REQUEST_CHUNK)) {
                options.setProperty(HTTPConstants.CHUNKED, properties.get(PROP_HTTP_REQUEST_CHUNK));
            }
            if (properties.containsKey(PROP_HTTP_REQUEST_GZIP)) {
                options.setProperty(HTTPConstants.MC_GZIP_REQUEST, properties.get(PROP_HTTP_REQUEST_GZIP));
            }
            if (properties.containsKey(PROP_HTTP_ACCEPT_GZIP)) {
                options.setProperty(HTTPConstants.MC_ACCEPT_GZIP, properties.get(PROP_HTTP_ACCEPT_GZIP));
            }
            if (properties.containsKey(PROP_HTTP_MAX_REDIRECTS)) {
                if (log.isWarnEnabled()) log.warn("Property Not Supported: " + PROP_HTTP_MAX_REDIRECTS);
            }
            if (properties.containsKey(PROP_JMS_REPLY_DESTINATION)) {
                options.setProperty(JMSConstants.REPLY_PARAM, properties.get(PROP_JMS_REPLY_DESTINATION));
            }
            if (properties.containsKey(PROP_JMS_REPLY_TIMEOUT)) {
                String value = properties.get(PROP_JMS_REPLY_TIMEOUT);
                try {
                    options.setProperty(JMSConstants.JMS_WAIT_REPLY, Long.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_JMS_REPLY_TIMEOUT + "=" + value + "]. Long expected. Property will be skipped.");
                }
            }

            // iterate through the properties to get Headers & Proxy information
            Object[] o = getProxyAndHeaders(properties);
            HttpTransportProperties.ProxyProperties proxy = (HttpTransportProperties.ProxyProperties) o[0];
            ArrayList<Header> headers = (ArrayList<Header>) o[1]; // /!\ Axis2 requires an ArrayList (not a List implementation)
            if (headers != null && !headers.isEmpty()) options.setProperty(HTTPConstants.HTTP_HEADERS, headers);
            if (proxy != null) options.setProperty(HTTPConstants.PROXY, proxy);

            // Set properties that canNOT be overridden
            // fixed typo in the message: "overidden" -> "overridden"
            if (JavaUtils.isTrueExplicitly(options.getProperty(HTTPConstants.REUSE_HTTP_CLIENT))) {
                if (log.isWarnEnabled())
                    log.warn("This property cannot be overridden, and must always be false. " + HTTPConstants.REUSE_HTTP_CLIENT);
            }
            options.setProperty(HTTPConstants.REUSE_HTTP_CLIENT, "false");
            return options;
        }
    }

    /**
     * Translates generic endpoint properties into commons-httpclient
     * {@link HttpParams}.
     */
    public static class HttpClient {

        /**
         * Same as {@link #translate(Map, HttpParams)} with a fresh
         * {@link DefaultHttpParams}.
         */
        public static HttpParams translate(Map<String, String> properties) {
            return translate(properties, new DefaultHttpParams());
        }

        /**
         * Copies all properties into the given {@link HttpParams} verbatim, then
         * overrides the well-known ones with their typed httpclient equivalents.
         *
         * @param properties the endpoint properties to translate
         * @param p the HttpParams instance to populate
         * @return a read-only wrapper around the populated params
         */
        public static HttpParams translate(Map<String, String> properties, HttpParams p) {
            if (log.isDebugEnabled())
                log.debug("Translating Properties for HttpClient. Properties size=" + properties.size());
            if (properties.isEmpty()) return p;

            // First set any default values to make sure they can be overwritten
            // set the default encoding for HttpClient (HttpClient uses ISO-8859-1 by default)
            p.setParameter(HttpMethodParams.HTTP_CONTENT_CHARSET, "UTF-8");

            /* then all property pairs so that new properties (with string value)
             * are automatically handled (i.e no translation needed)
             */
            for (Map.Entry<String, String> e : properties.entrySet()) {
                p.setParameter(e.getKey(), e.getValue());
            }

            // initialize the collection of headers
            p.setParameter(HostParams.DEFAULT_HEADERS, new ArrayList());

            if (properties.containsKey(PROP_HTTP_CONNECTION_TIMEOUT)) {
                final String value = properties.get(PROP_HTTP_CONNECTION_TIMEOUT);
                try {
                    p.setParameter(HttpConnectionParams.CONNECTION_TIMEOUT, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_CONNECTION_TIMEOUT + "=" + value + "] Property will be skipped.");
                }
            }
            if (properties.containsKey(PROP_HTTP_SOCKET_TIMEOUT)) {
                final String value = properties.get(PROP_HTTP_SOCKET_TIMEOUT);
                try {
                    p.setParameter(HttpMethodParams.SO_TIMEOUT, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_SOCKET_TIMEOUT + "=" + value + "] Property will be skipped.");
                }
            }
            if (properties.containsKey(PROP_HTTP_PROTOCOL_ENCODING)) {
                if (log.isWarnEnabled())
                    log.warn("Deprecated property: http.protocol.encoding. Use http.protocol.content-charset");
                p.setParameter(HttpMethodParams.HTTP_CONTENT_CHARSET, properties.get(PROP_HTTP_PROTOCOL_ENCODING));
            }
            // the next one is redundant because HttpMethodParams.HTTP_CONTENT_CHARSET accepts a string and we use the same property name
            // so the property has already been added.
            if (properties.containsKey(HttpMethodParams.HTTP_CONTENT_CHARSET)) {
                p.setParameter(HttpMethodParams.HTTP_CONTENT_CHARSET, properties.get(HttpMethodParams.HTTP_CONTENT_CHARSET));
            }
            if (properties.containsKey(PROP_HTTP_PROTOCOL_VERSION)) {
                try {
                    p.setParameter(HttpMethodParams.PROTOCOL_VERSION, HttpVersion.parse(properties.get(PROP_HTTP_PROTOCOL_VERSION)));
                } catch (ProtocolException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + PROP_HTTP_PROTOCOL_VERSION + "]", e);
                }
            }
            if (properties.containsKey(PROP_HTTP_REQUEST_CHUNK)) {
                // see org.apache.commons.httpclient.methods.EntityEnclosingMethod.setContentChunked()
                p.setBooleanParameter(PROP_HTTP_REQUEST_CHUNK, Boolean.parseBoolean(properties.get(PROP_HTTP_REQUEST_CHUNK)));
            }
            if (properties.containsKey(PROP_HTTP_REQUEST_GZIP)) {
                if (log.isWarnEnabled())
                    log.warn("Property not supported by HTTP External Services: " + PROP_HTTP_REQUEST_GZIP);
            }
            if (Boolean.parseBoolean(properties.get(PROP_HTTP_ACCEPT_GZIP))) {
                // append gzip to the list of accepted encoding
                // HttpClient does not support compression natively
                // Additional code would be necessary to handle it.
//                ((Collection) p.getParameter(HostParams.DEFAULT_HEADERS)).add(new Header("Accept-Encoding", "gzip"));
                if (log.isWarnEnabled())
                    log.warn("Property not supported by HTTP External Services: " + PROP_HTTP_ACCEPT_GZIP);
            }
            if (properties.containsKey(PROP_HTTP_MAX_REDIRECTS)) {
                final String value = properties.get(PROP_HTTP_MAX_REDIRECTS);
                try {
                    p.setParameter(HttpClientParams.MAX_REDIRECTS, Integer.valueOf(value));
                } catch (NumberFormatException e) {
                    if (log.isWarnEnabled())
                        log.warn("Mal-formatted Property: [" + Properties.PROP_HTTP_MAX_REDIRECTS + "=" + value + "] Property will be skipped.");
                }
            }

            Object[] o = getProxyAndHeaders(properties);
            HttpTransportProperties.ProxyProperties proxy = (HttpTransportProperties.ProxyProperties) o[0];
            Collection headers = (Collection) o[1];
            if (headers != null && !headers.isEmpty())
                ((Collection) p.getParameter(HostParams.DEFAULT_HEADERS)).addAll(headers);
            if (proxy != null) p.setParameter(PROP_HTTP_PROXY_PREFIX, proxy);

            return new UnmodifiableHttpParams(p);
        }

        /**
         * Read-only decorator: all getters delegate to the wrapped params,
         * all setters throw {@link UnsupportedOperationException}.
         */
        static class UnmodifiableHttpParams implements HttpParams {

            final HttpParams p;

            private UnmodifiableHttpParams(HttpParams p) {
                this.p = p;
            }

            public void setBooleanParameter(String name, boolean value) {
                throw new UnsupportedOperationException();
            }

            public void setDefaults(HttpParams params) {
                throw new UnsupportedOperationException();
            }

            public void setDoubleParameter(String name, double value) {
                throw new UnsupportedOperationException();
            }

            public void setIntParameter(String name, int value) {
                throw new UnsupportedOperationException();
            }

            public void setLongParameter(String name, long value) {
                throw new UnsupportedOperationException();
            }

            public void setParameter(String name, Object value) {
                throw new UnsupportedOperationException();
            }

            public boolean getBooleanParameter(String name, boolean defaultValue) {
                return p.getBooleanParameter(name, defaultValue);
            }

            // No defaults are exposed through the read-only view.
            public HttpParams getDefaults() {
                return null;
            }

            public double getDoubleParameter(String name, double defaultValue) {
                return p.getDoubleParameter(name, defaultValue);
            }

            public int getIntParameter(String name, int defaultValue) {
                return p.getIntParameter(name, defaultValue);
            }

            public long getLongParameter(String name, long defaultValue) {
                return p.getLongParameter(name, defaultValue);
            }

            public Object getParameter(String name) {
                return p.getParameter(name);
            }

            public boolean isParameterFalse(String name) {
                return p.isParameterFalse(name);
            }

            public boolean isParameterSet(String name) {
                return p.isParameterSet(name);
            }

            public boolean isParameterSetLocally(String name) {
                return p.isParameterSetLocally(name);
            }

            public boolean isParameterTrue(String name) {
                return p.isParameterTrue(name);
            }
        }
    }
}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.storage.v2019_06_01.implementation; import com.microsoft.azure.management.storage.v2019_06_01.Kind; import com.microsoft.azure.management.storage.v2019_06_01.Identity; import com.microsoft.azure.management.storage.v2019_06_01.ProvisioningState; import com.microsoft.azure.management.storage.v2019_06_01.Endpoints; import com.microsoft.azure.management.storage.v2019_06_01.AccountStatus; import org.joda.time.DateTime; import com.microsoft.azure.management.storage.v2019_06_01.CustomDomain; import com.microsoft.azure.management.storage.v2019_06_01.Encryption; import com.microsoft.azure.management.storage.v2019_06_01.AccessTier; import com.microsoft.azure.management.storage.v2019_06_01.AzureFilesIdentityBasedAuthentication; import com.microsoft.azure.management.storage.v2019_06_01.NetworkRuleSet; import com.microsoft.azure.management.storage.v2019_06_01.GeoReplicationStats; import com.microsoft.azure.management.storage.v2019_06_01.LargeFileSharesState; import java.util.List; import com.microsoft.azure.management.storage.v2019_06_01.RoutingPreference; import com.fasterxml.jackson.annotation.JsonProperty; import com.microsoft.rest.serializer.JsonFlatten; import com.microsoft.azure.Resource; /** * The storage account. */ @JsonFlatten public class StorageAccountInner extends Resource { /** * Gets the SKU. */ @JsonProperty(value = "sku", access = JsonProperty.Access.WRITE_ONLY) private SkuInner sku; /** * Gets the Kind. Possible values include: 'Storage', 'StorageV2', * 'BlobStorage', 'FileStorage', 'BlockBlobStorage'. */ @JsonProperty(value = "kind", access = JsonProperty.Access.WRITE_ONLY) private Kind kind; /** * The identity of the resource. 
*/ @JsonProperty(value = "identity") private Identity identity; /** * Gets the status of the storage account at the time the operation was * called. Possible values include: 'Creating', 'ResolvingDNS', * 'Succeeded'. */ @JsonProperty(value = "properties.provisioningState", access = JsonProperty.Access.WRITE_ONLY) private ProvisioningState provisioningState; /** * Gets the URLs that are used to perform a retrieval of a public blob, * queue, or table object. Note that Standard_ZRS and Premium_LRS accounts * only return the blob endpoint. */ @JsonProperty(value = "properties.primaryEndpoints", access = JsonProperty.Access.WRITE_ONLY) private Endpoints primaryEndpoints; /** * Gets the location of the primary data center for the storage account. */ @JsonProperty(value = "properties.primaryLocation", access = JsonProperty.Access.WRITE_ONLY) private String primaryLocation; /** * Gets the status indicating whether the primary location of the storage * account is available or unavailable. Possible values include: * 'available', 'unavailable'. */ @JsonProperty(value = "properties.statusOfPrimary", access = JsonProperty.Access.WRITE_ONLY) private AccountStatus statusOfPrimary; /** * Gets the timestamp of the most recent instance of a failover to the * secondary location. Only the most recent timestamp is retained. This * element is not returned if there has never been a failover instance. * Only available if the accountType is Standard_GRS or Standard_RAGRS. */ @JsonProperty(value = "properties.lastGeoFailoverTime", access = JsonProperty.Access.WRITE_ONLY) private DateTime lastGeoFailoverTime; /** * Gets the location of the geo-replicated secondary for the storage * account. Only available if the accountType is Standard_GRS or * Standard_RAGRS. 
*/
    @JsonProperty(value = "properties.secondaryLocation", access = JsonProperty.Access.WRITE_ONLY)
    private String secondaryLocation;

    /**
     * Status of the secondary location ('available'/'unavailable').
     * Only available if the SKU name is Standard_GRS or Standard_RAGRS.
     */
    @JsonProperty(value = "properties.statusOfSecondary", access = JsonProperty.Access.WRITE_ONLY)
    private AccountStatus statusOfSecondary;

    /** Creation date and time of the storage account in UTC. */
    @JsonProperty(value = "properties.creationTime", access = JsonProperty.Access.WRITE_ONLY)
    private DateTime creationTime;

    /** Custom domain the user assigned to this storage account. */
    @JsonProperty(value = "properties.customDomain", access = JsonProperty.Access.WRITE_ONLY)
    private CustomDomain customDomain;

    /**
     * URLs used to retrieve a public blob, queue, or table object from the
     * secondary location. Only available if the SKU name is Standard_RAGRS.
     */
    @JsonProperty(value = "properties.secondaryEndpoints", access = JsonProperty.Access.WRITE_ONLY)
    private Endpoints secondaryEndpoints;

    /** Encryption settings on the account. If unspecified, the account is unencrypted. */
    @JsonProperty(value = "properties.encryption", access = JsonProperty.Access.WRITE_ONLY)
    private Encryption encryption;

    /**
     * Required for storage accounts where kind = BlobStorage. The access tier
     * used for billing. Possible values include: 'Hot', 'Cool'.
     */
    @JsonProperty(value = "properties.accessTier", access = JsonProperty.Access.WRITE_ONLY)
    private AccessTier accessTier;

    /** Identity-based authentication settings for Azure Files. */
    @JsonProperty(value = "properties.azureFilesIdentityBasedAuthentication")
    private AzureFilesIdentityBasedAuthentication azureFilesIdentityBasedAuthentication;

    /** Allows https traffic only to storage service if set to true. */
    @JsonProperty(value = "properties.supportsHttpsTrafficOnly")
    private Boolean enableHttpsTrafficOnly;

    /** Network rule set. */
    @JsonProperty(value = "properties.networkAcls", access = JsonProperty.Access.WRITE_ONLY)
    private NetworkRuleSet networkRuleSet;

    /** Account HierarchicalNamespace enabled if set to true. */
    @JsonProperty(value = "properties.isHnsEnabled")
    private Boolean isHnsEnabled;

    /** Geo Replication Stats. */
    @JsonProperty(value = "properties.geoReplicationStats", access = JsonProperty.Access.WRITE_ONLY)
    private GeoReplicationStats geoReplicationStats;

    /** True while a failover is in progress; otherwise null. */
    @JsonProperty(value = "properties.failoverInProgress", access = JsonProperty.Access.WRITE_ONLY)
    private Boolean failoverInProgress;

    /**
     * Allow large file shares if set to Enabled. Cannot be disabled once
     * enabled. Possible values include: 'Disabled', 'Enabled'.
     */
    @JsonProperty(value = "properties.largeFileSharesState")
    private LargeFileSharesState largeFileSharesState;

    /** Private endpoint connections associated with this storage account. */
    @JsonProperty(value = "properties.privateEndpointConnections", access = JsonProperty.Access.WRITE_ONLY)
    private List<PrivateEndpointConnectionInner> privateEndpointConnections;

    /** Network routing choice opted by the user for data transfer. */
    @JsonProperty(value = "properties.routingPreference")
    private RoutingPreference routingPreference;

    /** Gets the SKU. @return the sku value */
    public SkuInner sku() { return this.sku; }

    /**
     * Gets the Kind. Possible values include: 'Storage', 'StorageV2',
     * 'BlobStorage', 'FileStorage', 'BlockBlobStorage'.
     *
     * @return the kind value
     */
    public Kind kind() { return this.kind; }

    /** Gets the identity of the resource. @return the identity value */
    public Identity identity() { return this.identity; }

    /**
     * Sets the identity of the resource.
     *
     * @param identity the identity value to set
     * @return the StorageAccountInner object itself.
     */
    public StorageAccountInner withIdentity(Identity identity) { this.identity = identity; return this; }

    /**
     * Gets the status of the storage account at the time the operation was
     * called. Possible values include: 'Creating', 'ResolvingDNS', 'Succeeded'.
     *
     * @return the provisioningState value
     */
    public ProvisioningState provisioningState() { return this.provisioningState; }

    /**
     * Gets the URLs used to retrieve a public blob, queue, or table object.
     * Note that Standard_ZRS and Premium_LRS accounts only return the blob
     * endpoint.
     *
     * @return the primaryEndpoints value
     */
    public Endpoints primaryEndpoints() { return this.primaryEndpoints; }

    /** Gets the location of the primary data center. @return the primaryLocation value */
    public String primaryLocation() { return this.primaryLocation; }

    /**
     * Gets the availability status of the primary location. Possible values
     * include: 'available', 'unavailable'.
     *
     * @return the statusOfPrimary value
     */
    public AccountStatus statusOfPrimary() { return this.statusOfPrimary; }

    /**
     * Gets the timestamp of the most recent failover to the secondary
     * location. Not returned if there has never been a failover. Only
     * available if the accountType is Standard_GRS or Standard_RAGRS.
     *
     * @return the lastGeoFailoverTime value
     */
    public DateTime lastGeoFailoverTime() { return this.lastGeoFailoverTime; }

    /**
     * Gets the location of the geo-replicated secondary. Only available if
     * the accountType is Standard_GRS or Standard_RAGRS.
     *
     * @return the secondaryLocation value
     */
    public String secondaryLocation() { return this.secondaryLocation; }

    /**
     * Gets the availability status of the secondary location. Only available
     * for Standard_GRS or Standard_RAGRS SKUs. Possible values include:
     * 'available', 'unavailable'.
     *
     * @return the statusOfSecondary value
     */
    public AccountStatus statusOfSecondary() { return this.statusOfSecondary; }

    /** Gets the creation date and time in UTC. @return the creationTime value */
    public DateTime creationTime() { return this.creationTime; }

    /** Gets the custom domain assigned to this account. @return the customDomain value */
    public CustomDomain customDomain() { return this.customDomain; }

    /**
     * Gets the URLs used to retrieve objects from the secondary location.
     * Only available if the SKU name is Standard_RAGRS.
     *
     * @return the secondaryEndpoints value
     */
    public Endpoints secondaryEndpoints() { return this.secondaryEndpoints; }

    /** Gets the encryption settings on the account. @return the encryption value */
    public Encryption encryption() { return this.encryption; }

    /**
     * Gets the access tier used for billing (kind = BlobStorage only).
     * Possible values include: 'Hot', 'Cool'.
     *
     * @return the accessTier value
     */
    public AccessTier accessTier() { return this.accessTier; }

    /**
     * Gets the identity-based authentication settings for Azure Files.
     *
     * @return the azureFilesIdentityBasedAuthentication value
     */
    public AzureFilesIdentityBasedAuthentication azureFilesIdentityBasedAuthentication() { return this.azureFilesIdentityBasedAuthentication; }

    /**
     * Sets the identity-based authentication settings for Azure Files.
     *
     * @param azureFilesIdentityBasedAuthentication the azureFilesIdentityBasedAuthentication value to set
     * @return the StorageAccountInner object itself.
     */
    public StorageAccountInner withAzureFilesIdentityBasedAuthentication(AzureFilesIdentityBasedAuthentication azureFilesIdentityBasedAuthentication) { this.azureFilesIdentityBasedAuthentication = azureFilesIdentityBasedAuthentication; return this; }

    /** Gets the https-only flag. @return the enableHttpsTrafficOnly value */
    public Boolean enableHttpsTrafficOnly() { return this.enableHttpsTrafficOnly; }

    /**
     * Sets the https-only flag.
     *
     * @param enableHttpsTrafficOnly the enableHttpsTrafficOnly value to set
     * @return the StorageAccountInner object itself.
     */
    public StorageAccountInner withEnableHttpsTrafficOnly(Boolean enableHttpsTrafficOnly) { this.enableHttpsTrafficOnly = enableHttpsTrafficOnly; return this; }

    /** Gets the network rule set. @return the networkRuleSet value */
    public NetworkRuleSet networkRuleSet() { return this.networkRuleSet; }

    /** Gets the HierarchicalNamespace flag. @return the isHnsEnabled value */
    public Boolean isHnsEnabled() { return this.isHnsEnabled; }

    /**
     * Sets the HierarchicalNamespace flag.
     *
     * @param isHnsEnabled the isHnsEnabled value to set
     * @return the StorageAccountInner object itself.
     */
    public StorageAccountInner withIsHnsEnabled(Boolean isHnsEnabled) { this.isHnsEnabled = isHnsEnabled; return this; }

    /** Gets the Geo Replication Stats. @return the geoReplicationStats value */
    public GeoReplicationStats geoReplicationStats() { return this.geoReplicationStats; }

    /** Gets the failover-in-progress flag (true or null). @return the failoverInProgress value */
    public Boolean failoverInProgress() { return this.failoverInProgress; }

    /**
     * Gets the large-file-shares state. Possible values include: 'Disabled',
     * 'Enabled'.
     *
     * @return the largeFileSharesState value
     */
    public LargeFileSharesState largeFileSharesState() { return this.largeFileSharesState; }

    /**
     * Sets the large-file-shares state; cannot be disabled once enabled.
     *
     * @param largeFileSharesState the largeFileSharesState value to set
     * @return the StorageAccountInner object itself.
     */
    public StorageAccountInner withLargeFileSharesState(LargeFileSharesState largeFileSharesState) { this.largeFileSharesState = largeFileSharesState; return this; }

    /** Gets the private endpoint connections for this account. @return the privateEndpointConnections value */
    public List<PrivateEndpointConnectionInner> privateEndpointConnections() { return this.privateEndpointConnections; }

    /** Gets the network routing choice for data transfer. @return the routingPreference value */
    public RoutingPreference routingPreference() { return this.routingPreference; }

    /**
     * Sets the network routing choice for data transfer.
     *
     * @param routingPreference the routingPreference value to set
     * @return the StorageAccountInner object itself.
     */
    public StorageAccountInner withRoutingPreference(RoutingPreference routingPreference) { this.routingPreference = routingPreference; return this; }
}
// Copyright 2014 Cloudera Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.cloudera.impala.analysis;

import java.math.BigDecimal;

import com.cloudera.impala.common.AnalysisException;
import com.cloudera.impala.common.InternalException;
import com.cloudera.impala.service.FeSupport;
import com.cloudera.impala.thrift.TAnalyticWindow;
import com.cloudera.impala.thrift.TAnalyticWindowBoundary;
import com.cloudera.impala.thrift.TAnalyticWindowBoundaryType;
import com.cloudera.impala.thrift.TAnalyticWindowType;
import com.cloudera.impala.thrift.TColumnValue;
import com.cloudera.impala.util.TColumnValueUtil;
import com.google.common.base.Preconditions;

/**
 * Windowing clause of an analytic expr
 * Both left and right boundaries are always non-null after analyze().
 */
public class AnalyticWindow {
  // default window used when an analytic expr was given an order by but no window
  public static final AnalyticWindow DEFAULT_WINDOW = new AnalyticWindow(Type.RANGE,
      new Boundary(BoundaryType.UNBOUNDED_PRECEDING, null),
      new Boundary(BoundaryType.CURRENT_ROW, null));

  enum Type {
    ROWS("ROWS"),
    RANGE("RANGE");

    private final String description_;

    private Type(String d) { description_ = d; }

    @Override
    public String toString() { return description_; }

    public TAnalyticWindowType toThrift() {
      return this == ROWS ? TAnalyticWindowType.ROWS : TAnalyticWindowType.RANGE;
    }
  }

  enum BoundaryType {
    UNBOUNDED_PRECEDING("UNBOUNDED PRECEDING"),
    UNBOUNDED_FOLLOWING("UNBOUNDED FOLLOWING"),
    CURRENT_ROW("CURRENT ROW"),
    PRECEDING("PRECEDING"),
    FOLLOWING("FOLLOWING");

    private final String description_;

    private BoundaryType(String d) { description_ = d; }

    @Override
    public String toString() { return description_; }

    /** Thrift mapping; only valid for the non-"UNBOUNDED" boundary types. */
    public TAnalyticWindowBoundaryType toThrift() {
      Preconditions.checkState(!isAbsolutePos());
      if (this == CURRENT_ROW) {
        return TAnalyticWindowBoundaryType.CURRENT_ROW;
      } else if (this == PRECEDING) {
        return TAnalyticWindowBoundaryType.PRECEDING;
      } else if (this == FOLLOWING) {
        return TAnalyticWindowBoundaryType.FOLLOWING;
      }
      return null;
    }

    public boolean isAbsolutePos() {
      return this == UNBOUNDED_PRECEDING || this == UNBOUNDED_FOLLOWING;
    }

    public boolean isOffset() {
      return this == PRECEDING || this == FOLLOWING;
    }

    public boolean isPreceding() {
      return this == UNBOUNDED_PRECEDING || this == PRECEDING;
    }

    public boolean isFollowing() {
      return this == UNBOUNDED_FOLLOWING || this == FOLLOWING;
    }

    /** Mirror boundary type, used when reversing a window. */
    public BoundaryType converse() {
      switch (this) {
        case UNBOUNDED_PRECEDING: return UNBOUNDED_FOLLOWING;
        case UNBOUNDED_FOLLOWING: return UNBOUNDED_PRECEDING;
        case PRECEDING: return FOLLOWING;
        case FOLLOWING: return PRECEDING;
        default: return CURRENT_ROW;
      }
    }
  }

  public static class Boundary {
    private final BoundaryType type_;

    // Offset expr. Only set for PRECEDING/FOLLOWING. Needed for toSql().
    private final Expr expr_;

    // The offset value. Set during analysis after evaluating expr_. Integral valued
    // for ROWS windows.
    private BigDecimal offsetValue_;

    public BoundaryType getType() { return type_; }
    public Expr getExpr() { return expr_; }

    public Boundary(BoundaryType type, Expr e) {
      this(type, e, null);
    }

    // c'tor used by clone()
    private Boundary(BoundaryType type, Expr e, BigDecimal offsetValue) {
      // Offset boundaries must carry an expr; all others must not.
      Preconditions.checkState(
        (type.isOffset() && e != null)
        || (!type.isOffset() && e == null));
      type_ = type;
      expr_ = e;
      offsetValue_ = offsetValue;
    }

    public String toSql() {
      StringBuilder sb = new StringBuilder();
      if (expr_ != null) sb.append(expr_.toSql()).append(" ");
      sb.append(type_.toString());
      return sb.toString();
    }

    public TAnalyticWindowBoundary toThrift(Type windowType) {
      TAnalyticWindowBoundary result = new TAnalyticWindowBoundary(type_.toThrift());
      if (type_.isOffset() && windowType == Type.ROWS) {
        result.setRows_offset_value(offsetValue_.longValue());
      }
      // TODO: range windows need range_offset_predicate
      return result;
    }

    @Override
    public boolean equals(Object obj) {
      if (obj == null) return false;
      if (obj.getClass() != this.getClass()) return false;
      Boundary o = (Boundary)obj;
      boolean exprEqual = (expr_ == null) == (o.expr_ == null);
      if (exprEqual && expr_ != null) exprEqual = expr_.equals(o.expr_);
      return type_ == o.type_ && exprEqual;
    }

    @Override
    public int hashCode() {
      // Fix: equals() was overridden without hashCode(), breaking the
      // Object contract for hash-based collections. Hash only on the
      // boundary type: Expr subclasses are not guaranteed to keep
      // hashCode() consistent with their equals(), so the offset expr is
      // deliberately excluded; equal boundaries still hash equal.
      return type_.hashCode();
    }

    public Boundary converse() {
      Boundary result = new Boundary(type_.converse(),
          (expr_ != null) ? expr_.clone() : null);
      result.offsetValue_ = offsetValue_;
      return result;
    }

    @Override
    public Boundary clone() {
      return new Boundary(type_, expr_ != null ? expr_.clone() : null, offsetValue_);
    }

    public void analyze(Analyzer analyzer) throws AnalysisException {
      if (expr_ != null) expr_.analyze(analyzer);
    }
  }

  private final Type type_;
  private final Boundary leftBoundary_;
  private Boundary rightBoundary_;  // may be null before analyze()
  private String toSqlString_;  // cached after analysis

  public Type getType() { return type_; }
  public Boundary getLeftBoundary() { return leftBoundary_; }
  public Boundary getRightBoundary() { return rightBoundary_; }
  public Boundary setRightBoundary(Boundary b) { return rightBoundary_ = b; }

  public AnalyticWindow(Type type, Boundary b) {
    type_ = type;
    Preconditions.checkNotNull(b);
    leftBoundary_ = b;
    rightBoundary_ = null;
  }

  public AnalyticWindow(Type type, Boundary l, Boundary r) {
    type_ = type;
    Preconditions.checkNotNull(l);
    leftBoundary_ = l;
    Preconditions.checkNotNull(r);
    rightBoundary_ = r;
  }

  /**
   * Clone c'tor
   */
  private AnalyticWindow(AnalyticWindow other) {
    type_ = other.type_;
    Preconditions.checkNotNull(other.leftBoundary_);
    leftBoundary_ = other.leftBoundary_.clone();
    if (other.rightBoundary_ != null) {
      rightBoundary_ = other.rightBoundary_.clone();
    }
    toSqlString_ = other.toSqlString_;  // safe to share
  }

  /** Returns the mirror image of this window (boundaries swapped and conversed). */
  public AnalyticWindow reverse() {
    Boundary newRightBoundary = leftBoundary_.converse();
    Boundary newLeftBoundary = null;
    if (rightBoundary_ == null) {
      newLeftBoundary = new Boundary(leftBoundary_.getType(), null);
    } else {
      newLeftBoundary = rightBoundary_.converse();
    }
    return new AnalyticWindow(type_, newLeftBoundary, newRightBoundary);
  }

  public String toSql() {
    if (toSqlString_ != null) return toSqlString_;
    StringBuilder sb = new StringBuilder();
    sb.append(type_.toString()).append(" ");
    if (rightBoundary_ == null) {
      sb.append(leftBoundary_.toSql());
    } else {
      sb.append("BETWEEN ").append(leftBoundary_.toSql()).append(" AND ");
      sb.append(rightBoundary_.toSql());
    }
    return sb.toString();
  }

  public TAnalyticWindow toThrift() {
    TAnalyticWindow result = new TAnalyticWindow(type_.toThrift());
    if (leftBoundary_.getType() != BoundaryType.UNBOUNDED_PRECEDING) {
      result.setWindow_start(leftBoundary_.toThrift(type_));
    }
    Preconditions.checkNotNull(rightBoundary_);
    if (rightBoundary_.getType() != BoundaryType.UNBOUNDED_FOLLOWING) {
      result.setWindow_end(rightBoundary_.toThrift(type_));
    }
    return result;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == null) return false;
    if (obj.getClass() != this.getClass()) return false;
    AnalyticWindow o = (AnalyticWindow)obj;
    boolean rightBoundaryEqual =
        (rightBoundary_ == null) == (o.rightBoundary_ == null);
    if (rightBoundaryEqual && rightBoundary_ != null) {
      rightBoundaryEqual = rightBoundary_.equals(o.rightBoundary_);
    }
    return type_ == o.type_
        && leftBoundary_.equals(o.leftBoundary_)
        && rightBoundaryEqual;
  }

  @Override
  public int hashCode() {
    // Fix: equals() was overridden without hashCode(), breaking the Object
    // contract. Combines the same components equals() compares.
    int result = type_.hashCode();
    result = 31 * result + leftBoundary_.hashCode();
    result = 31 * result + (rightBoundary_ == null ? 0 : rightBoundary_.hashCode());
    return result;
  }

  @Override
  public AnalyticWindow clone() { return new AnalyticWindow(this); }

  /**
   * Semantic analysis for expr of a PRECEDING/FOLLOWING clause.
   */
  private void checkOffsetExpr(Analyzer analyzer, Boundary boundary)
      throws AnalysisException {
    Preconditions.checkState(boundary.getType().isOffset());
    Expr e = boundary.getExpr();
    Preconditions.checkNotNull(e);
    boolean isPos = true;
    Double val = null;
    if (e.isConstant() && e.getType().isNumericType()) {
      try {
        val = TColumnValueUtil.getNumericVal(
            FeSupport.EvalConstExpr(e, analyzer.getQueryCtx()));
        if (val <= 0) isPos = false;
      } catch (InternalException exc) {
        throw new AnalysisException(
            "Couldn't evaluate PRECEDING/FOLLOWING expression: " + exc.getMessage());
      }
    }

    if (type_ == Type.ROWS) {
      if (!e.isConstant() || !e.getType().isIntegerType() || !isPos) {
        throw new AnalysisException(
            "For ROWS window, the value of a PRECEDING/FOLLOWING offset must be a "
              + "constant positive integer: " + boundary.toSql());
      }
      Preconditions.checkNotNull(val);
      boundary.offsetValue_ = new BigDecimal(val.longValue());
    } else {
      if (!e.isConstant() || !e.getType().isNumericType() || !isPos) {
        throw new AnalysisException(
            "For RANGE window, the value of a PRECEDING/FOLLOWING offset must be a "
              + "constant positive number: " + boundary.toSql());
      }
      // NOTE(review): BigDecimal(double) captures the exact binary value of
      // the evaluated double; kept as-is to preserve behavior.
      boundary.offsetValue_ = new BigDecimal(val);
    }
  }

  /**
   * Check that b1 <= b2.
   */
  private void checkOffsetBoundaries(Analyzer analyzer, Boundary b1, Boundary b2)
      throws AnalysisException {
    Preconditions.checkState(b1.getType().isOffset());
    Preconditions.checkState(b2.getType().isOffset());
    Expr e1 = b1.getExpr();
    Preconditions.checkState(
        e1 != null && e1.isConstant() && e1.getType().isNumericType());
    Expr e2 = b2.getExpr();
    Preconditions.checkState(
        e2 != null && e2.isConstant() && e2.getType().isNumericType());

    try {
      TColumnValue val1 = FeSupport.EvalConstExpr(e1, analyzer.getQueryCtx());
      TColumnValue val2 = FeSupport.EvalConstExpr(e2, analyzer.getQueryCtx());
      double left = TColumnValueUtil.getNumericVal(val1);
      double right = TColumnValueUtil.getNumericVal(val2);
      if (left > right) {
        throw new AnalysisException(
            "Offset boundaries are in the wrong order: " + toSql());
      }
    } catch (InternalException exc) {
      throw new AnalysisException(
          "Couldn't evaluate PRECEDING/FOLLOWING expression: " + exc.getMessage());
    }
  }

  public void analyze(Analyzer analyzer) throws AnalysisException {
    leftBoundary_.analyze(analyzer);
    if (rightBoundary_ != null) rightBoundary_.analyze(analyzer);

    if (leftBoundary_.getType() == BoundaryType.UNBOUNDED_FOLLOWING) {
      throw new AnalysisException(
          leftBoundary_.getType().toString() + " is only allowed for upper bound of "
            + "BETWEEN");
    }
    if (rightBoundary_ != null
        && rightBoundary_.getType() == BoundaryType.UNBOUNDED_PRECEDING) {
      throw new AnalysisException(
          rightBoundary_.getType().toString() + " is only allowed for lower bound of "
            + "BETWEEN");
    }
    if (rightBoundary_ == null && leftBoundary_.getType() == BoundaryType.FOLLOWING) {
      throw new AnalysisException(
          leftBoundary_.getType().toString() + " requires a BETWEEN clause");
    }

    if (leftBoundary_.getType().isOffset()) checkOffsetExpr(analyzer, leftBoundary_);
    if (rightBoundary_ == null) {
      // set right boundary to implied value, but make sure to cache toSql string
      // beforehand
      toSqlString_ = toSql();
      rightBoundary_ = new Boundary(BoundaryType.CURRENT_ROW, null);
      return;
    }
    if (rightBoundary_.getType().isOffset()) checkOffsetExpr(analyzer, rightBoundary_);

    if (leftBoundary_.getType() == BoundaryType.FOLLOWING) {
      if (rightBoundary_.getType() != BoundaryType.FOLLOWING
          && rightBoundary_.getType() != BoundaryType.UNBOUNDED_FOLLOWING) {
        throw new AnalysisException(
            "A lower window bound of " + BoundaryType.FOLLOWING.toString()
              + " requires that the upper bound also be "
              + BoundaryType.FOLLOWING.toString());
      }
      if (rightBoundary_.getType() != BoundaryType.UNBOUNDED_FOLLOWING) {
        checkOffsetBoundaries(analyzer, leftBoundary_, rightBoundary_);
      }
    }

    if (rightBoundary_.getType() == BoundaryType.PRECEDING) {
      if (leftBoundary_.getType() != BoundaryType.PRECEDING
          && leftBoundary_.getType() != BoundaryType.UNBOUNDED_PRECEDING) {
        throw new AnalysisException(
            "An upper window bound of " + BoundaryType.PRECEDING.toString()
              + " requires that the lower bound also be "
              + BoundaryType.PRECEDING.toString());
      }
      if (leftBoundary_.getType() != BoundaryType.UNBOUNDED_PRECEDING) {
        checkOffsetBoundaries(analyzer, rightBoundary_, leftBoundary_);
      }
    }
  }
}
package itu.abc4gsd.rcp.client_v6.logic; //import socket //from threading import Thread //import os //import copy //import time //import subprocess //import json, zmq // //import library.utils.utils as UT //import library.constants as CO import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.StringTokenizer; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; import itu.abc4gsd.rcp.client_v6.Activator; import itu.abc4gsd.rcp.client_v6.logic.Constants; import itu.abc4gsd.rcp.client_v6.logic.Utils; import itu.abc4gsd.rcp.client_v6.preferences.ConnectionAdvanced; import itu.abc4gsd.rcp.client_v6.preferences.Needs; import itu.abc4gsd.rcp.client_v6.view.abc4gsdPopUpNotification; import itu.abc4gsd.rcp.client_v6.view.model.ABC4GSDItem; import itu.abc4gsd.rcp.client_v6.view.model.ABC4GSDItemManager; import itu.abc4gsd.rcp.client_v6.view.model.IABC4GSDItem; import org.eclipse.core.runtime.Platform; import org.eclipse.core.runtime.preferences.ConfigurationScope; import org.eclipse.core.runtime.preferences.DefaultScope; import org.eclipse.core.runtime.preferences.IEclipsePreferences; import org.eclipse.swt.widgets.Display; import org.ini4j.Ini; import org.ini4j.InvalidFileFormatException; import org.json.simple.JSONArray; import org.json.simple.JSONObject; import org.json.simple.JSONValue; import org.zeromq.ZMQ; public class MasterClientBackEnd { class StreamGobbler extends Thread { InputStream is; String type; StreamGobbler(InputStream is, String type) { this.is = is; this.type = type; } public void run() { try { InputStreamReader isr = new InputStreamReader(is); BufferedReader br = new 
BufferedReader(isr); String line=null; while ( (line = br.readLine()) != null) System.out.println(type + ">" + line); } catch (IOException ioe) { ioe.printStackTrace(); } } } private class ApplicationDescriber { public String resourceId; public final String realId; public final String name; public final String artifact; public final String[] command; public final boolean hasInterface; public final boolean independent; // no need for versioning public final String appAssetId; public int pid; public ApplicationDescriber( String resourceId, String realId, String name, String artifact, String[] command, String interf, boolean independent, String appAssetId ) { this.resourceId = resourceId; this.realId = realId; this.name = name; this.artifact = artifact; this.command = command; this.hasInterface = interf.toLowerCase().equals("true"); this.independent = independent; this.appAssetId = appAssetId; } public ApplicationDescriber( ApplicationDescriber origin ) { this.resourceId = origin.resourceId; this.realId = origin.realId; this.name = origin.name; this.artifact = origin.artifact; this.command = Arrays.copyOf(origin.command, origin.command.length, String[].class);; this.hasInterface = origin.hasInterface; this.independent = origin.independent; this.appAssetId = origin.appAssetId; } } private static final boolean DBG = true; private final Lock lock = new ReentrantLock(); ConnectionManager connectionManager; private String[] addrs; private boolean resumeInProgress; protected long _currAct = -1; protected long _me = -1; protected int _confirmation = 0; protected List<String> _assetToKeep = new ArrayList<String>(); protected List<String> _actApp = new ArrayList<String>(); protected List<String> _model = new ArrayList<String>(); protected HashMap<String, ApplicationDescriber> _appId = new HashMap<String, ApplicationDescriber>(); protected HashMap<String, ApplicationDescriber> _frames = new HashMap<String, ApplicationDescriber>(); protected HashMap<String, String[]> _initInfo = 
new HashMap<String, String[]>(); private void initManager() { connectionManager = new ConnectionManager( addrs ); } private void initReceiver() { Executor executor = Executors.newFixedThreadPool(1); executor.execute( new Runnable() { public void run() { __receive(); } } ); } public MasterClientBackEnd( String[] addrs ) { // self._checkConfirmation = None this.addrs = addrs; initManager(); initReceiver(); } /* * Connection functions */ private void __receive() { connectionManager.locking += 1; try { ZMQ.Poller poller = connectionManager.getContext().poller(1); poller.register( connectionManager.control, ZMQ.Poller.POLLIN ); while (! connectionManager.close ) { poller.poll(); // pass 1000? if( poller.pollin(0) ) { String data = (String)connectionManager.recv( connectionManager.control, false ); //TODO log this System.out.println( "Received> " + data ); JSONObject resp = _handleLocalRequest( data ); connectionManager.send( connectionManager.control, (String) resp.get(Constants.MSG_A) ); } } connectionManager.locking -= 1; } catch (Exception e) { // TODO > here it's a mess connectionManager.locking -= 1; connectionManager.close(); initManager(); initReceiver(); } } /* * Applications communication */ @SuppressWarnings("unchecked") private JSONObject _handleLocalRequest( String wip ) { JSONObject resp = new JSONObject(); resp.put(Constants.MSG_A, ""); StringTokenizer msg = new StringTokenizer( wip, " " ); List<String> data = new ArrayList<String>(); while( msg.hasMoreTokens() ) data.add(msg.nextToken()); String remaining = wip.substring( data.get(0).length() ).trim(); if( data.get(0).equals( "ABC" ) ) { if( data.get(1).equals( "RESUME" ) && data.get(2).equals( "COMPLETED" ) ) _confirmation -= 1; if( data.get(1).equals( "SUSPEND" ) && data.get(2).equals( "COMPLETED" ) ) _confirmation -= 1; if( data.get(1).equals( "KILL" ) && data.get(2).equals( "COMPLETED" ) ) _confirmation -= 1; } try { if( data.get(0).equals( "INIT" ) ) { resp = new JSONObject(); resp.put("a", 
_initApplication( remaining )); } if( data.get(0).equals( "RESUME" ) ) { _resumeActivity( Long.parseLong(remaining) ); _openChat( Long.parseLong(remaining) ); } if( data.get(0).equals( "SUSPEND" ) ) _suspendActivity( Long.parseLong(remaining) ); if( data.get(0).equals( "ARTIFACT_LOAD" ) ) _loadArtifact( Long.parseLong(remaining) ); if( data.get(0).equals( "ARTIFACT_KILL" ) ) _killAsset( Long.parseLong(remaining), true ); if( data.get(0).equals( "ARTIFACT_STORE" ) ) _storeArtifact( Long.parseLong(remaining) ); if( data.get(0).equals( "QUERY" ) ) { // model = msg[1].split(' ', 1)[0] // q = msg[1].split(' ', 1)[1] resp = _query( wip ); } if( data.get(0).equals("NOTIFY") ) _notify( remaining ); if( data.get(0).equals("CHAT_OPEN") ) _openChat( Long.parseLong(remaining) ); if( data.get(0).equals("CHAT_WRITE") ) _writeInChat( remaining ); } catch (Exception e) { e.printStackTrace(); } return resp; } private JSONObject _query( String msg ) { InternalMessage recv = null; lock.lock(); try { _send( msg ); recv = _receive(); } finally { lock.unlock(); } return recv.data; } protected String[] query( String query ) { return query( query, "abc" ); } protected String[] query( String query, String model ) { String q = "QUERY " + model + " " + query; Object tmp = _query(q); List<String> wip = new ArrayList<String>(); tmp = ((JSONArray)((JSONObject)tmp).get(Constants.MSG_A)).get(2); if( tmp instanceof JSONArray ) for( int x=0; x<((JSONArray)tmp).size(); x++ ) wip.add( ((JSONArray)tmp).get(x).toString() ); else if ( tmp == null ) return new String[]{}; else wip.add( tmp.toString() ); return wip.toArray( new String[wip.size()]); } private void _resumeActivity( long actId ) { System.out.println( "Resuming ... 
" + actId ); resumeInProgress = true; // take all application // launch them by sending also uid and act id //if( _chekcConfirmation != null) // if self._checkConfirmation != None: // print "Still up - %s" % (self._confirmation,) // self._checkConfirmation[0](self._checkConfirmation[1]) // self._checkConfirmation = None String msg = "CMD RESUME ALL"; connectionManager.send( connectionManager.publisher, msg ); // Check if you are user of selected activity, if not ask to be linked String[] users = MasterClientWrapper.getInstance().query( "abc.user.[abc.state.[abc.state.[].activity.==." +actId+ "].user]._id" ); boolean present = false; for( String wip : users ) if( wip.equals( ""+_me ) ) present = true; if( ! present ) { // Creating the state IABC4GSDItem newState = new ABC4GSDItem( "abc.state" ); newState.set( "name", actId + ":" + _me ); newState.set( "state", Constants.STATE_UNKNOWN ); newState.attach( "activity", actId ); newState.attach( "user", _me ); // Creating the ecology String[] assets = MasterClientWrapper.getInstance().query("abc.ecology.[abc.ecology.[].name.~=.{{("+actId+"):[0-9]*}}].asset"); IABC4GSDItem newEcology = new ABC4GSDItem( "abc.ecology" ); newEcology.set( "name", actId + ":" + _me ); newEcology.attach( "activity", actId ); newEcology.attach( "user", _me ); for( String asset : assets ) newEcology.attach( "asset", asset ); } String q; // Set activity state to correct value with current user q = "abc.state.[abc.state.[].name.==." + actId + ":" + _me + "].state.=." + Constants.STATE_RESUMED; query( q ); q = "abc.activity." + actId + ".active.+." 
+ _me; query( q ); // Set overall activity state to correct value ABC4GSDItem activity = new ABC4GSDItem("abc.activity", actId, new String[]{"state"}); if( activity.get("state").equals(Constants.ACT_INITIALIZED.toString()) ) { activity.set("state", Constants.ACT_ONGOING); } // build needed artifacts list // if _currAct != from -1 if( _currAct != -1 ) { buildToKeepList(actId); _suspendActivity(_currAct); } // Update for last-used-activity purposes _currAct = actId; ABC4GSDItem user = new ABC4GSDItem("abc.user", _me); user.set("activity", actId); _resumeAssets(); resumeInProgress = false; } private String[] getArtifactAsset( long actId ) { String q; String[] ids; // Get ids of assets that are artifacts q = "abc.ecology.[].name.==." + actId + ":" + _me; ids = query( q ); if( ids.length == 0 ) return ids; q = "abc.asset.[abc.ecology." + generateQueryString(ids) + ".asset].type.==.artifact"; ids = query( q ); return ids; // ids = ids.replace("\"", ""); // ids = ids.replace("[", ""); // ids = ids.replace("]", ""); // if( ids.equals("") ) return new String[]{}; // return ids.split(","); } private void buildToKeepList( long newActId ) { HashMap<String, String> oldArti = new HashMap<String, String>(); String[] nextAsset; for( Map.Entry<String, ApplicationDescriber> e : _frames.entrySet() ) oldArti.put(e.getValue().realId, e.getValue().resourceId); nextAsset = getArtifactAsset(newActId); String q, resp, resp2; _assetToKeep = new ArrayList<String>(); for( String wip : nextAsset ) { q = "abc.asset." + wip + ".ptr"; resp = query(q)[0]; if( oldArti.containsKey(resp) ) { q = "abc.property.[abc.property.[abc.property.[].name.==." + _me + ":" + wip + "].key.==.auto].value"; // Why is it returning true here and not the id? 
See #306 resp2 = query(q)[0]; if( resp2.toLowerCase().equals("true") ) _assetToKeep.add(resp); // contains id of artifacts pointed } } } private void _resumeAssets() { // abc.asset.[abc.ecology.[abc.ecology.[].name.==.168081276:168080412].asset].type.==.artifact // abc.asset.[abc.asset.[abc.ecology.[abc.ecology.[].name.==.{{159282924:159282492}}].asset].type.==.artifact].ptr String q; String[] ids, tmp; String[] assetIds = getArtifactAsset(_currAct); for( int x=0; x<assetIds.length; x++ ) { // here it should check artifacts ids not assets if( _frames.containsKey(assetIds[x]) ) continue; // the property might not be there // ... if it is there then the key==auto is there q = "abc.property.[].name.==." + _me + ":" + assetIds[x]; tmp = query(q); if( tmp.length == 0 ) return; // constructing the list of properties of this asset q = ""; for( String wip : tmp ) q += wip + ","; q = "abc.property.[abc.property.[" + q.substring(0,q.length()-1) + "].key.==.auto].value"; ids = query(q); // ids = ((JSONArray)((JSONArray)query( q ).get(Constants.MSG_A)).get(2)).get(0).toString(); if((ids.length > 0) && (ids[0].toLowerCase().equals("true") )) _loadArtifact(Long.parseLong(assetIds[x])); } } private void _suspendActivity( long actId ) { System.out.println( "Suspending ... " + actId ); query( "abc.state.[abc.state.[].name.==." + actId + ":" + _me + "].state.=." + Constants.STATE_SUSPENDED ); query( "abc.activity." + actId + ".active.-." + _me ); // send suspend to processes if( !resumeInProgress ) { String msg = "CMD SUSPEND ALL"; connectionManager.send( connectionManager.publisher, msg ); } // kill apps with no interface connection killApplications(); pushArtifacts(); // remove info about artifact query( "abc.user." + _me + ".artifact.=.-1" ); query( "abc.user." + _me + ".activity.=.-1" ); System.out.println( "Suspension done!" 
); } private void _killAsset( String assetId, boolean single ) { _killAsset( Long.parseLong(assetId), single ); } private void _killAsset( long assetId, boolean single ) { System.out.println( "Killing asset ... " + assetId ); // Storing and killing specific app/art Runtime run = Runtime.getRuntime(); Process pr; String[] cmd; String cmd2; List<String> toRemove = new ArrayList<String>(); ApplicationDescriber entry = _frames.get(""+assetId); // No record of the application to be killed if( entry == null ) return; // To avoid Eclipse to shut down if( ! entry.hasInterface ) { if( _appId.containsKey(assetId) ) { cmd = Utils.terminateProcessCommand( ""+_appId.get(assetId).pid ); _appId.remove(assetId); } else { cmd = Utils.terminateWindowCommand( entry.name, entry.artifact ); } cmd2 = ""; for( String wipwip : cmd ) { if( cmd2.length() > 0 ) cmd2 += " "; cmd2 += wipwip; } System.out.println( cmd2 ); try { pr = run.exec( cmd ); pr.waitFor(); } catch (IOException e) { e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } } else if( _appId.containsKey(assetId) ) _appId.remove(assetId); _frames.remove( ""+assetId ); if( single ) pushArtifacts(); } private void _loadArtifact( long artifactId ) { // it is asset id System.out.println( "Loading artifact ... " + artifactId ); String q; q = "abc.user."+_me+".artifact.=."+artifactId; query(q); q = "abc.asset." + artifactId + ".ptr"; String realId = query(q)[0]; for( Map.Entry<String, ApplicationDescriber> e : _frames.entrySet() ) // if the artifact is found in the frames // ... 
update the descriptor and return if( e.getValue().realId.equals(realId) ) { _frames.remove(e); ApplicationDescriber tmp = new ApplicationDescriber(e.getValue()); tmp.resourceId = ""+artifactId; _frames.put(tmp.resourceId, tmp ); return; } // else create the new frame String location, name, type; boolean independent; ABC4GSDItem asset = new ABC4GSDItem( "abc.asset", artifactId, new String[]{"ptr"} ); realId = asset.get("ptr").toString(); ABC4GSDItem artifact = new ABC4GSDItem( "abc.artifact", realId, new String[]{"name","location","independent","type"} ); name = artifact.get("name").toString(); location = artifact.get("location").toString(); independent = artifact.get("independent").toString().toLowerCase().equals("true"); type = artifact.get("type").toString(); if( !independent ) getUpdatedArtifact( location, name ); else checkLocation( location, name ); launchArtifact( location, name, type, ""+artifactId, independent, realId ); } private void checkLocation( String location, String name ) { if( !location.startsWith("git://") ) return; // the name needs to be the same as the ending of location for git File f = new File( Platform.getLocation().toFile(), name ); Runtime run = Runtime.getRuntime(); Process pr; BufferedReader buf; String wip, line; if( !f.exists() ) { try { wip = "git clone " + location; pr = run.exec( wip, null, Platform.getLocation().toFile() ); pr.waitFor(); buf = new BufferedReader(new InputStreamReader(pr.getInputStream())); line = ""; while ((line=buf.readLine())!=null) System.out.println(line); } catch (IOException e) { e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } } else { try { pr = run.exec( "git pull", null, f ); pr.waitFor(); buf = new BufferedReader(new InputStreamReader(pr.getInputStream())); line = ""; while ((line=buf.readLine())!=null) System.out.println(line); } catch (IOException e) { e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } } } private void getUpdatedArtifact( 
String location, String name ) { // TODO > make it specific for the file IEclipsePreferences prefs = ConfigurationScope.INSTANCE.getNode(Activator.PLUGIN_ID); String baseDir = prefs.get(Needs.LOCAL_REPO, DefaultScope.INSTANCE.getNode(Activator.PLUGIN_ID).get(Needs.LOCAL_REPO, "")); File f = new File( Platform.getLocation().toFile(), baseDir ); Runtime run = Runtime.getRuntime(); Process pr; BufferedReader buf; String wip, line; if( !f.exists() ) { try { wip = "git clone " + prefs.get(ConnectionAdvanced.ADDR_REPOSITORY, DefaultScope.INSTANCE.getNode(Activator.PLUGIN_ID).get(ConnectionAdvanced.ADDR_REPOSITORY, "")); pr = run.exec( wip, null, Platform.getLocation().toFile() ); pr.waitFor(); buf = new BufferedReader(new InputStreamReader(pr.getInputStream())); line = ""; while ((line=buf.readLine())!=null) System.out.println(line); } catch (IOException e) { e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } } else { try { pr = run.exec( "git pull", null, f ); pr.waitFor(); buf = new BufferedReader(new InputStreamReader(pr.getInputStream())); line = ""; while ((line=buf.readLine())!=null) System.out.println(line); } catch (IOException e) { e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } } // TODO> understand better this horrible hack. File[] fList = f.listFiles(); for( File xx : fList ) if( xx.getName().startsWith(".~lock") ) xx.delete(); System.out.println( f.toString() ); } private void launchArtifact( String location, String name, String type, String resourceId, boolean independent, String realId ) { String q, need; String[] propertyId, wip; String appAssetId = ""; q = "abc.property.[].name.==." + _me + ":" + resourceId; propertyId = query(q); if( propertyId.length == 1 ) { // only the autoload launchArtifactFirst(location, name, type, resourceId); q = "abc.property.[].name.==." + _me + ":" + resourceId; propertyId = query(q); } q = "abc.property.[abc.property." 
+ generateQueryString(propertyId) + ".key.==.needId].value"; wip = query(q); if( wip != null && wip.length > 0 ) { appAssetId = wip[0]; q = "abc.application.[abc.asset." + generateQueryString(wip) + ".ptr].need"; wip = query(q); need = wip.length > 0 ? wip[0] : ""; } else need = ""; // need = ((JSONArray)((JSONArray)query(q).get(Constants.MSG_A)).get(2)).get(0).toString(); String[] needInfo = getNeedInfo(need); String[] wip2; wip2 = needInfo[2].split(" "); if( needInfo.length > 3 ) for( int x=0; x<wip2.length; x++ ) if( wip2[x].equals(needInfo[3]) ) if(!independent) wip2[x] = location; else wip2[x] = (new File( Platform.getLocation().toFile(), name)).toString(); IEclipsePreferences prefs = ConfigurationScope.INSTANCE.getNode(Activator.PLUGIN_ID); String baseDir = prefs.get(Needs.LOCAL_REPO, DefaultScope.INSTANCE.getNode(Activator.PLUGIN_ID).get(Needs.LOCAL_REPO, "")); File f; if( !independent ) { location = (new File(location)).getName(); f = new File( Platform.getLocation().toFile(), baseDir ); } else { f = null; } ApplicationDescriber describer = new ApplicationDescriber(resourceId, realId, needInfo[0], location, wip2, needInfo[1], independent, appAssetId); if( !_assetToKeep.contains(describer.realId) ) _execute( describer, f ); } private String[] getNeedInfo( String need ) { IEclipsePreferences prefs = ConfigurationScope.INSTANCE.getNode(Activator.PLUGIN_ID); Ini ini = new Ini(); String command = ""; String[] parameters = null; String name = ""; String interf = ""; // loading general need // TODO > load previously linked needAsset try { ini.load( new ByteArrayInputStream(prefs.get(Needs.NEEDS, DefaultScope.INSTANCE.getNode(Activator.PLUGIN_ID).get(Needs.NEEDS, "ERROR")).getBytes() ) ); Map<String, String> map; map = ini.get("needs"); if( !map.containsKey(need) ) need = "*"; map = ini.get( map.get(need) ); command = map.get("command"); name = map.get("name"); interf = map.get("interface"); parameters = new String[ Integer.parseInt(map.get("paramNr")) ]; for( int 
x=0; x< parameters.length; x++ ) parameters[x] = map.get("param" + x); } catch (InvalidFileFormatException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } String[] resp = new String[ parameters.length + 3 ]; resp[0] = name; resp[1] = interf; resp[2] = command; for( int x=0; x<parameters.length; x++ ) resp[x+3] = parameters[x]; return resp; } private void launchArtifactFirst( String location, String name, String type, String resourceId ) { String q; IABC4GSDItem application; q = "abc.application.[].need.==.{{" + type + "}}"; String[] tmp = query(q); if( tmp == null || tmp.length == 0 ) { // creating the application if never used so far application = new ABC4GSDItem("abc.application"); application.set("need", type); } else application = new ABC4GSDItem("abc.application",tmp[0]); // creating the asset to wrap the need IABC4GSDItem asset = new ABC4GSDItem("abc.asset"); asset.set("ptr", application.getId()); asset.set("type", "application"); asset.set("name", "APP"); // creating property for linking resource with need running it IABC4GSDItem property = new ABC4GSDItem("abc.property"); property.attach("asset", resourceId); property.attach("user", _me); property.set("name", _me + ":" + resourceId); property.set("key", "needId"); property.set("value", asset.getId()); } private void _storeArtifact( long artifactId ) {} private void pushArtifacts() { // TODO > make it specific for the file // IEclipsePreferences prefs = ConfigurationScope.INSTANCE.getNode(Application.PLUGIN_ID); // String baseDir = prefs.get(Needs.LOCAL_REPO, DefaultScope.INSTANCE.getNode(Application.PLUGIN_ID).get(Needs.LOCAL_REPO, "")); System.out.println("Pushing repo ..."); File[] tmp = Platform.getLocation().toFile().listFiles(); List<File> dirs = new ArrayList<File>(); for( int x=0; x<tmp.length; x++ ) if(tmp[x].isDirectory()) dirs.add(tmp[x]); for( File x : dirs ) { File f = new File( Platform.getLocation().toFile(), x.getName() ); Runtime run = Runtime.getRuntime(); Process 
pr; BufferedReader buf; String line; if( !f.exists() ) { continue; } else { try { pr = run.exec( "git add *", null, f ); pr.waitFor(); buf = new BufferedReader(new InputStreamReader(pr.getInputStream())); line = ""; while ((line=buf.readLine())!=null) System.out.println(line); pr = run.exec( "git commit -m aaa -a", null, f ); pr.waitFor(); buf = new BufferedReader(new InputStreamReader(pr.getInputStream())); line = ""; while ((line=buf.readLine())!=null) System.out.println(line); pr = run.exec( "git push origin master", null, f ); pr.waitFor(); buf = new BufferedReader(new InputStreamReader(pr.getInputStream())); line = ""; while ((line=buf.readLine())!=null) System.out.println(line); } catch (IOException e) { e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } } System.out.println( f.toString() ); } } /* * Applications execution functions */ private void killApplications() { Map<String, ApplicationDescriber> tmpFrames = new HashMap<String, ApplicationDescriber>(_frames); for( String k : tmpFrames.keySet() ) { if(!_assetToKeep.contains( tmpFrames.get(k).realId )) _killAsset(k, false); } } private void _execute( final ApplicationDescriber describer, final File environment ) { Display.getDefault().asyncExec( new Runnable() { public void run() { Runtime run = Runtime.getRuntime(); Process pr; BufferedReader buf; String line; try { for( String x : describer.command ) System.out.println(x); if( environment != null ) { pr = run.exec( describer.command, null, environment ); // any error message? StreamGobbler errorGobbler = new StreamGobbler(pr.getErrorStream(), "ERR"); // any output? StreamGobbler outputGobbler = new StreamGobbler(pr.getInputStream(), "OUT"); // kick them off errorGobbler.start(); outputGobbler.start(); // any error??? 
int exitVal = pr.waitFor(); System.out.println("ExitValue: " + exitVal); } else pr = run.exec( describer.command ); _frames.put( describer.resourceId, describer ); if( describer.independent ) { int pid = Utils.getPid(pr); describer.pid = pid; _appId.put(describer.resourceId, describer); } } catch (IOException e) { e.printStackTrace(); } catch (InterruptedException e) { e.printStackTrace(); } }}); } private String _initApplication( String name ) { //u_id [currAct appAssetId artifactAssetId] List<String> param = new ArrayList<String>(); param.add( ""+_me ); for( Map.Entry<String, ApplicationDescriber> entry : _appId.entrySet() ) { if( name.equals( entry.getValue().name ) ) { param.add( ""+_currAct ); param.add( entry.getValue().appAssetId ); param.add( entry.getValue().resourceId ); } } String out = ""; for( String each: param ) { if( out.length() > 0 ) out += " "; out += each; } System.out.println(out); return out; } /* * Functions to handle server */ protected InternalMessage connect( String name, String model ) { if( _model.contains(model) ) return null; String mainMsg = "CONNECT " + model + " USER " + name; _send(mainMsg); InternalMessage resp = _receive(); System.out.println( resp ); if( resp.code.length() > 0 ) { _me = Long.parseLong((String) ((JSONArray) ((JSONArray) resp.data.get(Constants.MSG_A)).get(2)).get(1)); // String dirTmp = Utils.getTmpDir(); // String msg = "QUERY " + model + " abc.user." + _me + ".tmp_dir.=.{{" + dirTmp + "}}"; // _query( msg ); System.out.println( "Connected to " + model ); _model.add( model ); String msg = "QUERY " + model + " abc.user." + _me + ".state.=." + Constants.USR_CONNECTED; _query( msg ); } return new InternalMessage( mainMsg, resp ); } protected InternalMessage disconnect( String model ) { // rest -> <model> if( !_model.contains(model) ) return null; String msg = "QUERY " + model + " abc.user." + _me + ".state.=." 
+ Constants.USR_DISCONNECTED; _query( msg ); msg = "DISCONNECT " + model; _send(msg); InternalMessage resp = _receive(); if( resp.code.length() > 0 ) { System.out.println( "Disconnected from " + model ); _model.remove( model ); } return new InternalMessage(msg, resp); } private void _send( String msg ) { long code = Utils.getRandomId(); String wip = "CODE " + code + " FROM " + _me + " " + msg; connectionManager.send( connectionManager.backend, wip, true ); } private InternalMessage _receive() { JSONObject wip = (JSONObject)connectionManager.recv( connectionManager.backend, true ); return new InternalMessage("Old", wip ); } protected InternalMessage run( String wip ) { String msg = "RUN " + wip; InternalMessage resp = null; lock.lock(); try { _send( msg ); resp = _receive(); } finally { lock.unlock(); } return new InternalMessage( msg, resp ); } public List<String> getModel() { return _model; } public ConnectionManager getConnectionManager() { return connectionManager; } private String generateQueryString( String[] wip ) { String ret = ""; for( int x=0; x<wip.length; x++ ) ret += wip[x] + ", "; if( ret.length() > 0 ) ret = "[" + ret.substring(0, ret.length()-2) + "]"; else ret = "[]"; return ret; } private void _notify( String msg ) { String wip = "CMD NOTIFICATION " + msg; connectionManager.send( connectionManager.publisher, wip ); } public void _openChat( long actId ) { String wip = "CMD CHAT_OPEN " + actId; connectionManager.send( connectionManager.publisher, wip ); } public void _writeInChat( String msg ) { String wip = "CMD CHAT_WRITE " + msg; connectionManager.send( connectionManager.publisher, wip ); } public void suspend() { if( _currAct != -1 ) _suspendActivity(_currAct); } }
package org.docksidestage.hangar.dbflute.bsentity;

import java.util.List;
import java.util.ArrayList;

import org.dbflute.Entity;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.AbstractEntity;
import org.dbflute.dbmeta.accessory.DomainEntity;
import org.dbflute.optional.OptionalEntity;
import org.docksidestage.hangar.dbflute.allcommon.DBMetaInstanceHandler;
import org.docksidestage.hangar.dbflute.exentity.*;

/**
 * The entity of WHITE_BASE_ONE11_CIRQUE as TABLE. <br>
 * <pre>
 * [primary-key]
 *     CIRQUE_ID
 *
 * [column]
 *     CIRQUE_ID, CIRQUE_NAME
 *
 * [sequence]
 *     
 *
 * [identity]
 *     
 *
 * [version-no]
 *     
 *
 * [foreign table]
 *     WHITE_BASE
 *
 * [referrer table]
 *     
 *
 * [foreign property]
 *     whiteBase
 *
 * [referrer property]
 *     
 *
 * [get/set template]
 * /= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
 * Integer cirqueId = entity.getCirqueId();
 * String cirqueName = entity.getCirqueName();
 * entity.setCirqueId(cirqueId);
 * entity.setCirqueName(cirqueName);
 * = = = = = = = = = =/
 * </pre>
 * @author DBFlute(AutoGenerator)
 */
public abstract class BsWhiteBaseOne11Cirque extends AbstractEntity implements DomainEntity {

    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    /** The serial version UID for object serialization. (Default) */
    private static final long serialVersionUID = 1L;

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    /** CIRQUE_ID: {PK, NotNull, INTEGER(10), FK to WHITE_BASE} */
    protected Integer _cirqueId;

    /** CIRQUE_NAME: {NotNull, VARCHAR(200)} */
    protected String _cirqueName;

    // ===================================================================================
    //                                                                             DB Meta
    //                                                                             =======
    /** {@inheritDoc} */
    public DBMeta asDBMeta() {
        // resolved lazily through the shared instance handler keyed by table name
        return DBMetaInstanceHandler.findDBMeta(asTableDbName());
    }

    /** {@inheritDoc} */
    public String asTableDbName() {
        return "WHITE_BASE_ONE11_CIRQUE";
    }

    // ===================================================================================
    //                                                                        Key Handling
    //                                                                        ============
    /** {@inheritDoc} */
    public boolean hasPrimaryKeyValue() {
        // single-column PK: present exactly when CIRQUE_ID is set
        return _cirqueId != null;
    }

    // ===================================================================================
    //                                                                    Foreign Property
    //                                                                    ================
    /** WHITE_BASE by my CIRQUE_ID, named 'whiteBase'. */
    protected OptionalEntity<WhiteBase> _whiteBase;

    /**
     * [get] WHITE_BASE by my CIRQUE_ID, named 'whiteBase'. <br>
     * Optional: alwaysPresent(), ifPresent().orElse(), get(), ...
     * @return The entity of foreign property 'whiteBase'. (NotNull, EmptyAllowed: when e.g. null FK column, no setupSelect)
     */
    public OptionalEntity<WhiteBase> getWhiteBase() {
        if (_whiteBase == null) {
            // lazily materialize an empty relation holder so callers never see null
            _whiteBase = OptionalEntity.relationEmpty(this, "whiteBase");
        }
        return _whiteBase;
    }

    /**
     * [set] WHITE_BASE by my CIRQUE_ID, named 'whiteBase'.
     * @param whiteBase The entity of foreign property 'whiteBase'. (NullAllowed)
     */
    public void setWhiteBase(OptionalEntity<WhiteBase> whiteBase) {
        _whiteBase = whiteBase;
    }

    // ===================================================================================
    //                                                                   Referrer Property
    //                                                                   =================
    protected <ELEMENT> List<ELEMENT> newReferrerList() { // overriding to import
        return new ArrayList<ELEMENT>();
    }

    // ===================================================================================
    //                                                                      Basic Override
    //                                                                      ==============
    @Override
    protected boolean doEquals(Object obj) {
        // equality is by primary key only, mirroring the table's PK definition
        if (!(obj instanceof BsWhiteBaseOne11Cirque)) {
            return false;
        }
        BsWhiteBaseOne11Cirque other = (BsWhiteBaseOne11Cirque)obj;
        return xSV(_cirqueId, other._cirqueId);
    }

    @Override
    protected int doHashCode(int initial) {
        // fold the table name first, then the PK column
        return xCH(xCH(initial, asTableDbName()), _cirqueId);
    }

    @Override
    protected String doBuildStringWithRelation(String li) {
        StringBuilder buf = new StringBuilder();
        if (_whiteBase != null && _whiteBase.isPresent()) {
            buf.append(li).append(xbRDS(_whiteBase, "whiteBase"));
        }
        return buf.toString();
    }

    protected <ET extends Entity> String xbRDS(org.dbflute.optional.OptionalEntity<ET> et, String name) { // buildRelationDisplayString()
        return et.get().buildDisplayString(name, true, true);
    }

    @Override
    protected String doBuildColumnString(String dm) {
        StringBuilder buf = new StringBuilder();
        buf.append(dm).append(xfND(_cirqueId));
        buf.append(dm).append(xfND(_cirqueName));
        // strip the leading delimiter before wrapping in braces
        if (buf.length() > dm.length()) {
            buf.delete(0, dm.length());
        }
        buf.insert(0, "{").append("}");
        return buf.toString();
    }

    @Override
    protected String doBuildRelationString(String dm) {
        StringBuilder buf = new StringBuilder();
        if (_whiteBase != null && _whiteBase.isPresent()) {
            buf.append(dm).append("whiteBase");
        }
        // only parenthesize when at least one relation name was appended
        if (buf.length() > dm.length()) {
            buf.delete(0, dm.length()).insert(0, "(").append(")");
        }
        return buf.toString();
    }

    @Override
    public WhiteBaseOne11Cirque clone() {
        return (WhiteBaseOne11Cirque)super.clone();
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    /**
     * [get] CIRQUE_ID: {PK, NotNull, INTEGER(10), FK to WHITE_BASE} <br>
     * @return The value of the column 'CIRQUE_ID'. (basically NotNull if selected: for the constraint)
     */
    public Integer getCirqueId() {
        checkSpecifiedProperty("cirqueId");
        return _cirqueId;
    }

    /**
     * [set] CIRQUE_ID: {PK, NotNull, INTEGER(10), FK to WHITE_BASE} <br>
     * @param cirqueId The value of the column 'CIRQUE_ID'. (basically NotNull if update: for the constraint)
     */
    public void setCirqueId(Integer cirqueId) {
        registerModifiedProperty("cirqueId");
        _cirqueId = cirqueId;
    }

    /**
     * [get] CIRQUE_NAME: {NotNull, VARCHAR(200)} <br>
     * @return The value of the column 'CIRQUE_NAME'. (basically NotNull if selected: for the constraint)
     */
    public String getCirqueName() {
        checkSpecifiedProperty("cirqueName");
        // empty strings are normalized to null on read (DBFlute convention)
        return convertEmptyToNull(_cirqueName);
    }

    /**
     * [set] CIRQUE_NAME: {NotNull, VARCHAR(200)} <br>
     * @param cirqueName The value of the column 'CIRQUE_NAME'. (basically NotNull if update: for the constraint)
     */
    public void setCirqueName(String cirqueName) {
        registerModifiedProperty("cirqueName");
        _cirqueName = cirqueName;
    }
}
/*
 * Copyright 2013 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.bitcoinj.protocols.channels;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.bitcoinj.core.*;
import org.bitcoinj.crypto.TransactionSignature;
import org.bitcoinj.script.Script;
import org.bitcoinj.script.ScriptBuilder;
import org.bitcoinj.wallet.AllowUnconfirmedCoinSelector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.crypto.params.KeyParameter;

import javax.annotation.Nullable;
import java.math.BigInteger;
import java.util.List;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

/**
 * Version 2 of the payment channel state machine - uses CLTV opcode transactions
 * instead of multisig transactions.
 */
public class PaymentChannelV2ClientState extends PaymentChannelClientState {
    // FIX: was LoggerFactory.getLogger(PaymentChannelV1ClientState.class), which attributed
    // every log line emitted by this V2 state machine to the V1 class.
    private static final Logger log = LoggerFactory.getLogger(PaymentChannelV2ClientState.class);

    // How much value (in satoshis) is locked up into the channel.
    private final Coin totalValue;
    // When the channel will automatically settle in favor of the client, if the server halts before protocol termination
    // specified in terms of block timestamps (so it can be off real time by a few hours).
    private final long expiryTime;

    // The refund is a time locked transaction that spends all the money of the channel back to the client.
    // Unlike in V1 this refund isn't signed by the server - we only have to sign it ourselves.
    @VisibleForTesting Transaction refundTx;
    private Coin refundFees;

    // The multi-sig contract locks the value of the channel up such that the agreement of both parties is required
    // to spend it.
    private Transaction contract;

    /**
     * Restores channel state from a previously stored channel. All stored fields are required to be
     * non-null; the state machine resumes directly in {@link State#READY}.
     */
    PaymentChannelV2ClientState(StoredClientChannel storedClientChannel, Wallet wallet) throws VerificationException {
        super(storedClientChannel, wallet);
        // The PaymentChannelClientConnection handles storedClientChannel.active and ensures we aren't resuming channels
        this.contract = checkNotNull(storedClientChannel.contract);
        this.expiryTime = storedClientChannel.expiryTime;
        this.totalValue = contract.getOutput(0).getValue();
        this.valueToMe = checkNotNull(storedClientChannel.valueToMe);
        this.refundTx = checkNotNull(storedClientChannel.refund);
        this.refundFees = checkNotNull(storedClientChannel.refundFees);
        stateMachine.transition(State.READY);
        initWalletListeners();
    }

    /**
     * Creates a fresh channel state for a new channel of {@code value} satoshis that expires at
     * {@code expiryTimeInSeconds}. The state machine starts in {@link State#NEW}; call
     * {@link #initiate(KeyParameter)} next.
     */
    public PaymentChannelV2ClientState(Wallet wallet, ECKey myKey, ECKey serverMultisigKey,
                                       Coin value, long expiryTimeInSeconds) throws VerificationException {
        super(wallet, myKey, serverMultisigKey, value, expiryTimeInSeconds);
        checkArgument(value.signum() > 0);
        initWalletListeners();
        this.valueToMe = this.totalValue = checkNotNull(value);
        this.expiryTime = expiryTimeInSeconds;
        stateMachine.transition(State.NEW);
    }

    /** Declares the legal state transitions for the V2 client-side state machine. */
    @Override
    protected Multimap<State, State> getStateTransitions() {
        Multimap<State, State> result = MultimapBuilder.enumKeys(State.class).arrayListValues().build();
        result.put(State.UNINITIALISED, State.NEW);
        result.put(State.UNINITIALISED, State.READY);
        result.put(State.NEW, State.SAVE_STATE_IN_WALLET);
        result.put(State.SAVE_STATE_IN_WALLET, State.PROVIDE_MULTISIG_CONTRACT_TO_SERVER);
        result.put(State.PROVIDE_MULTISIG_CONTRACT_TO_SERVER, State.READY);
        result.put(State.READY, State.EXPIRED);
        result.put(State.READY, State.CLOSED);
        return result;
    }

    @Override
    public int getMajorVersion() {
        return 2;
    }

    /**
     * Builds and funds the CLTV P2SH contract transaction and the client-side time-locked refund
     * transaction, then moves to {@link State#SAVE_STATE_IN_WALLET}.
     *
     * @param userKey the AES key decrypting {@code myKey}, or null if the wallet is not encrypted
     * @throws ValueOutOfRangeException if the channel value is too small to create spendable outputs
     * @throws InsufficientMoneyException if the wallet cannot fund the contract
     */
    @Override
    public synchronized void initiate(@Nullable KeyParameter userKey) throws ValueOutOfRangeException, InsufficientMoneyException {
        final NetworkParameters params = wallet.getParams();
        Transaction template = new Transaction(params);
        // There is also probably a change output, but we don't bother shuffling them as it's obvious from the
        // format which one is the change. If we start obfuscating the change output better in future this may
        // be worth revisiting.
        Script redeemScript =
                ScriptBuilder.createCLTVPaymentChannelOutput(BigInteger.valueOf(expiryTime), myKey, serverKey);
        TransactionOutput transactionOutput = template.addOutput(totalValue,
                ScriptBuilder.createP2SHOutputScript(redeemScript));
        if (transactionOutput.getMinNonDustValue().compareTo(totalValue) > 0)
            throw new ValueOutOfRangeException("totalValue too small to use");
        Wallet.SendRequest req = Wallet.SendRequest.forTx(template);
        req.coinSelector = AllowUnconfirmedCoinSelector.get();
        editContractSendRequest(req);
        req.shuffleOutputs = false;   // TODO: Fix things so shuffling is usable.
        req.aesKey = userKey;
        wallet.completeTx(req);
        Coin multisigFee = req.tx.getFee();
        contract = req.tx;

        // Build a refund transaction that protects us in the case of a bad server that's just trying to cause havoc
        // by locking up peoples money (perhaps as a precursor to a ransom attempt). We time lock it so the server
        // has an assurance that we cannot take back our money by claiming a refund before the channel closes - this
        // relies on the fact that since Bitcoin 0.8 time locked transactions are non-final. This will need to change
        // in future as it breaks the intended design of timelocking/tx replacement, but for now it simplifies this
        // specific protocol somewhat.
        refundTx = new Transaction(params);
        refundTx.addInput(contract.getOutput(0)).setSequenceNumber(0);   // Allow replacement when it's eventually reactivated.
        refundTx.setLockTime(expiryTime);
        if (totalValue.compareTo(Coin.CENT) < 0) {
            // Must pay min fee.
            final Coin valueAfterFee = totalValue.subtract(Transaction.REFERENCE_DEFAULT_MIN_TX_FEE);
            if (Transaction.MIN_NONDUST_OUTPUT.compareTo(valueAfterFee) > 0)
                throw new ValueOutOfRangeException("totalValue too small to use");
            refundTx.addOutput(valueAfterFee, myKey.toAddress(params));
            refundFees = multisigFee.add(Transaction.REFERENCE_DEFAULT_MIN_TX_FEE);
        } else {
            refundTx.addOutput(totalValue, myKey.toAddress(params));
            refundFees = multisigFee;
        }

        TransactionSignature refundSignature =
                refundTx.calculateSignature(0, myKey.maybeDecrypt(userKey),
                        getSignedScript(), Transaction.SigHash.ALL, false);
        refundTx.getInput(0).setScriptSig(ScriptBuilder.createCLTVPaymentChannelP2SHRefund(refundSignature, redeemScript));

        refundTx.getConfidence().setSource(TransactionConfidence.Source.SELF);
        log.info("initiated channel with contract {}", contract.getHashAsString());
        stateMachine.transition(State.SAVE_STATE_IN_WALLET);
        // Client should now call getIncompleteRefundTransaction() and send it to the server.
    }

    @Override
    protected synchronized Coin getValueToMe() {
        return valueToMe;
    }

    protected long getExpiryTime() {
        return expiryTime;
    }

    /**
     * Returns the contract transaction. The first call while in
     * {@link State#PROVIDE_MULTISIG_CONTRACT_TO_SERVER} also advances the machine to {@link State#READY}.
     */
    @Override
    public synchronized Transaction getContract() {
        checkState(contract != null);
        if (stateMachine.getState() == State.PROVIDE_MULTISIG_CONTRACT_TO_SERVER) {
            stateMachine.transition(State.READY);
        }
        return contract;
    }

    @Override
    protected synchronized Transaction getContractInternal() {
        return contract;
    }

    protected synchronized Script getContractScript() {
        return contract.getOutput(0).getScriptPubKey();
    }

    @Override
    protected Script getSignedScript() {
        return ScriptBuilder.createCLTVPaymentChannelOutput(BigInteger.valueOf(expiryTime), myKey, serverKey);
    }

    @Override
    public synchronized Coin getRefundTxFees() {
        checkState(getState().compareTo(State.NEW) > 0);
        return refundFees;
    }

    @VisibleForTesting Transaction getRefundTransaction() {
        return refundTx;
    }

    @Override
    @VisibleForTesting synchronized void doStoreChannelInWallet(Sha256Hash id) {
        StoredPaymentChannelClientStates channels = (StoredPaymentChannelClientStates)
                wallet.getExtensions().get(StoredPaymentChannelClientStates.EXTENSION_ID);
        checkNotNull(channels, "You have not added the StoredPaymentChannelClientStates extension to the wallet.");
        checkState(channels.getChannel(id, contract.getHash()) == null);
        storedChannel = new StoredClientChannel(getMajorVersion(), id, contract, refundTx, myKey, serverKey,
                valueToMe, refundFees, expiryTime, true);
        channels.putChannel(storedChannel);
    }

    @Override
    public Coin getTotalValue() {
        return totalValue;
    }
}
package com.sdeli.imgurian;

import com.nostra13.universalimageloader.core.ImageLoader;
import com.sdeli.imgurian.gallery.Gallery;
import com.sdeli.imgurian.gallery.Gallery.*;
import com.sdeli.imgurian.gallery.GalleryEntry;
import com.sdeli.imgurian.login.ImgurAuthorization;
import com.sdeli.imgurian.login.LoginActivity;

import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.text.InputType;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.EditText;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.Spinner;

import java.util.LinkedHashMap;

/**
 * Main screen of the Imgur client: shows a grid of gallery thumbnails and three
 * spinners (section / sort / time span) that rebuild the gallery URL and reload.
 * Tapping a thumbnail opens {@code MultiEntryFragmentActivity} at that position.
 */
public class MainActivity extends Activity {

    private static final String TAG = MainActivity.class.getSimpleName();
    // NOTE(review): a static Intent referencing this Activity's context outlives the
    // Activity and can leak it across recreations — confirm whether static is needed.
    private static Intent entryIntent;

    private Gallery mGallery;                       // singleton gallery model
    private GalleryAdapter mAdapter;                // backs the thumbnail grid
    private ProgressBar mBar;                       // shown while a load is in flight
    private Spinner mSectionSpinner;
    private ArrayAdapter<Section> mSectionAdapter;
    private Spinner mSortSpinner;
    private Spinner mTimeSpinner;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Derive a column count from screen width vs. density, capped at 5.
        // NOTE(review): widthPixels / densityDpi is evaluated as INTEGER division
        // before the *1.5 — verify the intended formula (possibly w / (dpi*1.5)).
        int columns = (int)(getResources().getDisplayMetrics().widthPixels / getResources().getDisplayMetrics().densityDpi*1.5);
        if(columns > 5){
            columns = 5;
        }

        mAdapter = new GalleryAdapter(this);
        mBar = (ProgressBar)findViewById(R.id.bar);
        entryIntent = new Intent(MainActivity.this, MultiEntryFragmentActivity.class);

        GridView gridView = (GridView) findViewById(R.id.gallery);
        gridView.setNumColumns(columns);
        gridView.setAdapter(mAdapter);
        // Open the detail screen at the clicked grid position.
        gridView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                entryIntent.putExtra("position", position); //Optional parameters
                startActivity(entryIntent);
            }
        });

        mGallery = Gallery.getInstance();

        mSectionSpinner = (Spinner)findViewById(R.id.section_selector);
        mSectionAdapter = new ArrayAdapter<>(getApplicationContext(),android.R.layout.simple_spinner_item, Section.values());
        mSectionAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mSectionSpinner.setAdapter(mSectionAdapter);
        mSortSpinner = (Spinner)findViewById(R.id.sort_selector);
        mTimeSpinner = (Spinner)findViewById(R.id.time_selector);

        final Context context = this;
        // Section changes rebuild the sort list; SUBREDDIT additionally prompts the
        // user for a subreddit name via a dialog before updating the gallery URL.
        // The firstSelection flag suppresses the spurious callback Android fires when
        // a listener is first attached.
        mSectionSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            boolean firstSelection = true;
            private Section section;

            @Override
            public void onItemSelected(final AdapterView<?> parent, View view, final int position, long id) {
                if(!firstSelection) {
                    section = (Section) parent.getItemAtPosition(position);
                    mSortSpinner.setVisibility(View.GONE);
                    mTimeSpinner.setVisibility(View.GONE);
                    setSortList(Sort.values(section));
                    // Clear any previously entered subreddit.
                    Section.SUBREDDIT.setSubreddit(null);
                    switch (section) {
                        case SUBREDDIT:
                            // Ask for the subreddit name, then update the URL on OK.
                            AlertDialog.Builder builder = new AlertDialog.Builder(context);
                            builder.setTitle("Enter Subreddit(i.e. \"funny\")");
                            final EditText input = new EditText(context);
                            input.setInputType(InputType.TYPE_CLASS_TEXT);
                            builder.setView(input);
                            builder.setPositiveButton("OK", new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    String subReddit = "r/" + input.getText().toString() + "/";
                                    section.setSubreddit(subReddit);
                                    mSectionAdapter.notifyDataSetChanged();
                                    mGallery.updateUrl(Section.SUBREDDIT, (Sort) mSortSpinner.getSelectedItem(),
                                            (TimeSpan) mTimeSpinner.getSelectedItem(), subReddit);
                                }
                            });
                            builder.show();
                            break;
                        default:
                            mGallery.updateUrl(section, (Sort) mSortSpinner.getSelectedItem(),
                                    (TimeSpan) mTimeSpinner.getSelectedItem(), section.getSubreddit());
                    }
                    refresh();
                }else{
                    // Initial automatic selection: set up the sort list only.
                    section = (Section) parent.getItemAtPosition(position);
                    mSortSpinner.setVisibility(View.GONE);
                    mTimeSpinner.setVisibility(View.GONE);
                    setSortList(Sort.values(section));
                    firstSelection = false;
                }
            }

            @Override
            public void onNothingSelected(AdapterView<?> parent) {
            }

            // Replace the sort spinner's contents; hide it when no sorts apply.
            private void setSortList(Sort[] sortArray){
                ArrayAdapter<Sort> adapter = new ArrayAdapter<>(getApplicationContext(),
                        android.R.layout.simple_spinner_item, sortArray);
                adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
                mSortSpinner.setAdapter(adapter);
                if(sortArray.length > 0) {
                    mSortSpinner.setVisibility(View.VISIBLE);
                }
            }
        });

        // Sort changes rebuild the time-span list and show it only for sorts where a
        // time window is meaningful (MOST_VIEWED / HIGHEST_SCORING).
        mSortSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            boolean firstSelection = true;

            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                if(!firstSelection) {
                    Section section = (Section)mSectionSpinner.getSelectedItem();
                    Sort sort = (Sort)parent.getItemAtPosition(position);
                    ArrayAdapter<TimeSpan> timeAdapter = new ArrayAdapter<>(getApplicationContext(),
                            android.R.layout.simple_spinner_item, TimeSpan.values());
                    timeAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
                    mTimeSpinner.setAdapter(timeAdapter);
                    mGallery.updateUrl(section, sort, (TimeSpan)mTimeSpinner.getSelectedItem(), section.getSubreddit());
                    switch(sort){
                        case MOST_VIEWED:
                        case HIGHEST_SCORING:
                            mTimeSpinner.setVisibility(View.VISIBLE);
                            break;
                        default:
                            mTimeSpinner.setVisibility(View.GONE);
                    }
                    refresh();
                }else{
                    firstSelection = false;
                }
            }

            @Override
            public void onNothingSelected(AdapterView<?> parent) {
            }
        });

        // Time-span changes just rebuild the URL and reload.
        mTimeSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            boolean firstSelection = true;

            @Override
            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
                if(!firstSelection) {
                    Section section = (Section)mSectionSpinner.getSelectedItem();
                    Sort sort = (Sort)mSortSpinner.getSelectedItem();
                    TimeSpan timeSpan = (TimeSpan)parent.getItemAtPosition(position);
                    mGallery.updateUrl(section, sort, timeSpan, section.getSubreddit());
                    refresh();
                }else{
                    firstSelection = false;
                }
            }

            @Override
            public void onNothingSelected(AdapterView<?> parent) {
            }
        });

        // Initial load of the thumbnail grid.
        mGallery.getThumbnails(new Gallery.GalleryLoadCallback() {
            @Override
            public void success(LinkedHashMap<String, GalleryEntry> entries) {
                mBar.setVisibility(View.GONE);
                mAdapter.addAll(entries.values());
            }

            @Override
            public void failure(Exception exception) {
                Log.e(TAG, exception.getMessage(), exception);
            }

            @Override
            public void ignored(String reason) {
                Log.w(TAG, reason);
            }
        });
    }

    /** Adds the Refresh (id 0) and Settings (id 1) action-bar items. */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        menu.add(Menu.NONE, 0, Menu.NONE, "Refresh").setIcon(R.drawable.refresh_icon)
                .setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
        menu.add(Menu.NONE, 1, Menu.NONE, "Settings").setIcon(R.drawable.three_lines)
                .setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
        return super.onCreateOptionsMenu(menu);
    }

    /** Refresh reloads the gallery; Settings toggles login/logout. */
    @Override
    public boolean onOptionsItemSelected(MenuItem item){
        switch (item.getItemId()) {
            case 0:
                refresh();
                return true;
            case 1:
                if(!ImgurAuthorization.getInstance().isLoggedIn()) {
                    Intent intent = new Intent(MainActivity.this, LoginActivity.class);
                    MainActivity.this.startActivity(intent);
                }else{
                    ImgurAuthorization.getInstance().logout();
                }
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    protected void onPause(){
        super.onPause();
        // Persist spinner/gallery settings when leaving the screen.
        mGallery.saveSettings();
    }

    /** On regaining focus, resync the grid with the gallery's current entries. */
    @Override
    public void onWindowFocusChanged(boolean hasFocus){
        if(hasFocus){
            mAdapter.clear();
            mAdapter.addAll(mGallery.getEntries().values());
            mAdapter.notifyDataSetChanged();
        }
    }

    /** Clears the grid, shows the progress bar, and reloads the gallery. */
    private void refresh(){
        mAdapter.clear();
        mBar.setVisibility(View.VISIBLE);
        mGallery.loadGallery(new Gallery.GalleryLoadCallback() {
            @Override
            public void success(LinkedHashMap<String, GalleryEntry> entries) {
                mBar.setVisibility(View.GONE);
                mAdapter.addAll(entries.values());
            }

            @Override
            public void failure(Exception exception) {
                Log.e(TAG, exception.getMessage(), exception);
            }

            @Override
            public void ignored(String reason) {
                Log.w(TAG, reason);
            }
        });
    }

    /** View-holder for one grid cell (avoids repeated findViewById in getView). */
    static class ViewHolder {
        RelativeLayout background;
        ImageView icon;
        ImageView cornerTab;
        ImageView favoriteIcon;
    }

    /** Grid adapter: renders a thumbnail with vote/favorite decorations per entry. */
    private class GalleryAdapter extends ArrayAdapter<GalleryEntry> {

        public GalleryAdapter(Context context) {
            super(context, 0);
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            ViewHolder holder;
            if (convertView == null) {
                convertView = getLayoutInflater().inflate(R.layout.gallery_icon, null);
                holder = new ViewHolder();
                holder.background = (RelativeLayout) convertView.findViewById(R.id.background);
                holder.icon = (ImageView) convertView.findViewById(R.id.image);
                holder.cornerTab = (ImageView) convertView.findViewById(R.id.cornerTab);
                holder.favoriteIcon = (ImageView) convertView.findViewById(R.id.favoriteIcon);
                convertView.setTag(holder);
            }else{
                holder = (ViewHolder)convertView.getTag();
            }
            GalleryEntry entry = getItem(position);
            // Reset recycled-view state before applying this entry's decorations.
            holder.cornerTab.setVisibility(View.GONE);
            holder.icon.setBackgroundResource(R.drawable.border_light_grey);
            // Green tab/border for an upvote, red for a downvote; "veto" means no mark.
            if(entry.vote != null && !entry.vote.contentEquals("veto")) {
                if (entry.vote.contentEquals("up")) {
                    holder.cornerTab.setVisibility(View.VISIBLE);
                    holder.cornerTab.setImageResource(R.drawable.green_corner_tab);
                    holder.icon.setBackgroundResource(R.drawable.border_green);
                } else if (entry.vote.contentEquals("down")) {
                    holder.cornerTab.setVisibility(View.VISIBLE);
                    holder.cornerTab.setImageResource(R.drawable.red_corner_tab);
                    holder.icon.setBackgroundResource(R.drawable.border_red);
                }
            }
            holder.favoriteIcon.setVisibility(View.GONE);
            if(entry.favorite){
                holder.favoriteIcon.setVisibility(View.VISIBLE);
            }
            ImageLoader.getInstance()
                    .displayImage(getItem(position).thumbnail, holder.icon);
            /*if(position == getCount()-1){
                Gallery.getInstance().addPage(new Gallery.GalleryLoadCallback() {
                    @Override
                    public void success(List<GalleryEntry> entries) {
                        addAll(entries);
                    }
                    @Override
                    public void failure(Exception exception) {
                        Log.e(TAG, exception.getMessage(), exception);
                    }
                    @Override
                    public void ignored(String reason) {
                        Log.w(TAG, reason);
                    }
                });
            }*/
            return convertView;
        }
    }
}
package team122;

import java.util.ArrayList;
import java.util.Random;

import team122.sys.Navigation;
import team122.sys.Radio;
import team122.sys.Weapons;
import battlecode.common.Clock;
import battlecode.common.Direction;
import battlecode.common.GameActionException;
import battlecode.common.GameConstants;
import battlecode.common.MapLocation;
import battlecode.common.Robot;
import battlecode.common.RobotController;
import battlecode.common.RobotType;
import battlecode.common.Team;

/**
 * Team JIMMY WU presents...
 *
 * Entry point for every robot. run(rc) dispatches on robot type and on a
 * team-memory flag selecting the "dumb" fallback strategy vs. the real one.
 *
 * @author zrneely baxe hongyis
 */
public class RobotPlayer {

	private static Random randall = new Random();
	private static Direction toEnemyHQ;          // direction from our HQ toward the enemy HQ
	private static Team us;
	private static Team them;
	private static MapLocation theirHQ;
	private static ArrayList<MapLocation> pathToTarget;  // path received from HQ over radio
	private static boolean hasPath = false;
	private static int job = C.JOB_MOVE_ABOUT;   // current job for soldier robots

	// JOB-SPECIFIC VARIABLES //
	private static double mostRecentCowValue;
	private static MapLocation twinPastrLoc;     // PASTR a noise tower herds toward
	// private static boolean shouldBuildNT = false;
	private static int enemyPastrCountWhenAttack;
	private static boolean needsSecondaryPath;

	// HQ specific variables
	private static int accountedPastrs; // Send a NT to shoot at it until we can blow it up

	/**
	 * Main loop entry point, called once per robot by the engine.
	 * Never returns; each branch ends in an infinite yield loop.
	 */
	public static void run(RobotController rc) {
		theirHQ = rc.senseEnemyHQLocation();
		// rallyPoint = LocationUtils.mlDivide(LocationUtils.mlAdd(theirHQ,
		// rc.senseHQLocation()), 2);
		toEnemyHQ = rc.senseHQLocation().directionTo(rc.senseEnemyHQLocation());
		us = rc.getTeam();
		them = us.opponent();
		// You've taken all my entropy. CURSES
		randall.setSeed(rc.getRobot().getID() ^ rc.getControlBits() ^ randall.nextInt());

		if (rc.getTeamMemory()[0] == 0x11) {
			// Use the dumb strategy
			if (rc.getType() == RobotType.HQ) {
				Direction spawnDir = Direction.NORTH;
				while (true) {
					try {
						if (rc.isActive()) {
							if (rc.canMove(spawnDir)) {
								rc.spawn(spawnDir);
							}
							// FIX: Direction.rotateRight() returns a new value (it does
							// not mutate, as the assignment in spawnSoldier() shows);
							// the original discarded the result, so the HQ only ever
							// tried to spawn NORTH.
							spawnDir = spawnDir.rotateRight();
							Weapons.shootNearby(rc, false);
							rc.yield();
						}
					} catch (GameActionException ex) {
						ex.printStackTrace();
						rc.yield();
					}
				}
			} else if (rc.getType() == RobotType.NOISETOWER) {
				// Herd cows toward our HQ forever.
				while (true) {
					try {
						if (rc.isActive()) {
							Weapons.noiseTowerHerdTowards(rc.senseHQLocation(), rc);
							rc.yield();
						}
					} catch (GameActionException ex) {
						ex.printStackTrace();
						rc.yield();
					}
				}
			} else {
				// Construct! Soldiers diagonal to HQ become noise towers, others PASTRs.
				try {
					if (rc.getLocation().directionTo(rc.senseHQLocation())
							.isDiagonal()) {
						rc.construct(RobotType.NOISETOWER);
						rc.yield();
					} else {
						rc.construct(RobotType.PASTR);
						rc.yield();
					}
					while (true) {
						rc.yield();
					}
				} catch (GameActionException ex) {
					ex.printStackTrace();
					rc.yield();
				}
			}
		} else {
			// Use the real strategy
			if (rc.getType() == RobotType.HQ) {
				Navigation.initialize(rc);
				Radio.intitialize(rc);
				while (true) {
					try {
						spawnSoldier(rc);
						// if(!hasPath){
						// pathToTarget =
						// Navigation.pathTo(rc.senseHQLocation().add(toEnemyHQ),
						// rallyPoint, 100000, true);
						// //rc.setIndicatorString(0, "path found");
						// //Radio.broadcastPath(rc, pathToTarget);
						// hasPath = true;
						// }
						// On request, compute a path to the closest enemy PASTR and
						// broadcast it to the soldiers.
						MapLocation l = Radio.isNeedPathFromHQ(rc);
						if (l != null && Radio.getTheirPastrCount(rc) >= 1 && !hasPath) {
							hasPath = false;
							pathToTarget = Navigation.pathTo(l,
									Navigation.findClosestEnemyPastr(rc), 100000, false);
							Radio.broadcastPath(rc, pathToTarget);
							Radio.setNeedPathFromHQ(rc, null);
							hasPath = true;
						}
						Radio.setOwnPastrCount(rc);
						Radio.setTheirPastrCount(rc);
						// The HQ can shoot too! But it can't move.
						Weapons.shootNearby(rc, false);
						rc.yield();
					} catch (GameActionException ex) {
						ex.printStackTrace();
						rc.yield();
					}
				}
			} else if (rc.getType() == RobotType.SOLDIER) {
				while (true) {
					try {
						// Radio.maintainRobotMap(rc);
						ensureHasJob(rc);
						// Pick up a broadcast path; the last element becomes the goal.
						if (!hasPath && Radio.pathAvailable(rc)) {
							pathToTarget = Radio.recievePath(rc);
							Navigation.setGoal(pathToTarget.remove(pathToTarget.size() - 1));
							hasPath = true;
						}
						doJob(rc);
						rc.yield();
					} catch (GameActionException ex) {
						ex.printStackTrace();
						rc.yield();
					}
				}
			} else if (rc.getType() == RobotType.NOISETOWER) {
				// Find the twin PASTR next to us (fall back to our own location).
				twinPastrLoc = rc.getLocation();
				try {
					for (Robot r : rc.senseNearbyGameObjects(Robot.class, 2, us)) {
						if (rc.senseRobotInfo(r).type == RobotType.PASTR) {
							twinPastrLoc = rc.senseRobotInfo(r).location;
							break;
						}
					}
				} catch (GameActionException e) {
					e.printStackTrace();
				}
				while (true) {
					if (rc.isActive()) {
						try {
							if (Weapons.noiseTowerHerdTowards(twinPastrLoc, rc)) {
								rc.yield();
								continue;
							}
						} catch (GameActionException e) {
							e.printStackTrace();
						}
					}
					rc.yield();
				}
			} else if (rc.getType() == RobotType.PASTR) {
				// PASTRs take no actions; just burn turns.
				while (true) {
					rc.yield();
				}
			}
		}
	}

	// private static MapLocation getOptimalPastrLoc(RobotController rc,
	// MapLocation[] candidates) {
	// MapLocation optimal = null;
	// int maxScore = 0;
	// int score = 0;
	// for(MapLocation l : candidates){
	// score = 0;
	// score += Navigation.cowMap[l.x][l.y] * 30.0; // More cows = more good
	// score += l.distanceSquaredTo(theirHQ) * 0.1; // Far away = good
	// score -= Navigation.adjacentWalls(rc, l) * 2.0; // Walls next to us = bad
	// if(score > maxScore){
	// optimal = l;
	// maxScore = score;
	// }
	// }
	// System.out.println("Optimal PASTR location: " +
	// LocationUtils.locationToInt(optimal) + " with score " + maxScore);
	// return optimal == null ? rc.senseHQLocation() : optimal;
	// }

	/**
	 * Spawns a soldier in the first free direction (rotating from toEnemyHQ by
	 * C.ROT_TURNS), then broadcasts the job the new robot should take based on
	 * the current robot count.
	 */
	private static void spawnSoldier(RobotController rc)
			throws GameActionException {
		if (rc.isActive() && rc.senseRobotCount() < GameConstants.MAX_ROBOTS) {
			boolean foundSpot = false;
			Direction spawnDir = toEnemyHQ;
			for (int i : C.ROT_TURNS) {
				for (int j = 0; j < i; j++) {
					spawnDir = spawnDir.rotateLeft();
				}
				if (rc.canMove(spawnDir)) {
					foundSpot = true;
					break;
				}
			}
			if (foundSpot) {
				rc.spawn(spawnDir);
			}
			// The first robot should look for a good spot -zrneely
			if (rc.senseRobotCount() == 0) {
				Radio.setJobNeeded(rc, C.JOB_FIND_PASTR_LOC);
			}
			// The second robot should follow it and build a noisetower
			else if (rc.senseRobotCount() == 1) {
				Radio.setJobNeeded(rc, C.JOB_FOLLOW_AND_BUILD_NOISETOWER);
			}
			// else if(rc.sensePastrLocations(them).length > accountedPastrs){
			// Radio.setJobNeeded(rc, C.JOB_BUILD_NOISETOWER_FOR_PASTR);
			// Radio.setNTPastrTarget(rc, getNewPastrLoc()); // TODO
			// accountedPastrs++;
			// }
			// All other robots should swarm there, for now
			else {
				Radio.setJobNeeded(rc, C.JOB_SWARM_AT_INITIAL_PASTR);
			}
		}
	}

	/**
	 * If this soldier is idle (JOB_MOVE_ABOUT) and the HQ has broadcast a job,
	 * adopt it and clear the broadcast slot.
	 */
	private static void ensureHasJob(RobotController rc)
			throws GameActionException {
		byte needed = Radio.getJobNeeded(rc);
		if (job == C.JOB_MOVE_ABOUT && needed != C.JOB_MOVE_ABOUT) {
			// Radio.setJob(rc, needed);
			job = needed;
			Radio.setJobNeeded(rc, (byte) -1);
			// nextJob = C.JOB_MOVE_ABOUT;
			return;
		}
	}

	/** Executes one turn of the soldier's current job. No-op while inactive. */
	private static void doJob(RobotController rc) throws GameActionException {
		if (!rc.isActive())
			return;
		switch (job) {
		case C.JOB_ATTACK_IN_SQUAD: {
			rc.setIndicatorString(0, "attack in squad");
			squadAttack(rc);
			break;
		}
		case C.JOB_DEFEND_PASTR: {
			rc.setIndicatorString(0, "defend pastr");
			defendPastr(rc);
			break;
		}
		case C.JOB_MOVE_ABOUT: {
			rc.setIndicatorString(0, "move randomly");
			Direction d = Direction.values()[randall.nextInt(8)];
			if (rc.canMove(d)) {
				rc.move(d);
			}
			break;
		}
		case C.JOB_SELF_DESTRUCT: {
			rc.setIndicatorString(0, "self-destruct");
			rc.selfDestruct();
			break;
		}
		case C.JOB_FIND_PASTR_LOC: {
			rc.setIndicatorString(0, "find pastr loc");
			// double adjCows = 0.0d;
			// double cowCount = rc.senseCowsAtLocation(rc.getLocation());
			// double mrcv = mostRecentCowValue;
			// mostRecentCowValue = cowCount;
			// if(Weapons.shootNearby(rc, true)){
			// return; // Moved already, we're done, but still record the cow count
			// }
			// adjCows = cowCount + (Clock.getRoundNum() * 2);
			// // On a wall = bad (harder to mine)
			// int adjWal = Navigation.adjacentWalls(rc, rc.getLocation());
			// adjCows -= adjWal * 50; // any nearby enemies = bad too TODO
			// if(adjCows > C.MIN_ADJUSTED_COWS_AROUND_PASTR){
			// rc.construct(RobotType.PASTR);
			// }
			Radio.setInitialPastrLocation(rc, rc.getLocation());
			// directionToMostCows encodes sneak-vs-move in the sign; OMNI means
			// "here is best".
			int d = Navigation.directionToMostCows(rc);
			if (Math.abs(d) == Direction.OMNI.ordinal()
					&& rc.senseCowsAtLocation(rc.getLocation()) > C.MIN_COWS) {
				Radio.setInitialPastrReady(rc);
				rc.construct(RobotType.PASTR);
				return;
			}
			if (d < 0) {
				Navigation.tryToSneak(rc, Direction.values()[-d]);
			} else {
				Navigation.tryToMove(rc, Direction.values()[d]);
			}
			// TODO
			break;
		}
		case C.JOB_FOLLOW_AND_BUILD_NOISETOWER: {
			rc.setIndicatorString(0, "follow and build nt");
			// Build once adjacent to the chosen PASTR site.
			if (Radio.isInitialPastrReady(rc)
					&& rc.getLocation().distanceSquaredTo(
							Radio.getInitialPastrLocation(rc)) <= 1) {
				rc.construct(RobotType.NOISETOWER);
				return;
			}
			Navigation.tryToMove(
					rc,
					rc.getLocation().directionTo(
							Radio.getInitialPastrLocation(rc)));
			break;
		}
		case C.JOB_SWARM_AT_INITIAL_PASTR: {
			rc.setIndicatorString(0, "swarm at initial pastr");
			// Shoot if possible; otherwise close in on the initial PASTR.
			if (!Weapons.shootNearby(rc)) {
				Navigation.tryToMove(
						rc,
						rc.getLocation().directionTo(
								Radio.getInitialPastrLocation(rc)), true);
			}
			rc.setIndicatorString(1, shouldAttack(rc) ? "attack" : "noattack");
			if (shouldAttack(rc)) {
				Radio.setNeedPathFromHQ(rc, rc.getLocation());
				job = C.JOB_ATTACK_IN_SQUAD;
				enemyPastrCountWhenAttack = Radio.getTheirPastrCount(rc);
				Navigation.setGoal(Navigation.findClosestEnemyPastr(rc));
			}
			break;
		}
		}
	}

	/**
	 * Decides whether the swarm should switch to attacking, based on our robot
	 * count (minus 5 presumed non-combatants), PASTR counts on both sides, the
	 * round number, and the enemy's milk quantity.
	 */
	private static boolean shouldAttack(RobotController rc)
			throws GameActionException {
		int robotCount = rc.senseRobotCount() - 5; // TODO not necessarily 5
		int ownPastrCount = Radio.getOwnPastrCount(rc);
		int enemyPastrCount = Radio.getTheirPastrCount(rc);
		if (robotCount <= C.MIN_ROBOTS_FOR_SWARM) { // if team has <=3 robots,
													// it wont swarm
			return false;
		} else if (ownPastrCount > 1) { // if we have enough robots and I have
										// any pastrs build, swarm
			return true;
		} else if (enemyPastrCount == 0) { // if I dont have pastrs and he
											// doesn't have pastrs, don't swarm
											// unless
			return (robotCount >= 17 || 4 * Clock.getRoundNum() >= 3 * GameConstants.ROUND_MAX_LIMIT);
		} else {
			// given I dont have pastrs, he has pastrs, swarm if they have
			// a milk quantity or I am above necessary swarm count
			return (robotCount > C.MIN_ROBOTS_FOR_SWARM || rc
					.senseTeamMilkQuantity(them) > GameConstants.WIN_QTY / 8);
		}
	}

	/** Defends a PASTR. Not yet implemented. */
	private static void defendPastr(RobotController rc) {
		// TODO
	}

	private static boolean hasSentRequest = false;

	/**
	 * One turn of squad-attack behavior: shoot first if possible, otherwise pick
	 * up any broadcast path and step toward the goal.
	 */
	private static void squadAttack(RobotController rc)
			throws GameActionException {
		// First, shoot things
		if (Weapons.shootNearby(rc))
			// We already took an action this turn, we're done.
			return;
		if (!hasPath && Radio.pathAvailable(rc)) {
			pathToTarget = Radio.recievePath(rc);
			enemyPastrCountWhenAttack = Radio.getTheirPastrCount(rc);
			hasPath = true;
			hasSentRequest = false;
		}
		// We might have killed it
		// if(hasPath & (Radio.getTheirPastrCount(rc) <
		// enemyPastrCountWhenAttack)){
		// if(!hasSentRequest && (Radio.getTheirPastrCount(rc) <
		// enemyPastrCountWhenAttack)){
		// Radio.setNeedPathFromHQ(rc, rc.getLocation());
		// hasSentRequest = true;
		// hasPath = false;
		// }
		// // Follow the path, with a little randomness, actually no randomness
		// Direction move = Navigation.getNextDirection(rc, pathToTarget);
		// //Direction move =
		// Direction.values()[(Navigation.getNextDirection(rc, pathToEnemyHQ,
		// C.COARSENESS).ordinal() + 7 + randall.nextInt(2)) % 8];
		// if(move == Direction.NONE);
		// return;
		// }
		Direction move = Navigation.getNextDirection(rc, /*
														 * hasPath ?
														 * pathToTarget : null
														 */null); // Move directly to the goal
		if (move == Direction.NONE)
			return;
		Navigation.tryToMove(rc, move);
	}
}
package org.keycloak.federation.ldap; import org.jboss.logging.Logger; import org.keycloak.Config; import org.keycloak.federation.kerberos.CommonKerberosConfig; import org.keycloak.federation.kerberos.impl.KerberosServerSubjectAuthenticator; import org.keycloak.federation.kerberos.impl.KerberosUsernamePasswordAuthenticator; import org.keycloak.federation.kerberos.impl.SPNEGOAuthenticator; import org.keycloak.federation.ldap.idm.model.LDAPObject; import org.keycloak.federation.ldap.idm.query.Condition; import org.keycloak.federation.ldap.idm.query.QueryParameter; import org.keycloak.federation.ldap.idm.query.internal.LDAPQuery; import org.keycloak.federation.ldap.idm.query.internal.LDAPQueryConditionsBuilder; import org.keycloak.federation.ldap.idm.store.ldap.LDAPIdentityStore; import org.keycloak.federation.ldap.mappers.FullNameLDAPFederationMapper; import org.keycloak.federation.ldap.mappers.FullNameLDAPFederationMapperFactory; import org.keycloak.federation.ldap.mappers.LDAPFederationMapper; import org.keycloak.federation.ldap.mappers.UserAttributeLDAPFederationMapper; import org.keycloak.federation.ldap.mappers.UserAttributeLDAPFederationMapperFactory; import org.keycloak.models.KeycloakSession; import org.keycloak.models.KeycloakSessionFactory; import org.keycloak.models.KeycloakSessionTask; import org.keycloak.models.LDAPConstants; import org.keycloak.models.ModelDuplicateException; import org.keycloak.models.ModelException; import org.keycloak.models.RealmModel; import org.keycloak.models.UserFederationEventAwareProviderFactory; import org.keycloak.models.UserFederationMapperModel; import org.keycloak.models.UserFederationProvider; import org.keycloak.models.UserFederationProviderModel; import org.keycloak.models.UserFederationSyncResult; import org.keycloak.models.UserModel; import org.keycloak.models.utils.KeycloakModelUtils; import java.util.Collections; import java.util.Date; import java.util.List; import java.util.Set; /** * @author <a 
href="mailto:mposolda@redhat.com">Marek Posolda</a> * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a> * @version $Revision: 1 $ */ public class LDAPFederationProviderFactory extends UserFederationEventAwareProviderFactory { private static final Logger logger = Logger.getLogger(LDAPFederationProviderFactory.class); public static final String PROVIDER_NAME = LDAPConstants.LDAP_PROVIDER; private LDAPIdentityStoreRegistry ldapStoreRegistry; @Override public UserFederationProvider create(KeycloakSession session) { throw new IllegalAccessError("Illegal to call this method"); } @Override public LDAPFederationProvider getInstance(KeycloakSession session, UserFederationProviderModel model) { LDAPIdentityStore ldapIdentityStore = this.ldapStoreRegistry.getLdapStore(model); return new LDAPFederationProvider(this, session, model, ldapIdentityStore); } @Override public void init(Config.Scope config) { this.ldapStoreRegistry = new LDAPIdentityStoreRegistry(); } @Override public void close() { this.ldapStoreRegistry = null; } @Override public String getId() { return PROVIDER_NAME; } @Override public Set<String> getConfigurationOptions() { return Collections.emptySet(); } // Best effort to create appropriate mappers according to our LDAP config @Override public void onProviderModelCreated(RealmModel realm, UserFederationProviderModel newProviderModel) { LDAPConfig ldapConfig = new LDAPConfig(newProviderModel.getConfig()); boolean activeDirectory = ldapConfig.isActiveDirectory(); UserFederationProvider.EditMode editMode = ldapConfig.getEditMode(); String readOnly = String.valueOf(editMode == UserFederationProvider.EditMode.READ_ONLY || editMode == UserFederationProvider.EditMode.UNSYNCED); String usernameLdapAttribute = ldapConfig.getUsernameLdapAttribute(); String alwaysReadValueFromLDAP = String.valueOf(editMode==UserFederationProvider.EditMode.READ_ONLY || editMode== UserFederationProvider.EditMode.WRITABLE); UserFederationMapperModel mapperModel; mapperModel = 
KeycloakModelUtils.createUserFederationMapperModel("username", newProviderModel.getId(), UserAttributeLDAPFederationMapperFactory.PROVIDER_ID, UserAttributeLDAPFederationMapper.USER_MODEL_ATTRIBUTE, UserModel.USERNAME, UserAttributeLDAPFederationMapper.LDAP_ATTRIBUTE, usernameLdapAttribute, UserAttributeLDAPFederationMapper.READ_ONLY, readOnly, UserAttributeLDAPFederationMapper.ALWAYS_READ_VALUE_FROM_LDAP, "false", UserAttributeLDAPFederationMapper.IS_MANDATORY_IN_LDAP, "true"); realm.addUserFederationMapper(mapperModel); // CN is typically used as RDN for Active Directory deployments if (ldapConfig.getRdnLdapAttribute().equalsIgnoreCase(LDAPConstants.CN)) { if (usernameLdapAttribute.equalsIgnoreCase(LDAPConstants.CN)) { // For AD deployments with "cn" as username, we will map "givenName" to first name mapperModel = KeycloakModelUtils.createUserFederationMapperModel("first name", newProviderModel.getId(), UserAttributeLDAPFederationMapperFactory.PROVIDER_ID, UserAttributeLDAPFederationMapper.USER_MODEL_ATTRIBUTE, UserModel.FIRST_NAME, UserAttributeLDAPFederationMapper.LDAP_ATTRIBUTE, LDAPConstants.GIVENNAME, UserAttributeLDAPFederationMapper.READ_ONLY, readOnly, UserAttributeLDAPFederationMapper.ALWAYS_READ_VALUE_FROM_LDAP, alwaysReadValueFromLDAP, UserAttributeLDAPFederationMapper.IS_MANDATORY_IN_LDAP, "true"); realm.addUserFederationMapper(mapperModel); } else { if (editMode == UserFederationProvider.EditMode.WRITABLE) { // For AD deployments with "sAMAccountName" as username and writable, we need to map "cn" as username as well (this is needed so we can register new users from KC into LDAP) and we will map "givenName" to first name. 
mapperModel = KeycloakModelUtils.createUserFederationMapperModel("first name", newProviderModel.getId(), UserAttributeLDAPFederationMapperFactory.PROVIDER_ID, UserAttributeLDAPFederationMapper.USER_MODEL_ATTRIBUTE, UserModel.FIRST_NAME, UserAttributeLDAPFederationMapper.LDAP_ATTRIBUTE, LDAPConstants.GIVENNAME, UserAttributeLDAPFederationMapper.READ_ONLY, readOnly, UserAttributeLDAPFederationMapper.ALWAYS_READ_VALUE_FROM_LDAP, alwaysReadValueFromLDAP, UserAttributeLDAPFederationMapper.IS_MANDATORY_IN_LDAP, "true"); realm.addUserFederationMapper(mapperModel); mapperModel = KeycloakModelUtils.createUserFederationMapperModel("username-cn", newProviderModel.getId(), UserAttributeLDAPFederationMapperFactory.PROVIDER_ID, UserAttributeLDAPFederationMapper.USER_MODEL_ATTRIBUTE, UserModel.USERNAME, UserAttributeLDAPFederationMapper.LDAP_ATTRIBUTE, LDAPConstants.CN, UserAttributeLDAPFederationMapper.READ_ONLY, readOnly, UserAttributeLDAPFederationMapper.ALWAYS_READ_VALUE_FROM_LDAP, "false", UserAttributeLDAPFederationMapper.IS_MANDATORY_IN_LDAP, "true"); realm.addUserFederationMapper(mapperModel); } else { // For read-only LDAP, we map "cn" as full name mapperModel = KeycloakModelUtils.createUserFederationMapperModel("full name", newProviderModel.getId(), FullNameLDAPFederationMapperFactory.PROVIDER_ID, FullNameLDAPFederationMapper.LDAP_FULL_NAME_ATTRIBUTE, LDAPConstants.CN, UserAttributeLDAPFederationMapper.READ_ONLY, readOnly); realm.addUserFederationMapper(mapperModel); } } } else { mapperModel = KeycloakModelUtils.createUserFederationMapperModel("first name", newProviderModel.getId(), UserAttributeLDAPFederationMapperFactory.PROVIDER_ID, UserAttributeLDAPFederationMapper.USER_MODEL_ATTRIBUTE, UserModel.FIRST_NAME, UserAttributeLDAPFederationMapper.LDAP_ATTRIBUTE, LDAPConstants.CN, UserAttributeLDAPFederationMapper.READ_ONLY, readOnly, UserAttributeLDAPFederationMapper.ALWAYS_READ_VALUE_FROM_LDAP, alwaysReadValueFromLDAP, 
// NOTE(review): this excerpt begins mid-method — the statements below are the tail of the
// method (started before this chunk) that registers the built-in LDAP attribute mappers
// for a newly created user-federation provider.
            UserAttributeLDAPFederationMapper.IS_MANDATORY_IN_LDAP, "true");
            realm.addUserFederationMapper(mapperModel);
        }

        // Map the Keycloak last-name attribute onto the LDAP "sn" attribute.
        mapperModel = KeycloakModelUtils.createUserFederationMapperModel("last name", newProviderModel.getId(), UserAttributeLDAPFederationMapperFactory.PROVIDER_ID,
                UserAttributeLDAPFederationMapper.USER_MODEL_ATTRIBUTE, UserModel.LAST_NAME,
                UserAttributeLDAPFederationMapper.LDAP_ATTRIBUTE, LDAPConstants.SN,
                UserAttributeLDAPFederationMapper.READ_ONLY, readOnly,
                UserAttributeLDAPFederationMapper.ALWAYS_READ_VALUE_FROM_LDAP, alwaysReadValueFromLDAP,
                UserAttributeLDAPFederationMapper.IS_MANDATORY_IN_LDAP, "true");
        realm.addUserFederationMapper(mapperModel);

        // Email is optional in LDAP and, unlike the name attributes, is never re-read from LDAP
        // after the initial import (ALWAYS_READ_VALUE_FROM_LDAP is hard-coded to "false").
        mapperModel = KeycloakModelUtils.createUserFederationMapperModel("email", newProviderModel.getId(), UserAttributeLDAPFederationMapperFactory.PROVIDER_ID,
                UserAttributeLDAPFederationMapper.USER_MODEL_ATTRIBUTE, UserModel.EMAIL,
                UserAttributeLDAPFederationMapper.LDAP_ATTRIBUTE, LDAPConstants.EMAIL,
                UserAttributeLDAPFederationMapper.READ_ONLY, readOnly,
                UserAttributeLDAPFederationMapper.ALWAYS_READ_VALUE_FROM_LDAP, "false",
                UserAttributeLDAPFederationMapper.IS_MANDATORY_IN_LDAP, "false");
        realm.addUserFederationMapper(mapperModel);

        // Active Directory exposes creation/modification times through the non-standard
        // whenCreated/whenChanged attributes instead of createTimestamp/modifyTimestamp.
        String createTimestampLdapAttrName = activeDirectory ? "whenCreated" : LDAPConstants.CREATE_TIMESTAMP;
        String modifyTimestampLdapAttrName = activeDirectory ? "whenChanged" : LDAPConstants.MODIFY_TIMESTAMP;

        // map createTimeStamp as read-only
        mapperModel = KeycloakModelUtils.createUserFederationMapperModel("creation date", newProviderModel.getId(), UserAttributeLDAPFederationMapperFactory.PROVIDER_ID,
                UserAttributeLDAPFederationMapper.USER_MODEL_ATTRIBUTE, LDAPConstants.CREATE_TIMESTAMP,
                UserAttributeLDAPFederationMapper.LDAP_ATTRIBUTE, createTimestampLdapAttrName,
                UserAttributeLDAPFederationMapper.READ_ONLY, "true",
                UserAttributeLDAPFederationMapper.ALWAYS_READ_VALUE_FROM_LDAP, alwaysReadValueFromLDAP,
                UserAttributeLDAPFederationMapper.IS_MANDATORY_IN_LDAP, "false");
        realm.addUserFederationMapper(mapperModel);

        // map modifyTimeStamp as read-only
        mapperModel = KeycloakModelUtils.createUserFederationMapperModel("modify date", newProviderModel.getId(), UserAttributeLDAPFederationMapperFactory.PROVIDER_ID,
                UserAttributeLDAPFederationMapper.USER_MODEL_ATTRIBUTE, LDAPConstants.MODIFY_TIMESTAMP,
                UserAttributeLDAPFederationMapper.LDAP_ATTRIBUTE, modifyTimestampLdapAttrName,
                UserAttributeLDAPFederationMapper.READ_ONLY, "true",
                UserAttributeLDAPFederationMapper.ALWAYS_READ_VALUE_FROM_LDAP, alwaysReadValueFromLDAP,
                UserAttributeLDAPFederationMapper.IS_MANDATORY_IN_LDAP, "false");
        realm.addUserFederationMapper(mapperModel);
    }

    /**
     * Performs a full synchronization of all LDAP users into the local Keycloak store.
     * Delegates the actual (possibly paginated) import to {@code syncImpl}.
     *
     * @param sessionFactory used to open the per-batch/per-user transactions
     * @param realmId        realm whose federation provider is being synced
     * @param model          LDAP provider configuration
     * @return aggregated counts of added/updated/failed users
     */
    @Override
    public UserFederationSyncResult syncAllUsers(KeycloakSessionFactory sessionFactory, final String realmId, final UserFederationProviderModel model) {
        logger.infof("Sync all users from LDAP to local store: realm: %s, federation provider: %s", realmId, model.getDisplayName());

        LDAPQuery userQuery = createQuery(sessionFactory, realmId, model);
        UserFederationSyncResult syncResult = syncImpl(sessionFactory, userQuery, realmId, model);

        // TODO: Remove all existing keycloak users, which have federation links, but are not in LDAP. Perhaps don't check users, which were just added or updated during this sync?

        logger.infof("Sync all users finished: %s", syncResult.getStatus());
        return syncResult;
    }

    /**
     * Synchronizes only the LDAP users created or modified since {@code lastSync}, by
     * restricting the LDAP query to createTimestamp >= lastSync OR modifyTimestamp >= lastSync.
     *
     * NOTE(review): {@code lastSync} is string-concatenated into the log format string instead
     * of being passed as a {@code %s} argument — consider passing it as a parameter instead.
     */
    @Override
    public UserFederationSyncResult syncChangedUsers(KeycloakSessionFactory sessionFactory, String realmId, UserFederationProviderModel model, Date lastSync) {
        logger.infof("Sync changed users from LDAP to local store: realm: %s, federation provider: %s, last sync time: " + lastSync, realmId, model.getDisplayName());

        // Sync newly created and updated users
        LDAPQueryConditionsBuilder conditionsBuilder = new LDAPQueryConditionsBuilder();
        Condition createCondition = conditionsBuilder.greaterThanOrEqualTo(new QueryParameter(LDAPConstants.CREATE_TIMESTAMP), lastSync);
        Condition modifyCondition = conditionsBuilder.greaterThanOrEqualTo(new QueryParameter(LDAPConstants.MODIFY_TIMESTAMP), lastSync);
        Condition orCondition = conditionsBuilder.orCondition(createCondition, modifyCondition);

        LDAPQuery userQuery = createQuery(sessionFactory, realmId, model);
        userQuery.where(orCondition);
        UserFederationSyncResult result = syncImpl(sessionFactory, userQuery, realmId, model);

        logger.infof("Sync changed users finished: %s", result.getStatus());
        return result;
    }

    /**
     * Runs the given LDAP query and imports the results. When the provider has pagination
     * enabled, results are fetched and imported page by page (page size taken from
     * BATCH_SIZE_FOR_SYNC, defaulting to DEFAULT_BATCH_SIZE_FOR_SYNC); otherwise a single
     * query fetches everything at once.
     */
    protected UserFederationSyncResult syncImpl(KeycloakSessionFactory sessionFactory, LDAPQuery userQuery, final String realmId, final UserFederationProviderModel fedModel) {

        final UserFederationSyncResult syncResult = new UserFederationSyncResult();

        boolean pagination = Boolean.parseBoolean(fedModel.getConfig().get(LDAPConstants.PAGINATION));
        if (pagination) {
            String pageSizeConfig = fedModel.getConfig().get(LDAPConstants.BATCH_SIZE_FOR_SYNC);
            // Fall back to the default batch size when none is configured.
            int pageSize = pageSizeConfig!=null ? Integer.parseInt(pageSizeConfig) : LDAPConstants.DEFAULT_BATCH_SIZE_FOR_SYNC;

            boolean nextPage = true;
            while (nextPage) {
                userQuery.setLimit(pageSize);
                final List<LDAPObject> users = userQuery.getResultList();
                // A non-null pagination context signals more pages are available.
                nextPage = userQuery.getPaginationContext() != null;
                UserFederationSyncResult currentPageSync = importLdapUsers(sessionFactory, realmId, fedModel, users);
                syncResult.add(currentPageSync);
            }
        } else {
            // LDAP pagination not available. Do everything in single transaction
            final List<LDAPObject> users = userQuery.getResultList();
            UserFederationSyncResult currentSync = importLdapUsers(sessionFactory, realmId, fedModel, users);
            syncResult.add(currentSync);
        }

        return syncResult;
    }

    /**
     * Builds the LDAP user-search query for the given provider. The query must be created
     * inside a Keycloak transaction because it needs the realm model and a provider instance;
     * a local holder class smuggles the result out of the transaction callback.
     */
    private LDAPQuery createQuery(KeycloakSessionFactory sessionFactory, final String realmId, final UserFederationProviderModel model) {
        class QueryHolder {
            LDAPQuery query;
        }

        final QueryHolder queryHolder = new QueryHolder();
        KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() {

            @Override
            public void run(KeycloakSession session) {
                LDAPFederationProvider ldapFedProvider = getInstance(session, model);
                RealmModel realm = session.realms().getRealm(realmId);
                queryHolder.query = LDAPUtils.createQueryForUserSearch(ldapFedProvider, realm);
            }
        });
        return queryHolder.query;
    }

    /**
     * Imports the given LDAP users into Keycloak. Each user is processed in its own
     * transaction so a failure on one user does not abort the whole batch:
     * - unknown usernames are imported as new users,
     * - users linked to this provider (matching federation link and LDAP_ID) are updated
     *   through the provider's federation mappers,
     * - users that exist locally but are NOT linked to this provider are skipped and
     *   counted as failed,
     * - on ModelException, a user that was added earlier in the same failed transaction
     *   is removed again in a follow-up transaction.
     */
    protected UserFederationSyncResult importLdapUsers(KeycloakSessionFactory sessionFactory, final String realmId, final UserFederationProviderModel fedModel, List<LDAPObject> ldapUsers) {
        final UserFederationSyncResult syncResult = new UserFederationSyncResult();

        class BooleanHolder {
            private boolean value = true;
        }
        final BooleanHolder exists = new BooleanHolder();

        for (final LDAPObject ldapUser : ldapUsers) {

            try {

                // Process each user in it's own transaction to avoid global fail
                KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() {

                    @Override
                    public void run(KeycloakSession session) {
                        LDAPFederationProvider ldapFedProvider = getInstance(session, fedModel);
                        RealmModel currentRealm = session.realms().getRealm(realmId);

                        String username = LDAPUtils.getUsername(ldapUser, ldapFedProvider.getLdapIdentityStore().getConfig());
                        UserModel currentUser = session.userStorage().getUserByUsername(username, currentRealm);

                        if (currentUser == null) {

                            // Add new user to Keycloak
                            exists.value = false;
                            ldapFedProvider.importUserFromLDAP(session, currentRealm, ldapUser);
                            syncResult.increaseAdded();

                        } else {
                            if ((fedModel.getId().equals(currentUser.getFederationLink())) && (ldapUser.getUuid().equals(currentUser.getFirstAttribute(LDAPConstants.LDAP_ID)))) {

                                // Update keycloak user
                                Set<UserFederationMapperModel> federationMappers = currentRealm.getUserFederationMappersByFederationProvider(fedModel.getId());
                                for (UserFederationMapperModel mapperModel : federationMappers) {
                                    LDAPFederationMapper ldapMapper = ldapFedProvider.getMapper(mapperModel);
                                    ldapMapper.onImportUserFromLDAP(mapperModel, ldapFedProvider, ldapUser, currentUser, currentRealm, false);
                                }

                                logger.debugf("Updated user from LDAP: %s", currentUser.getUsername());
                                syncResult.increaseUpdated();
                            } else {
                                logger.warnf("User '%s' is not updated during sync as he already exists in Keycloak database but is not linked to federation provider '%s'", username, fedModel.getDisplayName());
                                syncResult.increaseFailed();
                            }
                        }
                    }

                });
            } catch (ModelException me) {
                logger.error("Failed during import user from LDAP", me);
                syncResult.increaseFailed();

                // Remove user if we already added him during this transaction
                if (!exists.value) {
                    KeycloakModelUtils.runJobInTransaction(sessionFactory, new KeycloakSessionTask() {

                        @Override
                        public void run(KeycloakSession session) {
                            LDAPFederationProvider ldapFedProvider = getInstance(session, fedModel);
                            RealmModel currentRealm = session.realms().getRealm(realmId);
                            String username = LDAPUtils.getUsername(ldapUser, ldapFedProvider.getLdapIdentityStore().getConfig());

                            // Roll back the partially imported user from the failed transaction.
                            UserModel existing = session.userStorage().getUserByUsername(username, currentRealm);
                            if (existing != null) {
                                session.userStorage().removeUser(currentRealm, existing);
                            }
                        }

                    });
                }
            }
        }

        return syncResult;
    }

    // Factory methods below are protected, presumably so tests can override them — TODO confirm.

    /** Creates a SPNEGO authenticator for the given token, wired to a Kerberos server-subject authenticator. */
    protected SPNEGOAuthenticator createSPNEGOAuthenticator(String spnegoToken, CommonKerberosConfig kerberosConfig) {
        KerberosServerSubjectAuthenticator kerberosAuth = createKerberosSubjectAuthenticator(kerberosConfig);
        return new SPNEGOAuthenticator(kerberosConfig, kerberosAuth, spnegoToken);
    }

    /** Creates the Kerberos server-subject authenticator used by SPNEGO. */
    protected KerberosServerSubjectAuthenticator createKerberosSubjectAuthenticator(CommonKerberosConfig kerberosConfig) {
        return new KerberosServerSubjectAuthenticator(kerberosConfig);
    }

    /** Creates a Kerberos username/password authenticator for direct credential validation. */
    protected KerberosUsernamePasswordAuthenticator createKerberosUsernamePasswordAuthenticator(CommonKerberosConfig kerberosConfig) {
        return new KerberosUsernamePasswordAuthenticator(kerberosConfig);
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.kubernetes.cluster;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import io.fabric8.kubernetes.api.model.ConfigMapBuilder;
import io.fabric8.kubernetes.api.model.coordination.v1.LeaseBuilder;
import org.apache.camel.cluster.CamelPreemptiveClusterService;
import org.apache.camel.component.kubernetes.KubernetesConfiguration;
import org.apache.camel.component.kubernetes.cluster.utils.ConfigMapLockSimulator;
import org.apache.camel.component.kubernetes.cluster.utils.LeaderRecorder;
import org.apache.camel.component.kubernetes.cluster.utils.LeaseLockSimulator;
import org.apache.camel.component.kubernetes.cluster.utils.LockTestServer;
import org.apache.camel.component.kubernetes.cluster.utils.ResourceLockSimulator;
import org.apache.camel.support.cluster.RebalancingCamelClusterService;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.awaitility.Awaitility;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.EnumSource;
import org.junit.jupiter.params.provider.MethodSource;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

/**
 * Test leader election scenarios using a mock server.
 */
public class KubernetesClusterServiceTest extends CamelTestSupport {

    // Timing parameters shared by every simulated cluster member; several assertions
    // below derive their expected windows from these values.
    private static final int LEASE_TIME_MILLIS = 2000;
    private static final int RENEW_DEADLINE_MILLIS = 1000;
    private static final int RETRY_PERIOD_MILLIS = 200;
    private static final double JITTER_FACTOR = 1.1;

    // One simulated lock resource per kind: a single shared ConfigMap, or one Lease per group.
    private ConfigMapLockSimulator configMapLockSimulator;

    private Map<String, LeaseLockSimulator> leaseLockSimulators = new HashMap<>();

    // One mock API server per pod name, so individual pods can be delayed/refused/disconnected.
    private Map<String, LockTestServer<?>> lockServers = new HashMap<>();

    private Map<String, CamelPreemptiveClusterService> clusterServices = new HashMap<>();

    /** Tears down all mock lock servers and resets per-test state. */
    @AfterEach
    public void shutdownLock() {
        for (LockTestServer<?> server : this.lockServers.values()) {
            try {
                server.destroy();
            } catch (Exception e) {
                // can happen in case of delay
            }
        }
        this.lockServers = new HashMap<>();
        configMapLockSimulator = null;
        leaseLockSimulators = new HashMap<>();
        clusterServices = new HashMap<>();
    }

    /** Two members must converge on a single leader, for both ConfigMap and Lease locks. */
    @ParameterizedTest
    @EnumSource(LeaseResourceType.class)
    public void testSimpleLeaderElection(LeaseResourceType type) throws Exception {
        LeaderRecorder mypod1 = addMember("mypod1", type);
        LeaderRecorder mypod2 = addMember("mypod2", type);
        context.start();

        mypod1.waitForAnyLeader(5, TimeUnit.SECONDS);
        mypod2.waitForAnyLeader(5, TimeUnit.SECONDS);

        String leader = mypod1.getCurrentLeader();
        assertNotNull(leader);
        assertTrue(leader.startsWith("mypod"));
        assertEquals(mypod2.getCurrentLeader(), leader, "Leaders should be equals");
    }

    /** Five members must all observe the same single leader. */
    @ParameterizedTest
    @EnumSource(LeaseResourceType.class)
    public void testMultipleMembersLeaderElection(LeaseResourceType type) throws Exception {
        int number = 5;
        List<LeaderRecorder> members = IntStream.range(0, number).mapToObj(i -> addMember("mypod" + i, type)).collect(Collectors.toList());
        context.start();

        for (LeaderRecorder member : members) {
            member.waitForAnyLeader(5, TimeUnit.SECONDS);
        }

        Set<String> leaders = members.stream().map(LeaderRecorder::getCurrentLeader).collect(Collectors.toSet());
        assertEquals(1, leaders.size());
        String leader = leaders.iterator().next();
        assertTrue(leader.startsWith("mypod"));
    }

    /** A pre-existing "leaders" ConfigMap must not prevent leader election. */
    @Test
    public void testSimpleLeaderElectionWithExistingConfigMap() throws Exception {
        this.configMapLockSimulator = new ConfigMapLockSimulator("leaders");
        configMapLockSimulator.setResource(new ConfigMapBuilder().withNewMetadata().withName("leaders").and().build(), true);

        LeaderRecorder mypod1 = addMember("mypod1", LeaseResourceType.ConfigMap);
        LeaderRecorder mypod2 = addMember("mypod2", LeaseResourceType.ConfigMap);
        context.start();

        mypod1.waitForAnyLeader(10, TimeUnit.SECONDS);
        mypod2.waitForAnyLeader(10, TimeUnit.SECONDS);

        String leader = mypod1.getCurrentLeader();
        assertTrue(leader.startsWith("mypod"));
        assertEquals(mypod2.getCurrentLeader(), leader, "Leaders should be equals");
    }

    /** A pre-existing Lease resource for the group must not prevent leader election. */
    @Test
    public void testSimpleLeaderElectionWithExistingLeases() throws Exception {
        LeaseLockSimulator simulator = new LeaseLockSimulator("leaders-mygroup");
        simulator.setResource(new LeaseBuilder()
                .withNewMetadata().withName("leaders-mygroup")
                .and()
                .build(), true);
        this.leaseLockSimulators.put("mygroup", simulator);

        LeaderRecorder mypod1 = addMember("mypod1", "mygroup", LeaseResourceType.Lease);
        LeaderRecorder mypod2 = addMember("mypod2", "mygroup", LeaseResourceType.Lease);
        context.start();

        mypod1.waitForAnyLeader(10, TimeUnit.SECONDS);
        mypod2.waitForAnyLeader(10, TimeUnit.SECONDS);

        String leader = mypod1.getCurrentLeader();
        assertTrue(leader.startsWith("mypod"));
        assertEquals(mypod2.getCurrentLeader(), leader, "Leaders should be equals");
    }

    /**
     * When the current leader is cut off from the API server, the other member must take
     * over — but only after the configured lease/renew window has elapsed.
     */
    @ParameterizedTest
    @EnumSource(LeaseResourceType.class)
    public void testLeadershipLoss(LeaseResourceType type) throws Exception {
        LeaderRecorder mypod1 = addMember("mypod1", type);
        LeaderRecorder mypod2 = addMember("mypod2", type);
        context.start();

        mypod1.waitForAnyLeader(5, TimeUnit.SECONDS);
        mypod2.waitForAnyLeader(5, TimeUnit.SECONDS);

        String firstLeader = mypod1.getCurrentLeader();

        LeaderRecorder formerLeaderRecorder = firstLeader.equals("mypod1") ? mypod1 : mypod2;
        LeaderRecorder formerLoserRecorder = firstLeader.equals("mypod1") ? mypod2 : mypod1;

        // Simulate a total failure of the leader's connection to the API server.
        refuseRequestsFromPod(firstLeader);
        disconnectPod(firstLeader);

        formerLeaderRecorder.waitForALeaderChange(7, TimeUnit.SECONDS);
        formerLoserRecorder.waitForANewLeader(firstLeader, 7, TimeUnit.SECONDS);

        String secondLeader = formerLoserRecorder.getCurrentLeader();
        assertNotEquals(firstLeader, secondLeader, "The firstLeader should be different from the new one");
        Long lossTimestamp = formerLeaderRecorder.getLastTimeOf(l -> l == null);
        Long gainTimestamp = formerLoserRecorder.getLastTimeOf(secondLeader::equals);
        assertTrue(gainTimestamp >= lossTimestamp + (LEASE_TIME_MILLIS - RENEW_DEADLINE_MILLIS) / 2,
                "At least half distance must elapse from leadership loss and regain (see renewDeadlineSeconds)");
        checkLeadershipChangeDistance((LEASE_TIME_MILLIS - RENEW_DEADLINE_MILLIS) / 2, TimeUnit.MILLISECONDS, mypod1, mypod2);
    }

    /**
     * A leader whose requests are merely delayed must give up leadership locally (it can no
     * longer renew in time) while the other pod still observes it as the recorded leader.
     */
    @ParameterizedTest
    @EnumSource(LeaseResourceType.class)
    public void testSlowLeaderLosingLeadershipOnlyInternally(LeaseResourceType type) throws Exception {
        LeaderRecorder mypod1 = addMember("mypod1", type);
        LeaderRecorder mypod2 = addMember("mypod2", type);
        context.start();

        mypod1.waitForAnyLeader(5, TimeUnit.SECONDS);
        mypod2.waitForAnyLeader(5, TimeUnit.SECONDS);

        String firstLeader = mypod1.getCurrentLeader();
        LeaderRecorder formerLeaderRecorder = firstLeader.equals("mypod1") ? mypod1 : mypod2;
        LeaderRecorder formerLoserRecorder = firstLeader.equals("mypod1") ? mypod2 : mypod1;

        delayRequestsFromPod(firstLeader, 10, TimeUnit.SECONDS);

        Thread.sleep(LEASE_TIME_MILLIS);
        assertNull(formerLeaderRecorder.getCurrentLeader());
        assertEquals(firstLeader, formerLoserRecorder.getCurrentLeader());
    }

    /** Short outages (below the lease time) must not cause a permanent leadership change. */
    @ParameterizedTest
    @EnumSource(LeaseResourceType.class)
    public void testRecoveryAfterFailure(LeaseResourceType type) throws Exception {
        LeaderRecorder mypod1 = addMember("mypod1", type);
        LeaderRecorder mypod2 = addMember("mypod2", type);
        context.start();

        mypod1.waitForAnyLeader(5, TimeUnit.SECONDS);
        mypod2.waitForAnyLeader(5, TimeUnit.SECONDS);

        String firstLeader = mypod1.getCurrentLeader();

        for (int i = 0; i < 3; i++) {
            refuseRequestsFromPod(firstLeader);
            Thread.sleep(RENEW_DEADLINE_MILLIS);
            allowRequestsFromPod(firstLeader);
            Thread.sleep(LEASE_TIME_MILLIS);
        }

        assertEquals(firstLeader, mypod1.getCurrentLeader());
        assertEquals(firstLeader, mypod2.getCurrentLeader());
    }

    /** Two groups sharing one ConfigMap must elect independent leaders. */
    @Test
    public void testSharedConfigMap() throws Exception {
        LeaderRecorder a1 = addMember("a1", LeaseResourceType.ConfigMap);
        LeaderRecorder a2 = addMember("a2", LeaseResourceType.ConfigMap);
        LeaderRecorder b1 = addMember("b1", "app2", LeaseResourceType.ConfigMap);
        LeaderRecorder b2 = addMember("b2", "app2", LeaseResourceType.ConfigMap);
        context.start();

        a1.waitForAnyLeader(5, TimeUnit.SECONDS);
        a2.waitForAnyLeader(5, TimeUnit.SECONDS);
        b1.waitForAnyLeader(5, TimeUnit.SECONDS);
        b2.waitForAnyLeader(5, TimeUnit.SECONDS);

        assertNotNull(a1.getCurrentLeader());
        assertTrue(a1.getCurrentLeader().startsWith("a"));
        assertEquals(a1.getCurrentLeader(), a2.getCurrentLeader());
        assertNotNull(b1.getCurrentLeader());
        assertTrue(b1.getCurrentLeader().startsWith("b"));
        assertEquals(b1.getCurrentLeader(), b2.getCurrentLeader());
        assertNotEquals(a1.getCurrentLeader(), b2.getCurrentLeader());
    }

    /** Argument matrix for {@code testRebalancing}. */
    static Stream<Arguments> rebalancingProvider() {
        return Stream.of(
                // LeaseResourceType, pods, partitions, expected partitions owned, tolerance on owned partitions
                Arguments.of(LeaseResourceType.Lease, 4, 2, 0, 1),
                Arguments.of(LeaseResourceType.Lease, 1, 2, 2, 0),
                Arguments.of(LeaseResourceType.Lease, 2, 2, 1, 0),
                Arguments.of(LeaseResourceType.ConfigMap, 3, 10, 3, 1),
                Arguments.of(LeaseResourceType.Lease, 3, 10, 3, 1),
                Arguments.of(LeaseResourceType.ConfigMap, 6, 23, 3, 1),
                Arguments.of(LeaseResourceType.Lease, 6, 23, 3, 1));
    }

    /**
     * Partition ownership must spread across pods: every pod ends up owning between
     * {@code expectedPartitionsPerPod} and {@code expectedPartitionsPerPod + tolerance}
     * partitions.
     */
    @ParameterizedTest
    @MethodSource("rebalancingProvider")
    public void testRebalancing(LeaseResourceType type, int pods, int partitions, int expectedPartitionsPerPod, int tolerance) throws Exception {
        Map<String, List<LeaderRecorder>> recorders = createCluster(type, pods, partitions);

        context.start();

        waitForAllLeaders(recorders, leaders -> {
            Map<String, Long> counts = leaders.values().stream()
                    .collect(Collectors.groupingBy(Function.identity(), Collectors.counting()));

            for (Long count : counts.values()) {
                if (count < expectedPartitionsPerPod || count > expectedPartitionsPerPod + tolerance) {
                    return false;
                }
            }
            return true;
        }, 30, TimeUnit.SECONDS);
    }

    /** Builds a rebalancing cluster: each of {@code pods} pods joins every partition. */
    private Map<String, List<LeaderRecorder>> createCluster(LeaseResourceType type, int pods, int partitions) {
        Map<String, List<LeaderRecorder>> recorders = new HashMap<>();
        for (int i = 0; i < partitions; i++) {
            String partitionName = "partition-" + i;
            recorders.put(partitionName, new ArrayList<>());
            for (int j = 0; j < pods; j++) {
                recorders.get(partitionName).add(addMember("mypod-" + j, partitionName, type, true));
            }
        }
        return recorders;
    }

    /**
     * Waits until every partition has a single agreed, non-null leader AND the given
     * condition on the partition->leader map holds.
     */
    private void waitForAllLeaders(
            Map<String, List<LeaderRecorder>> partitionRecorders, Predicate<Map<String, String>> condition, long time,
            TimeUnit unit) {
        Awaitility.waitAtMost(time, unit).until(() -> {
            Map<String, String> leaders = new HashMap<>();
            for (String partition : partitionRecorders.keySet()) {
                String leader = null;
                for (LeaderRecorder recorder : partitionRecorders.get(partition)) {
                    String partitionLeader = recorder.getCurrentLeader();
                    if (partitionLeader == null || (leader != null && !leader.equals(partitionLeader))) {
                        // Some member has no leader yet, or members disagree.
                        return false;
                    }
                    leader = partitionLeader;
                }
                if (leader == null) {
                    return false;
                }
                leaders.put(partition, leader);
            }
            return condition.test(leaders);
        });
    }

    private void withLockServer(String pod, Consumer<LockTestServer<?>> consumer) {
        consumer.accept(this.lockServers.get(pod));
    }

    /** Makes the given pod's mock API server respond only after the given delay. */
    private void delayRequestsFromPod(String pod, long delay, TimeUnit unit) {
        withLockServer(pod, server -> server.setDelayRequests(TimeUnit.MILLISECONDS.convert(delay, unit)));
    }

    private void refuseRequestsFromPod(String pod) {
        withLockServer(pod, server -> server.setRefuseRequests(true));
    }

    private void allowRequestsFromPod(String pod) {
        withLockServer(pod, server -> server.setRefuseRequests(false));
    }

    /** Removes the pod from every mock server, as if it vanished from the cluster. */
    private void disconnectPod(String pod) {
        for (LockTestServer<?> server : this.lockServers.values()) {
            server.removePod(pod);
        }
    }

    private void connectPod(String pod) {
        for (LockTestServer<?> server : this.lockServers.values()) {
            server.addPod(pod);
        }
    }

    private void connectSimulator(ResourceLockSimulator<?> lockSimulator) {
        for (LockTestServer<?> server : this.lockServers.values()) {
            server.addSimulator(lockSimulator);
        }
    }

    /**
     * Asserts that, across the merged timeline of all recorders, every leadership switch
     * (a different non-null leader appearing) happens at least {@code minimum} after the
     * previous sighting of the old leader.
     */
    private void checkLeadershipChangeDistance(long minimum, TimeUnit unit, LeaderRecorder... recorders) {
        List<LeaderRecorder.LeadershipInfo> infos = Arrays.stream(recorders).flatMap(lr -> lr.getLeadershipInfo().stream())
                .sorted(Comparator.comparingLong(LeaderRecorder.LeadershipInfo::getChangeTimestamp))
                .collect(Collectors.toList());

        LeaderRecorder.LeadershipInfo currentLeaderLastSeen = null;
        for (LeaderRecorder.LeadershipInfo info : infos) {
            if (currentLeaderLastSeen == null || currentLeaderLastSeen.getLeader() == null) {
                currentLeaderLastSeen = info;
            } else {
                if (Objects.equals(info.getLeader(), currentLeaderLastSeen.getLeader())) {
                    currentLeaderLastSeen = info;
                } else if (info.getLeader() != null && !info.getLeader().equals(currentLeaderLastSeen.getLeader())) {
                    // switch
                    long delay = info.getChangeTimestamp() - currentLeaderLastSeen.getChangeTimestamp();
                    assertTrue(delay >= TimeUnit.MILLISECONDS.convert(minimum, unit),
                            "Lease time not elapsed between switch, minimum=" + TimeUnit.MILLISECONDS.convert(minimum, unit) + ", found=" + delay);
                    currentLeaderLastSeen = info;
                }
            }
        }
    }

    private LeaderRecorder addMember(String name, LeaseResourceType type) {
        return addMember(name, "app", type);
    }

    private LeaderRecorder addMember(String name, String namespace, LeaseResourceType type) {
        return addMember(name, namespace, type, false);
    }

    /**
     * Registers a cluster member named {@code name} in the given group/namespace, backed by
     * a per-pod mock lock server, and returns a recorder that tracks its leadership view.
     */
    private LeaderRecorder addMember(String name, String namespace, LeaseResourceType type, boolean rebalancing) {
        ResourceLockSimulator<?> lockSimulator;

        switch (type) {
            case ConfigMap:
                // All ConfigMap-based members share a single "leaders" ConfigMap.
                if (this.configMapLockSimulator == null) {
                    this.configMapLockSimulator = new ConfigMapLockSimulator("leaders");
                }
                lockSimulator = this.configMapLockSimulator;
                break;
            case Lease:
                // Lease-based members get one Lease per group/namespace.
                if (!this.leaseLockSimulators.containsKey(namespace)) {
                    this.leaseLockSimulators.put(namespace, new LeaseLockSimulator("leaders-" + namespace));
                }
                lockSimulator = this.leaseLockSimulators.get(namespace);
                break;
            default:
                throw new IllegalArgumentException("Unsupported LeaseResourceType " + type);
        }

        // Lazily create one mock lock server per pod.
        if (!this.lockServers.containsKey(name)) {
            this.lockServers.put(name, new LockTestServer<>());
        }
        LockTestServer<?> lockServer = this.lockServers.get(name);

        CamelPreemptiveClusterService member = clusterServices.get(name);
        if (member == null) {
            KubernetesConfiguration configuration = new KubernetesConfiguration();
            configuration.setKubernetesClient(lockServer.createClient());

            KubernetesClusterService service = new KubernetesClusterService(configuration);
            service.setKubernetesNamespace("test");
            service.setPodName(name);
            service.setLeaseDurationMillis(LEASE_TIME_MILLIS);
            service.setRenewDeadlineMillis(RENEW_DEADLINE_MILLIS);
            service.setRetryPeriodMillis(RETRY_PERIOD_MILLIS);
            service.setJitterFactor(JITTER_FACTOR);
            service.setLeaseResourceType(type);

            if (rebalancing) {
                member = new RebalancingCamelClusterService(service, RETRY_PERIOD_MILLIS);
            } else {
                member = service;
            }

            try {
                context().addService(member);
            } catch (Exception ex) {
                throw new RuntimeException(ex);
            }
            clusterServices.put(name, member);
        }

        LeaderRecorder recorder = new LeaderRecorder();
        try {
            member.getView(namespace).addEventListener(recorder);
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }

        // (Re)connect every known pod and the chosen simulator to all mock servers.
        for (String pod : this.lockServers.keySet()) {
            connectPod(pod);
            connectSimulator(lockSimulator);
        }
        return recorder;
    }

    @Override
    public boolean isUseRouteBuilder() {
        // Tests drive the cluster services directly; no Camel routes are needed.
        return false;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.controller; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.logging.LogLevel; public class TerminationAwareLogger implements ComponentLog { private static final String TERMINATED_TASK_PREFIX = "[Terminated Process] - "; private final ComponentLog logger; private volatile boolean terminated = false; public TerminationAwareLogger(final ComponentLog logger) { this.logger = logger; } public void terminate() { this.terminated = true; } private boolean isTerminated() { return terminated; } private String getMessage(String originalMessage, LogLevel logLevel) { return TERMINATED_TASK_PREFIX + logLevel.name() + " - " + originalMessage; } @Override public void warn(String msg, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.WARN), t); return; } logger.warn(msg, t); } @Override public void warn(String msg, Object[] os) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.WARN), os); return; } logger.warn(msg, os); } @Override public void warn(String msg, Object[] os, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.WARN), os, t); return; } logger.warn(msg, os, t); } @Override public void warn(String msg) { 
if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.WARN)); return; } logger.warn(msg); } @Override public void trace(String msg, Throwable t) { if (isTerminated()) { logger.trace(getMessage(msg, LogLevel.TRACE), t); return; } logger.trace(msg, t); } @Override public void trace(String msg, Object[] os) { if (isTerminated()) { logger.trace(getMessage(msg, LogLevel.TRACE), os); return; } logger.trace(msg, os); } @Override public void trace(String msg) { if (isTerminated()) { logger.trace(getMessage(msg, LogLevel.TRACE)); return; } logger.trace(msg); } @Override public void trace(String msg, Object[] os, Throwable t) { if (isTerminated()) { logger.trace(getMessage(msg, LogLevel.TRACE), os, t); return; } logger.trace(msg, os, t); } @Override public boolean isWarnEnabled() { return logger.isWarnEnabled(); } @Override public boolean isTraceEnabled() { return logger.isTraceEnabled(); } @Override public boolean isInfoEnabled() { return logger.isInfoEnabled(); } @Override public boolean isErrorEnabled() { return logger.isErrorEnabled(); } @Override public boolean isDebugEnabled() { return logger.isDebugEnabled(); } @Override public void info(String msg, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.INFO), t); return; } logger.info(msg, t); } @Override public void info(String msg, Object[] os) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.INFO), os); return; } logger.info(msg, os); } @Override public void info(String msg) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.INFO)); return; } logger.info(msg); } @Override public void info(String msg, Object[] os, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.INFO), os, t); return; } logger.info(msg, os, t); } @Override public String getName() { return logger.getName(); } @Override public void error(String msg, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.ERROR), t); return; } logger.error(msg, t); } 
@Override public void error(String msg, Object[] os) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.ERROR), os); return; } logger.error(msg, os); } @Override public void error(String msg) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.ERROR)); return; } logger.error(msg); } @Override public void error(String msg, Object[] os, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.ERROR), os, t); return; } logger.error(msg, os, t); } @Override public void debug(String msg, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.DEBUG), t); return; } logger.debug(msg, t); } @Override public void debug(String msg, Object[] os) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.DEBUG), os); return; } logger.debug(msg, os); } @Override public void debug(String msg, Object[] os, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.DEBUG), os, t); return; } logger.debug(msg, os, t); } @Override public void debug(String msg) { if (isTerminated()) { logger.debug(getMessage(msg, LogLevel.DEBUG)); return; } logger.debug(msg); } @Override public void log(LogLevel level, String msg, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, level), t); return; } logger.log(level, msg, t); } @Override public void log(LogLevel level, String msg, Object[] os) { if (isTerminated()) { logger.debug(getMessage(msg, level), os); return; } logger.log(level, msg, os); } @Override public void log(LogLevel level, String msg) { if (isTerminated()) { logger.debug(getMessage(msg, level)); return; } logger.log(level, msg); } @Override public void log(LogLevel level, String msg, Object[] os, Throwable t) { if (isTerminated()) { logger.debug(getMessage(msg, level), os, t); return; } logger.log(level, msg, os, t); } }
package hope.it.works.rainfall;

import hope.it.works.ct.TriangleDataPrim;

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.HashSet;

import vgl.iisc.utils.MyIntList;

/**
 * Builds a tracking graph of cloud features over a sequence of time steps.
 * Each node is one cloud feature at one time step; an edge links a feature to
 * the feature its pixels advect into at the next time step, using a per-pixel
 * velocity field read from disk.
 * <p>
 * NOTE(review): assumes the scalar field is a regular nx-by-ny grid stored in
 * row-major order (vertex v maps to x = v % nx, y = v / nx) and that vector
 * files hold nv little-endian (dx, dy) float pairs — confirm against the
 * writers of the .field/.boff files.
 */
public class TimeGraph {

  /** One cloud feature at one time step. Positions/velocities are averages over member pixels. */
  public class Node {
    public int time;
    public int featureNo;
    public int cpVertex;
    public float x, y;
    public float dx, dy;
    // Number of grid pixels assigned to this feature.
    public int size;
  }

  /** Undirected-by-hash edge between two node indices (n1 at time t, n2 at time t+1). */
  public class Edge {
    public int n1;
    public int n2;

    @Override
    public int hashCode() {
      // Order-independent hash without the per-call String allocation the
      // original used; equal edges (same n1, n2) still hash equally.
      int lo = Math.min(n1, n2);
      int hi = Math.max(n1, n2);
      return 31 * lo + hi;
    }

    @Override
    public boolean equals(Object obj) {
      // Fixed: the original cast unconditionally, throwing ClassCastException
      // for foreign types and NullPointerException for null.
      if (!(obj instanceof Edge)) {
        return false;
      }
      Edge e = (Edge) obj;
      return e.n1 == n1 && e.n2 == n2;
    }
  }

  /** Incoming (prev) and outgoing (next) edge indices for one node. */
  public class Adjacencies {
    public MyIntList prev = new MyIntList(2);
    public MyIntList next = new MyIntList(2);
  }

  public ArrayList<Node> nodes;
  public ArrayList<Edge> edges;
  public HashSet<Edge> allEdges = new HashSet<TimeGraph.Edge>();
  public ArrayList<Adjacencies> adj;
  // Sliding window of two consecutive time steps while building the graph.
  Cloud[] clouds = new Cloud[2];
  int totalTime;
  // index[t][feature] -> global node index.
  public int[][] index;
  String[] ts;
  String model;
  String folder;
  int nx, ny;
  int nv;
  // When true, clouds are segmented from raw data and written; otherwise read back from disk.
  public boolean createClouds;
  // Known thresholds per data set: aila - 210, mumbai - 220, nakazawa - 221, nakazawa-long - 220.
  public float[] th;
  // Kept for interface compatibility; previously used by a (removed) vector-normalization step.
  float ep = 0.0001f;

  /**
   * Segments (or loads) the cloud features of every time step and creates one
   * graph node per feature. Must be called before {@link #createGraph()}.
   *
   * @param data      triangulated grid data (provides {@code noVertices})
   * @param folder    root folder holding the data/ and vector/ subdirectories
   * @param model     data-set name used to build file names
   * @param timeSteps time-step identifiers, one per step
   * @param nx        grid width in pixels
   * @param ny        grid height in pixels
   */
  public void findClouds(TriangleDataPrim data, String folder, String model,
      String[] timeSteps, int nx, int ny) {
    this.folder = folder;
    this.model = model;
    ts = timeSteps;
    this.nx = nx;
    this.ny = ny;
    nodes = new ArrayList<Node>();
    edges = new ArrayList<Edge>();
    adj = new ArrayList<Adjacencies>();
    totalTime = timeSteps.length;
    nv = data.noVertices;

    System.out.println("Finding clouds across time steps");
    index = new int[totalTime][];
    int ct = 0;
    for (int i = 0; i < timeSteps.length; i++) {
      Cloud cloud;
      if (createClouds) {
        cloud = new Cloud(null, data,
            folder + "data/" + model + "/" + model + "-" + timeSteps[i] + ".boff", th);
        cloud.time = i;
        cloud.write();
      } else {
        cloud = new Cloud();
        cloud.read(i);
      }
      index[i] = new int[cloud.criticalPoints.length];
      for (int j = 0; j < index[i].length; j++) {
        index[i][j] = ct++;
        Node node = new Node();
        node.cpVertex = cloud.criticalPoints[j];
        node.time = i;
        node.featureNo = j;
        nodes.add(node);
        Adjacencies adj = new Adjacencies();
        this.adj.add(adj);
      }
    }
  }

  /**
   * Links features across consecutive time steps: each pixel of a feature is
   * advected by the velocity field; if it lands inside a feature of the next
   * step, an edge is added (once). Also accumulates per-feature mean position,
   * mean velocity and pixel count.
   */
  public void createGraph() {
    System.out.println("Creating Graph!!!");
    clouds[0] = new Cloud();
    clouds[1] = new Cloud();
    int ct = 0;
    clouds[0].read(ct);
    clouds[1].read(ct + 1);
    int[] vecCt = new int[nodes.size()];
    int ein = 0;
    for (int i = 0; i < totalTime; i++) {
      ct = i;
      int nt = ct + 1;
      if (nt < totalTime) {
        readVector(ct, 0);
        for (int j = 0; j < vector.length; j++) {
          int curCloud = clouds[0].region[j];
          if (curCloud == -1) {
            continue;
          }
          int x = j % nx;
          int y = j / nx;
          float xx = x;
          float yy = y;
          int in = index[ct][curCloud];
          Node n = nodes.get(in);
          n.x += x;
          n.y += y;
          n.dx += vector[j][0];
          n.dy += vector[j][1];
          vecCt[in]++;
          // Advect the pixel and see which feature it lands in at time nt.
          xx += vector[j][0];
          yy += vector[j][1];
          x = Math.round(xx);
          y = Math.round(yy);
          if (x < 0 || y < 0 || x >= nx || y >= ny) {
            continue;
          }
          int v = x + nx * y;
          int nextCloud = clouds[1].region[v];
          if (nextCloud != -1) {
            int nodeIn = index[nt][nextCloud];
            Edge e = new Edge();
            e.n1 = in;
            e.n2 = nodeIn;
            if (!allEdges.contains(e)) {
              edges.add(e);
              allEdges.add(e);
              Adjacencies ad = adj.get(nodeIn);
              ad.prev.add(ein);
              ad = adj.get(in);
              ad.next.add(ein);
              ein++;
            }
          }
        }
      } else {
        // Last time step: no next cloud to link to, only accumulate statistics.
        for (int j = 0; j < vector.length; j++) {
          int curCloud = clouds[0].region[j];
          if (curCloud == -1) {
            continue;
          }
          int x = j % nx;
          int y = j / nx;
          int in = index[ct][curCloud];
          Node n = nodes.get(in);
          n.x += x;
          n.y += y;
          vecCt[in]++;
        }
      }
      // Slide the two-step window forward.
      clouds[0] = clouds[1];
      nt++;
      if (nt < totalTime) {
        clouds[1] = new Cloud();
        clouds[1].read(nt);
      } else {
        clouds[1] = null;
      }
    }
    // Turn accumulated sums into means; empty features fall back to their critical point.
    for (int i = 0; i < vecCt.length; i++) {
      Node n = nodes.get(i);
      n.size = vecCt[i];
      if (vecCt[i] == 0) {
        n.dx = 0;
        n.dy = 0;
        n.x = n.cpVertex % nx;
        n.y = n.cpVertex / nx;
        continue;
      }
      n.dx /= vecCt[i];
      n.dy /= vecCt[i];
      n.x /= vecCt[i];
      n.y /= vecCt[i];
    }
  }

  public float[][] vector, vector1;

  /**
   * Loads the velocity field of time step {@code ct} into {@link #vector}
   * ({@code no == 0}) or {@link #vector1} ({@code no != 0}).
   */
  public void readVector(int ct, int no) {
    String file = folder + "vector/" + model + "/" + model + "-" + ts[ct] + ".field";
    if (no == 0) {
      vector = readField(file, vector);
    } else {
      vector1 = readField(file, vector1);
    }
  }

  /** Loads the reverse velocity field (.rfield) of time step {@code ct} into {@link #vector}. */
  public void readRevVector(int ct) {
    String file = folder + "vector/" + model + "/" + model + "-" + ts[ct] + ".rfield";
    vector = readField(file, vector);
  }

  /**
   * Reads nv little-endian (dx, dy) float pairs from {@code file} into
   * {@code dst}, allocating it on first use. Shared by what used to be two
   * duplicated readVector/readVector1 bodies.
   * <p>
   * Fixed: the original called {@code InputStream.read(byte[])} and ignored
   * the return value — a short read would silently leave stale bytes in the
   * buffer. {@code DataInputStream.readFully} blocks until the buffer is full
   * or throws EOFException.
   */
  private float[][] readField(String file, float[][] dst) {
    try {
      byte[] b = new byte[nv * 2 * 4];
      DataInputStream ip = new DataInputStream(new BufferedInputStream(new FileInputStream(file)));
      ip.readFully(b);
      ip.close();
      ByteBuffer buf = ByteBuffer.wrap(b);
      buf.order(ByteOrder.LITTLE_ENDIAN);
      if (dst == null) {
        dst = new float[nv][2];
      }
      for (int i = 0; i < dst.length; i++) {
        dst[i][0] = buf.getFloat();
        dst[i][1] = buf.getFloat();
      }
      return dst;
    } catch (Exception e) {
      // NOTE(review): preserves the original fail-hard behavior (exit status 0
      // even on failure); consider rethrowing instead.
      e.printStackTrace();
      System.exit(0);
      return null; // unreachable
    }
  }

  /** Scans every time step for east-moving clouds persisting at least 3 steps. */
  public void findEastMovingClouds() {
    System.out.println("Finding east moving clouds!!!");
    for (int i = 0; i < totalTime; i++) {
      findEastMovingCloud(i, 3);
    }
  }

  public enum Direction { North, South, East, West }

  /** True if the velocity strictly points in {@code dir} (used to start a track). */
  private boolean startsMoving(Direction dir, float[] vec) {
    switch (dir) {
      case North: return vec[1] > 0;
      case South: return vec[1] < 0;
      case East:  return vec[0] > 0;
      case West:  return vec[0] < 0;
    }
    return false;
  }

  /**
   * True if the velocity does not oppose {@code dir} (used to continue a
   * track; non-strict, matching the original east-tracking semantics where a
   * zero component did not break the track).
   */
  private boolean keepsMoving(Direction dir, float[] vec) {
    switch (dir) {
      case North: return vec[1] >= 0;
      case South: return vec[1] <= 0;
      case East:  return vec[0] >= 0;
      case West:  return vec[0] <= 0;
    }
    return false;
  }

  /**
   * Returns the grid vertices of time step {@code time} that belong to a cloud,
   * start moving in {@code dir}, and keep doing so for at least {@code tlen} steps.
   *
   * @param time starting time step
   * @param tlen minimum track length in time steps
   * @param dir  direction to track
   * @return vertex indices of qualifying track seeds (empty for the last step)
   */
  public ArrayList<Integer> findDirMovingCloud(int time, int tlen, Direction dir) {
    ArrayList<Integer> list = new ArrayList<Integer>();
    int ct = time;
    int nt = ct + 1;
    System.out.println("Processing time " + ct + " of " + totalTime);
    if (clouds[0] == null) {
      clouds[0] = new Cloud();
    }
    clouds[0].read(ct);
    if (nt < totalTime) {
      readVector(ct, 1);
      for (int j = 0; j < vector1.length; j++) {
        int curCloud = clouds[0].region[j];
        if (curCloud == -1) {
          continue;
        }
        if (startsMoving(dir, vector1[j])) {
          int len = getTrackLength(ct, j, tlen, dir);
          if (len >= tlen) {
            System.out.println(dir + " moving for time " + len + " from time " + ct);
            list.add(j);
          }
        }
      }
    }
    return list;
  }

  /** Convenience wrapper: east-moving track seeds at {@code time}. */
  public ArrayList<Integer> findEastMovingCloud(int time, int tlen) {
    return findDirMovingCloud(time, tlen, Direction.East);
  }

  /**
   * Follows vertex {@code v} forward in time while it stays inside a cloud and
   * keeps moving in {@code dir}; returns the track length (capped at {@code tlen}).
   * <p>
   * Fixed: the original ignored {@code dir} entirely and always tested the
   * east component, so North/South/West queries returned east-track lengths.
   */
  private int getTrackLength(int time, int v, int tlen, Direction dir) {
    Cloud c = new Cloud();
    int len = 1;
    for (int i = time; i < totalTime - 1; i++) {
      c.read(i + 1);
      readVector(i, 0);
      float xx = v % nx;
      float yy = v / nx;
      xx += vector[v][0];
      yy += vector[v][1];
      int x = Math.round(xx);
      int y = Math.round(yy);
      if (!keepsMoving(dir, vector[v])) {
        break;
      }
      v = x + y * nx;
      // Bounds are tested before region[v] is touched (short-circuit).
      if (x >= nx || x < 0 || y >= ny || y < 0 || c.region[v] == -1
          || !keepsMoving(dir, vector[v])) {
        break;
      }
      len++;
      if (len >= tlen) {
        return len;
      }
    }
    return len;
  }

  /** Returns all cloud vertices of {@code time} whose velocity points west. */
  public ArrayList<Integer> findWestMovingClouds(int time) {
    ArrayList<Integer> list = new ArrayList<Integer>();
    int ct = time;
    System.out.println("Processing time " + ct + " of " + totalTime);
    if (clouds[0] == null) {
      clouds[0] = new Cloud();
    }
    clouds[0].read(ct);
    readVector(ct, 1);
    for (int j = 0; j < vector1.length; j++) {
      int curCloud = clouds[0].region[j];
      if (curCloud == -1) {
        continue;
      }
      if (vector1[j][0] < 0) {
        list.add(j);
      }
    }
    return list;
  }

  /** Returns west-moving cloud vertices of {@code time} tracked for at least {@code minLength} steps. */
  public ArrayList<Integer> findWestMovingClouds(int time, int minLength) {
    ArrayList<Integer> list = new ArrayList<Integer>();
    int ct = time;
    System.out.println("Processing time " + ct + " of " + totalTime);
    if (clouds[0] == null) {
      clouds[0] = new Cloud();
    }
    clouds[0].read(ct);
    readVector(ct, 1);
    for (int j = 0; j < vector1.length; j++) {
      if (j % 1000 == 0) {
        System.out.println(j + " of " + vector1.length);
      }
      int curCloud = clouds[0].region[j];
      if (curCloud == -1) {
        continue;
      }
      if (vector1[j][0] < 0) {
        boolean west = getWestTrackLength(ct, j, minLength);
        if (west) {
          list.add(j);
        }
      }
    }
    return list;
  }

  /**
   * True if vertex {@code v} keeps moving west inside a cloud for at least
   * {@code minLength} steps.
   * <p>
   * Fixed: the original's final break condition tested {@code vector[v][0] < 0}
   * — copy-pasted from the east tracker — which terminated the track exactly
   * when the next position was STILL moving west. It must break on {@code >= 0}.
   */
  private boolean getWestTrackLength(int time, int v, int minLength) {
    Cloud c = new Cloud();
    int len = 1;
    for (int i = time; i < totalTime - 1; i++) {
      c.read(i + 1);
      readVector(i, 0);
      float xx = v % nx;
      float yy = v / nx;
      xx += vector[v][0];
      yy += vector[v][1];
      if (vector[v][0] >= 0) {
        break;
      }
      int x = Math.round(xx);
      int y = Math.round(yy);
      v = x + y * nx;
      if (x >= nx || x < 0 || y >= ny || y < 0 || c.region[v] == -1 || vector[v][0] >= 0) {
        break;
      }
      len++;
      if (len >= minLength) {
        return true;
      }
    }
    return len >= minLength;
  }
}
/*
 * Copyright 2012-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.command;

import static com.facebook.buck.testutil.MoreAsserts.assertListEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;

import com.facebook.buck.android.AndroidBinaryRule;
import com.facebook.buck.android.AndroidLibraryRule;
import com.facebook.buck.android.AndroidResourceRule;
import com.facebook.buck.android.NdkLibrary;
import com.facebook.buck.command.Project.SourceFolder;
import com.facebook.buck.java.DefaultJavaLibraryRule;
import com.facebook.buck.java.JavaLibraryRule;
import com.facebook.buck.java.JavaTestRule;
import com.facebook.buck.java.Keystore;
import com.facebook.buck.java.PrebuiltJarRule;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.BuildTargetPattern;
import com.facebook.buck.model.SingletonBuildTargetPattern;
import com.facebook.buck.parser.PartialGraph;
import com.facebook.buck.parser.PartialGraphFactory;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DependencyGraph;
import com.facebook.buck.rules.FakeAbstractBuildRuleBuilderParams;
import com.facebook.buck.rules.FileSourcePath;
import com.facebook.buck.rules.JavaPackageFinder;
import com.facebook.buck.rules.ProjectConfigRule;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.testutil.BuckTestConstant;
import com.facebook.buck.testutil.RuleMap;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.ProjectFilesystem;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;

import org.easymock.EasyMock;
import org.junit.Test;

import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import javax.annotation.Nullable;

/** Unit tests for IntelliJ project generation from a Buck build graph. */
public class ProjectTest {

  private static final String PATH_TO_GUAVA_JAR = "third_party/guava/guava-10.0.1.jar";

  // Populated by createPartialGraphForTesting() and asserted against in testProject().
  @SuppressWarnings("PMD.UnusedPrivateField")
  private PrebuiltJarRule guava;

  /**
   * Creates a PartialGraph with two android_binary rules, each of which depends on the same
   * android_library. The difference between the two is that one lists Guava in its no_dx list and
   * the other does not.
   * <p>
   * The PartialGraph also includes three project_config rules: one for the android_library, and one
   * for each of the android_binary rules.
   */
  public ProjectWithModules createPartialGraphForTesting(
      @Nullable JavaPackageFinder javaPackageFinder) throws IOException {
    BuildRuleResolver ruleResolver = new BuildRuleResolver();

    // java_library //buck-out/android/com/facebook:R
    ruleResolver.buildAndAddToIndex(
        DefaultJavaLibraryRule.newJavaLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//buck-out/android/com/facebook:R"))
            .addSrc("buck-out/android/com/facebook/R.java")
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    // prebuilt_jar //third_party/guava:guava
    guava = ruleResolver.buildAndAddToIndex(
        PrebuiltJarRule.newPrebuiltJarRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//third_party/guava:guava"))
            .setBinaryJar(PATH_TO_GUAVA_JAR)
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    // android_resouce android_res/base:res
    ruleResolver.buildAndAddToIndex(
        AndroidResourceRule.newAndroidResourceRuleBuilder(
            new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//android_res/base:res"))
            .setRes("android_res/base/res")
            .setRDotJavaPackage("com.facebook")
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    // project_config android_res/base:res
    ProjectConfigRule projectConfigRuleForResource = ruleResolver.buildAndAddToIndex(
        ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//android_res/base:project_config"))
            .setSrcTarget(Optional.of(BuildTargetFactory.newInstance("//android_res/base:res")))
            .setSrcRoots(ImmutableList.of("res")));

    // java_library //java/src/com/facebook/grandchild:grandchild
    BuildTarget grandchild = BuildTargetFactory.newInstance(
        "//java/src/com/facebook/grandchild:grandchild");
    ruleResolver.buildAndAddToIndex(
        DefaultJavaLibraryRule.newJavaLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(grandchild)
            .addSrc("Grandchild.java")
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    // java_library //java/src/com/facebook/child:child
    ruleResolver.buildAndAddToIndex(
        DefaultJavaLibraryRule.newJavaLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance(
                "//java/src/com/facebook/child:child"))
            .addSrc("Child.java")
            .addDep(grandchild)
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    // java_library //java/src/com/facebook/exportlib:exportlib
    ruleResolver.buildAndAddToIndex(
        DefaultJavaLibraryRule.newJavaLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance(
                "//java/src/com/facebook/exportlib:exportlib"))
            .addSrc("ExportLib.java")
            .addDep(BuildTargetFactory.newInstance("//third_party/guava:guava"))
            .addExportedDep(BuildTargetFactory.newInstance("//third_party/guava:guava"))
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    // android_library //java/src/com/facebook/base:base
    ruleResolver.buildAndAddToIndex(
        AndroidLibraryRule.newAndroidLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//java/src/com/facebook/base:base"))
            .addSrc("Base.java")
            .addDep(BuildTargetFactory.newInstance("//buck-out/android/com/facebook:R"))
            .addDep(BuildTargetFactory.newInstance("//java/src/com/facebook/exportlib:exportlib"))
            .addDep(BuildTargetFactory.newInstance("//java/src/com/facebook/child:child"))
            .addDep(BuildTargetFactory.newInstance("//android_res/base:res"))
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    // project_config //java/src/com/facebook/base:project_config
    ProjectConfigRule projectConfigRuleForLibrary = ruleResolver.buildAndAddToIndex(
        ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance(
                "//java/src/com/facebook/base:project_config"))
            .setSrcTarget(Optional.of(BuildTargetFactory.newInstance(
                "//java/src/com/facebook/base:base")))
            .setSrcRoots(ImmutableList.of("src", "src-gen")));

    ProjectConfigRule projectConfigRuleForExportLibrary = ruleResolver.buildAndAddToIndex(
        ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance(
                "//java/src/com/facebook/exportlib:project_config"))
            .setSrcTarget(Optional.of(BuildTargetFactory.newInstance(
                "//java/src/com/facebook/exportlib:exportlib")))
            .setSrcRoots(ImmutableList.of("src")));

    // keystore //keystore:debug
    BuildTarget keystoreTarget = BuildTargetFactory.newInstance("//keystore:debug");
    ruleResolver.buildAndAddToIndex(
        Keystore.newKeystoreBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(keystoreTarget)
            .setStore("keystore/debug.keystore")
            .setProperties("keystore/debug.keystore.properties")
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    // android_binary //foo:app — this one excludes Guava from dex (no_dx).
    ruleResolver.buildAndAddToIndex(
        AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//foo:app"))
            .addClasspathDep(BuildTargetFactory.newInstance("//java/src/com/facebook/base:base"))
            .setManifest(new FileSourcePath("foo/AndroidManifest.xml"))
            .setTarget("Google Inc.:Google APIs:16")
            .setKeystore(keystoreTarget)
            .addBuildRuleToExcludeFromDex(
                BuildTargetFactory.newInstance("//third_party/guava:guava")));

    // project_config //foo:project_config
    ProjectConfigRule projectConfigRuleUsingNoDx = ruleResolver.buildAndAddToIndex(
        ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//foo:project_config"))
            .setSrcTarget(Optional.of(BuildTargetFactory.newInstance("//foo:app"))));

    // android_binary //bar:app — identical but with no no_dx entry.
    ruleResolver.buildAndAddToIndex(
        AndroidBinaryRule.newAndroidBinaryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//bar:app"))
            .addClasspathDep(BuildTargetFactory.newInstance("//java/src/com/facebook/base:base"))
            .setManifest(new FileSourcePath("foo/AndroidManifest.xml"))
            .setTarget("Google Inc.:Google APIs:16")
            .setKeystore(keystoreTarget));

    // project_config //bar:project_config
    ProjectConfigRule projectConfigRule = ruleResolver.buildAndAddToIndex(
        ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//bar:project_config"))
            .setSrcTarget(Optional.of(BuildTargetFactory.newInstance("//bar:app"))));

    return getModulesForPartialGraph(ruleResolver,
        ImmutableList.of(
            projectConfigRuleForExportLibrary,
            projectConfigRuleForLibrary,
            projectConfigRuleForResource,
            projectConfigRuleUsingNoDx,
            projectConfigRule),
        javaPackageFinder);
  }

  /** Verifies the .iml-relative path to the per-module gen directory. */
  @Test
  public void testGenerateRelativeGenPath() {
    String basePathOfModuleWithSlash = "android_res/com/facebook/gifts/";
    Path expectedRelativePathToGen =
        java.nio.file.Paths.get("/../../../../buck-out/android/android_res/com/facebook/gifts/gen");
    assertEquals(
        expectedRelativePathToGen, Project.generateRelativeGenPath(basePathOfModuleWithSlash));
  }

  /**
   * This is an important test that verifies that the {@code no_dx} argument for an
   * {@code android_binary} is handled appropriately when generating an IntelliJ project.
   */
  @Test
  public void testProject() throws IOException {
    JavaPackageFinder javaPackageFinder = EasyMock.createMock(JavaPackageFinder.class);
    EasyMock.expect(javaPackageFinder.findJavaPackageForPath("foo/module_foo.iml")).andReturn("");
    EasyMock.expect(javaPackageFinder.findJavaPackageForPath("bar/module_bar.iml")).andReturn("");
    EasyMock.replay(javaPackageFinder);

    ProjectWithModules projectWithModules = createPartialGraphForTesting(javaPackageFinder);
    Project project = projectWithModules.project;
    PartialGraph partialGraph = project.getPartialGraph();
    List<Module> modules = projectWithModules.modules;

    assertEquals("Should be one module for the java_library, one for the android_library, " +
        "one module for the android_resource, and one for each android_binary", 5, modules.size());

    // Check the values of the module that corresponds to the android_library.
    Module javaLibraryModule = modules.get(0);
    assertSame(getRuleById("//java/src/com/facebook/exportlib:exportlib", partialGraph),
        javaLibraryModule.srcRule);
    assertEquals("module_java_src_com_facebook_exportlib", javaLibraryModule.name);
    assertEquals("java/src/com/facebook/exportlib/module_java_src_com_facebook_exportlib.iml",
        javaLibraryModule.pathToImlFile);
    assertListEquals(
        ImmutableList.of(SourceFolder.SRC),
        javaLibraryModule.sourceFolders);

    // Check the dependencies.
    DependentModule inheritedJdk = DependentModule.newInheritedJdk();
    DependentModule guavaAsProvidedDep = DependentModule.newLibrary(
        guava.getBuildTarget(), "third_party_guava_guava_10_0_1_jar");
    guavaAsProvidedDep.scope = "PROVIDED";
    assertListEquals(
        ImmutableList.of(
            DependentModule.newSourceFolder(),
            guavaAsProvidedDep,
            DependentModule.newStandardJdk()),
        javaLibraryModule.dependencies);

    // Check the values of the module that corresponds to the android_library.
    Module androidLibraryModule = modules.get(1);
    assertSame(getRuleById("//java/src/com/facebook/base:base", partialGraph),
        androidLibraryModule.srcRule);
    assertEquals("module_java_src_com_facebook_base", androidLibraryModule.name);
    assertEquals("java/src/com/facebook/base/module_java_src_com_facebook_base.iml",
        androidLibraryModule.pathToImlFile);
    assertListEquals(
        ImmutableList.of(
            SourceFolder.SRC,
            new SourceFolder("file://$MODULE_DIR$/src-gen", false /* isTestSource */),
            SourceFolder.GEN),
        androidLibraryModule.sourceFolders);
    assertEquals(Boolean.TRUE, androidLibraryModule.hasAndroidFacet);
    assertEquals(Boolean.TRUE, androidLibraryModule.isAndroidLibraryProject);
    assertEquals(null, androidLibraryModule.proguardConfigPath);
    assertEquals(null, androidLibraryModule.resFolder);

    // Check the dependencies.
    DependentModule androidResourceAsProvidedDep = DependentModule.newModule(
        BuildTargetFactory.newInstance("//android_res/base:res"),
        "module_android_res_base");
    DependentModule childAsProvidedDep = DependentModule.newModule(
        BuildTargetFactory.newInstance("//java/src/com/facebook/child:child"),
        "module_java_src_com_facebook_child");
    DependentModule exportDepsAsProvidedDep = DependentModule.newModule(
        BuildTargetFactory.newInstance("//java/src/com/facebook/exportlib:exportlib"),
        "module_java_src_com_facebook_exportlib");
    assertListEquals(
        ImmutableList.of(
            DependentModule.newSourceFolder(),
            guavaAsProvidedDep,
            androidResourceAsProvidedDep,
            childAsProvidedDep,
            exportDepsAsProvidedDep,
            inheritedJdk),
        androidLibraryModule.dependencies);

    // Check the values of the module that corresponds to the android_binary that uses no_dx.
    Module androidResourceModule = modules.get(2);
    assertSame(getRuleById("//android_res/base:res", partialGraph), androidResourceModule.srcRule);
    assertEquals("/res", androidResourceModule.resFolder);

    // Check the values of the module that corresponds to the android_binary that uses no_dx.
    Module androidBinaryModuleNoDx = modules.get(3);
    assertSame(getRuleById("//foo:app", partialGraph), androidBinaryModuleNoDx.srcRule);
    assertEquals("module_foo", androidBinaryModuleNoDx.name);
    assertEquals("foo/module_foo.iml", androidBinaryModuleNoDx.pathToImlFile);
    assertListEquals(ImmutableList.of(SourceFolder.GEN), androidBinaryModuleNoDx.sourceFolders);
    assertEquals(Boolean.TRUE, androidBinaryModuleNoDx.hasAndroidFacet);
    assertEquals(Boolean.FALSE, androidBinaryModuleNoDx.isAndroidLibraryProject);
    assertEquals(null, androidBinaryModuleNoDx.proguardConfigPath);
    assertEquals(null, androidBinaryModuleNoDx.resFolder);
    assertEquals("../keystore/debug.keystore", androidBinaryModuleNoDx.keystorePath);

    // Check the dependencies.
    DependentModule grandchildAsProvidedDep = DependentModule.newModule(
        BuildTargetFactory.newInstance("//java/src/com/facebook/grandchild:grandchild"),
        "module_java_src_com_facebook_grandchild"
    );
    DependentModule androidLibraryDep = DependentModule.newModule(
        androidLibraryModule.srcRule.getBuildTarget(), "module_java_src_com_facebook_base");
    assertEquals(
        ImmutableList.of(
            DependentModule.newSourceFolder(),
            guavaAsProvidedDep,
            androidLibraryDep,
            androidResourceAsProvidedDep,
            childAsProvidedDep,
            exportDepsAsProvidedDep,
            grandchildAsProvidedDep,
            inheritedJdk),
        androidBinaryModuleNoDx.dependencies);

    // Check the values of the module that corresponds to the android_binary with an empty no_dx.
    Module androidBinaryModuleEmptyNoDx = modules.get(4);
    assertSame(getRuleById("//bar:app", partialGraph), androidBinaryModuleEmptyNoDx.srcRule);
    assertEquals("module_bar", androidBinaryModuleEmptyNoDx.name);
    assertEquals("bar/module_bar.iml", androidBinaryModuleEmptyNoDx.pathToImlFile);
    assertListEquals(
        ImmutableList.of(SourceFolder.GEN), androidBinaryModuleEmptyNoDx.sourceFolders);
    assertEquals(Boolean.TRUE, androidBinaryModuleEmptyNoDx.hasAndroidFacet);
    assertEquals(Boolean.FALSE, androidBinaryModuleEmptyNoDx.isAndroidLibraryProject);
    assertEquals(null, androidBinaryModuleEmptyNoDx.proguardConfigPath);
    assertEquals(null, androidBinaryModuleEmptyNoDx.resFolder);
    assertEquals("../keystore/debug.keystore", androidBinaryModuleEmptyNoDx.keystorePath);

    // Check the dependencies.
    DependentModule guavaAsCompiledDep = DependentModule.newLibrary(
        guava.getBuildTarget(), "third_party_guava_guava_10_0_1_jar");
    assertEquals("Important that Guava is listed as a 'COMPILED' dependency here because it is " +
        "only listed as a 'PROVIDED' dependency earlier.",
        ImmutableList.of(
            DependentModule.newSourceFolder(),
            guavaAsCompiledDep,
            androidLibraryDep,
            androidResourceAsProvidedDep,
            childAsProvidedDep,
            exportDepsAsProvidedDep,
            grandchildAsProvidedDep,
            inheritedJdk),
        androidBinaryModuleEmptyNoDx.dependencies);

    // Check that the correct data was extracted to populate the .idea/libraries directory.
    BuildRule guava = getRuleById("//third_party/guava:guava", partialGraph);
    assertSame(guava, Iterables.getOnlyElement(project.getLibraryJars()));
  }

  @Test
  public void testPrebuiltJarIncludesDeps() throws IOException {
    BuildRuleResolver ruleResolver = new BuildRuleResolver();

    // Build up a the graph that corresponds to:
    //
    // android_library(
    //   name = 'example',
    //   deps = [
    //     ':easymock',
    //   ],
    // )
    //
    // prebuilt_jar(
    //   name = 'easymock',
    //   binary_jar = 'easymock.jar',
    //   deps = [
    //     ':cglib',
    //     ':objenesis',
    //   ],
    // )
    //
    // prebuilt_jar(
    //   name = 'cglib',
    //   binary_jar = 'cglib.jar',
    // )
    //
    // prebuilt_jar(
    //   name = 'objenesis',
    //   binary_jar = 'objenesis.jar',
    // )
    //
    // project_config(
    //   src_target = ':example',
    // )
    PrebuiltJarRule cglib = ruleResolver.buildAndAddToIndex(
        PrebuiltJarRule.newPrebuiltJarRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//third_party/java/easymock:cglib"))
            .setBinaryJar("third_party/java/easymock/cglib.jar"));

    PrebuiltJarRule objenesis = ruleResolver.buildAndAddToIndex(
        PrebuiltJarRule.newPrebuiltJarRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(
                BuildTargetFactory.newInstance("//third_party/java/easymock:objenesis"))
            .setBinaryJar("third_party/java/easymock/objenesis.jar"));

    PrebuiltJarRule easymock = ruleResolver.buildAndAddToIndex(
        PrebuiltJarRule.newPrebuiltJarRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(
                BuildTargetFactory.newInstance("//third_party/java/easymock:easymock"))
            .setBinaryJar("third_party/java/easymock/easymock.jar")
            .addDep(BuildTargetFactory.newInstance("//third_party/java/easymock:cglib"))
            .addDep(BuildTargetFactory.newInstance("//third_party/java/easymock:objenesis")));

    BuildTarget easyMockExampleTarget = BuildTargetFactory.newInstance(
        "//third_party/java/easymock:example");
    ruleResolver.buildAndAddToIndex(
        AndroidLibraryRule.newAndroidLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(easyMockExampleTarget)
            .addDep(BuildTargetFactory.newInstance("//third_party/java/easymock:easymock")));

    ProjectConfigRule projectConfig = ruleResolver.buildAndAddToIndex(
        ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(
                BuildTargetFactory.newInstance("//third_party/java/easymock:project_config"))
            .setSrcTarget(Optional.of(easyMockExampleTarget)));

    ProjectWithModules projectWithModules = getModulesForPartialGraph(ruleResolver,
        ImmutableList.of(projectConfig),
        null /* javaPackageFinder */);
    List<Module> modules = projectWithModules.modules;

    // Verify that the single Module that is created transitively includes all JAR files.
    assertEquals("Should be one module for the android_library", 1, modules.size());
    Module androidLibraryModule = Iterables.getOnlyElement(modules);
    assertListEquals(ImmutableList.of(
            DependentModule.newSourceFolder(),
            DependentModule.newLibrary(
                easymock.getBuildTarget(), "third_party_java_easymock_easymock_jar"),
            DependentModule.newLibrary(
                cglib.getBuildTarget(), "third_party_java_easymock_cglib_jar"),
            DependentModule.newLibrary(
                objenesis.getBuildTarget(), "third_party_java_easymock_objenesis_jar"),
            DependentModule.newInheritedJdk()),
        androidLibraryModule.dependencies);
  }

  @Test
  public void testIfModuleIsBothTestAndCompileDepThenTreatAsCompileDep() throws IOException {
    BuildRuleResolver ruleResolver = new BuildRuleResolver();

    // Create a java_library() and a java_test() that both depend on Guava.
    // When they are part of the same project_config() rule, then the resulting module should
    // include Guava as scope="COMPILE" in IntelliJ.
    PrebuiltJarRule guava = ruleResolver.buildAndAddToIndex(
        PrebuiltJarRule.newPrebuiltJarRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//third_party/java/guava:guava"))
            .setBinaryJar("third_party/java/guava.jar")
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    ruleResolver.buildAndAddToIndex(
        DefaultJavaLibraryRule.newJavaLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//java/com/example/base:base"))
            .addDep(BuildTargetFactory.newInstance("//third_party/java/guava:guava")));

    ruleResolver.buildAndAddToIndex(
        JavaTestRule.newJavaTestRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//java/com/example/base:tests"))
            .addDep(BuildTargetFactory.newInstance("//third_party/java/guava:guava")));

    ProjectConfigRule projectConfig = ruleResolver.buildAndAddToIndex(
        ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(
                BuildTargetFactory.newInstance("//java/com/example/base:project_config"))
            .setSrcTarget(
                Optional.of(BuildTargetFactory.newInstance("//java/com/example/base:base")))
            .setTestTarget(
                Optional.of(BuildTargetFactory.newInstance("//java/com/example/base:tests")))
            .setTestRoots(ImmutableList.of("tests")));

    ProjectWithModules projectWithModules = getModulesForPartialGraph(ruleResolver,
        ImmutableList.of(projectConfig),
        null /* javaPackageFinder */);
    List<Module> modules = projectWithModules.modules;
    assertEquals(1, modules.size());
    Module comExampleBaseModule = Iterables.getOnlyElement(modules);

    assertListEquals(ImmutableList.of(
            DependentModule.newSourceFolder(),
            DependentModule.newLibrary(guava.getBuildTarget(), "third_party_java_guava_jar"),
            DependentModule.newStandardJdk()),
        comExampleBaseModule.dependencies);
  }

  /**
   * In the context of Robolectric, httpcore-4.0.1.jar needs to be loaded before the android.jar
   * associated with the Android SDK. Both httpcore-4.0.1.jar and android.jar define
   * org.apache.http.params.BasicHttpParams; however, only httpcore-4.0.1.jar contains a real
   * implementation of BasicHttpParams whereas android.jar contains a stub implementation of
   * BasicHttpParams.
   * <p>
   * One way to fix this problem would be to "tag" httpcore-4.0.1.jar to indicate that it must
   * appear before the Android SDK (or anything that transitively depends on the Android SDK) when
   * listing dependencies for IntelliJ. This would be a giant kludge to the prebuilt_jar rule, so
   * instead we just list jars before modules within an &lt;orderEntry scope="TEST"/> or an
   * &lt;orderEntry scope="COMPILE"/> group.
   */
  @Test
  public void testThatJarsAreListedBeforeModules() throws IOException {
    BuildRuleResolver ruleResolver = new BuildRuleResolver();

    JavaLibraryRule supportV4 = ruleResolver.buildAndAddToIndex(
        DefaultJavaLibraryRule.newJavaLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance("//java/com/android/support/v4:v4"))
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    PrebuiltJarRule httpCore = ruleResolver.buildAndAddToIndex(
        PrebuiltJarRule.newPrebuiltJarRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(
                BuildTargetFactory.newInstance("//third_party/java/httpcore:httpcore"))
            .setBinaryJar("httpcore-4.0.1.jar")
            .addVisibilityPattern(BuildTargetPattern.MATCH_ALL));

    // The support-v4 library is loaded as a java_library() rather than a prebuilt_jar() because it
    // contains our local changes to the library.
    BuildTarget robolectricTarget =
        BuildTargetFactory.newInstance("//third_party/java/robolectric:robolectric");
    ruleResolver.buildAndAddToIndex(
        DefaultJavaLibraryRule.newJavaLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(robolectricTarget)
            .addDep(BuildTargetFactory.newInstance("//java/com/android/support/v4:v4"))
            .addDep(BuildTargetFactory.newInstance("//third_party/java/httpcore:httpcore")));

    ProjectConfigRule projectConfig = ruleResolver.buildAndAddToIndex(
        ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
            .setBuildTarget(BuildTargetFactory.newInstance(
                "//third_party/java/robolectric:project_config"))
            .setSrcTarget(Optional.of(robolectricTarget))
            .setSrcRoots(ImmutableList.of("src/main/java")));

    ProjectWithModules projectWithModules = getModulesForPartialGraph(ruleResolver,
        ImmutableList.of(projectConfig),
        null /* javaPackageFinder */);
    List<Module> modules = projectWithModules.modules;
    assertEquals("Should be one module for the android_library", 1, modules.size());
    Module robolectricModule = Iterables.getOnlyElement(modules);

    assertListEquals(
        "It is imperative that httpcore-4.0.1.jar be listed before the support v4 library, " +
            "or else when robolectric is listed as a dependency, " +
            "org.apache.http.params.BasicHttpParams will be loaded from android.jar instead of " +
            "httpcore-4.0.1.jar.",
        ImmutableList.of(
            DependentModule.newSourceFolder(),
            DependentModule.newLibrary(httpCore.getBuildTarget(), "httpcore_4_0_1_jar"),
            DependentModule.newModule(
                supportV4.getBuildTarget(), "module_java_com_android_support_v4"),
            DependentModule.newStandardJdk()),
        robolectricModule.dependencies);
  }

  // NOTE(review): this chunk ends mid-method; the remainder of this test (and
  // the rest of the class) continues past the visible portion of the file.
  @Test
  public void testCreatePathToProjectDotPropertiesFileForModule() {
    Module rootModule = new Module(null /* buildRule */,
        BuildTargetFactory.newInstance("//:project_config"));
    rootModule.pathToImlFile = "fb4a.iml";
    assertEquals("project.properties", Project.createPathToProjectDotPropertiesFileFor(rootModule));
Module someModule = new Module(null /* buildRule */, BuildTargetFactory.newInstance("//java/com/example/base:project_config")); someModule.pathToImlFile = "java/com/example/base/base.iml"; assertEquals("java/com/example/base/project.properties", Project.createPathToProjectDotPropertiesFileFor(someModule)); } /** * A project_config()'s src_roots argument can be {@code None}, {@code []}, or a non-empty array. * Each of these should be treated differently. */ @Test public void testSrcRoots() throws IOException { // Create a project_config() with src_roots=None. BuildRuleResolver ruleResolver1 = new BuildRuleResolver(); ruleResolver1.buildAndAddToIndex( AndroidResourceRule.newAndroidResourceRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//resources/com/example:res"))); ProjectConfigRule projectConfigNullSrcRoots = ruleResolver1.buildAndAddToIndex( ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//resources/com/example:project_config")) .setSrcTarget(Optional.of(BuildTargetFactory.newInstance("//resources/com/example:res"))) .setSrcRoots(null)); ProjectWithModules projectWithModules1 = getModulesForPartialGraph(ruleResolver1, ImmutableList.of(projectConfigNullSrcRoots), null /* javaPackageFinder */); // Verify that the correct source folders are created. assertEquals(1, projectWithModules1.modules.size()); Module moduleNoJavaSource = projectWithModules1.modules.get(0); assertListEquals( "Only source tmp should be gen/ when setSrcRoots(null) is specified.", ImmutableList.of(SourceFolder.GEN), moduleNoJavaSource.sourceFolders); // Create a project_config() with src_roots=[]. 
BuildRuleResolver ruleResolver2 = new BuildRuleResolver(); ruleResolver2.buildAndAddToIndex( AndroidLibraryRule.newAndroidLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//java/com/example/base:base"))); ProjectConfigRule inPackageProjectConfig = ruleResolver2.buildAndAddToIndex( ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//java/com/example/base:project_config")) .setSrcTarget(Optional.of(BuildTargetFactory.newInstance("//java/com/example/base:base"))) .setSrcRoots(ImmutableList.<String>of())); // Verify that the correct source folders are created. JavaPackageFinder javaPackageFinder = EasyMock.createMock(JavaPackageFinder.class); EasyMock.expect(javaPackageFinder.findJavaPackageForPath( "java/com/example/base/module_java_com_example_base.iml")).andReturn("com.example.base"); EasyMock.replay(javaPackageFinder); ProjectWithModules projectWithModules2 = getModulesForPartialGraph(ruleResolver2, ImmutableList.of(inPackageProjectConfig), javaPackageFinder); EasyMock.verify(javaPackageFinder); assertEquals(1, projectWithModules2.modules.size()); Module moduleWithPackagePrefix = projectWithModules2.modules.get(0); assertListEquals( "The current directory should be a source tmp with a package prefix " + "as well as the gen/ directory.", ImmutableList.of( new SourceFolder("file://$MODULE_DIR$", false /* isTestSource */, "com.example.base"), SourceFolder.GEN), moduleWithPackagePrefix.sourceFolders); // Create a project_config() with src_roots=['src']. 
BuildRuleResolver ruleResolver3 = new BuildRuleResolver(); ruleResolver3.buildAndAddToIndex( AndroidLibraryRule.newAndroidLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//java/com/example/base:base"))); ProjectConfigRule hasSrcFolderProjectConfig = ruleResolver3.buildAndAddToIndex( ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//java/com/example/base:project_config")) .setSrcTarget(Optional.of(BuildTargetFactory.newInstance("//java/com/example/base:base"))) .setSrcRoots(ImmutableList.of("src"))); ProjectWithModules projectWithModules3 = getModulesForPartialGraph(ruleResolver3, ImmutableList.of(hasSrcFolderProjectConfig), null /* javaPackageFinder */); // Verify that the correct source folders are created. assertEquals(1, projectWithModules3.modules.size()); Module moduleHasSrcFolder = projectWithModules3.modules.get(0); assertListEquals( "Both src/ and gen/ should be source folders.", ImmutableList.of( new SourceFolder("file://$MODULE_DIR$/src", false /* isTestSource */), SourceFolder.GEN), moduleHasSrcFolder.sourceFolders); } private static class ProjectWithModules { private final Project project; private final ImmutableList<Module> modules; private ProjectWithModules(Project project, ImmutableList<Module> modules) { this.project = project; this.modules = modules; } } private ProjectWithModules getModulesForPartialGraph( BuildRuleResolver ruleResolver, ImmutableList<ProjectConfigRule> projectConfigs, @Nullable JavaPackageFinder javaPackageFinder) throws IOException { if (javaPackageFinder == null) { javaPackageFinder = EasyMock.createMock(JavaPackageFinder.class); } DependencyGraph graph = RuleMap.createGraphFromBuildRules(ruleResolver); List<BuildTarget> targets = ImmutableList.copyOf(Iterables.transform(projectConfigs, new Function<ProjectConfigRule, BuildTarget>() { @Override public BuildTarget 
apply(ProjectConfigRule rule) { return rule.getBuildTarget(); } })); PartialGraph partialGraph = PartialGraphFactory.newInstance(graph, targets); // Create the Project. ExecutionContext executionContext = EasyMock.createMock(ExecutionContext.class); ProjectFilesystem projectFilesystem = EasyMock.createMock(ProjectFilesystem.class); Properties keystoreProperties = new Properties(); keystoreProperties.put("key.alias", "androiddebugkey"); keystoreProperties.put("key.store.password", "android"); keystoreProperties.put("key.alias.password", "android"); EasyMock.expect(projectFilesystem.readPropertiesFile( "keystore/debug.keystore.properties")) .andReturn(keystoreProperties).anyTimes(); ImmutableMap<String, String> basePathToAliasMap = ImmutableMap.of(); Project project = new Project( partialGraph, basePathToAliasMap, javaPackageFinder, executionContext, projectFilesystem, /* pathToDefaultAndroidManifest */ Optional.<String>absent(), /* pathToPostProcessScript */ Optional.<String>absent(), BuckTestConstant.PYTHON_INTERPRETER); // Execute Project's business logic. 
EasyMock.replay(executionContext, projectFilesystem);

    // Exercise the code under test: module creation for the supplied project_configs.
    List<Module> modules = project.createModulesForProjectConfigs();

    EasyMock.verify(executionContext, projectFilesystem);
    return new ProjectWithModules(project, ImmutableList.copyOf(modules));
  }

  /**
   * Looks up a build rule in the partial graph by its fully-qualified id.
   * <p>
   * NOTE(review): assumes {@code id} has exactly the form "base/path:name" —
   * a malformed id without a ':' would make {@code parts[1]} throw
   * ArrayIndexOutOfBoundsException before the checkNotNull fires. Acceptable
   * for a test helper; confirm callers always pass well-formed ids.
   */
  private static BuildRule getRuleById(String id, PartialGraph graph) {
    String[] parts = id.split(":");
    BuildRule rule = graph.getDependencyGraph().findBuildRuleByTarget(
        new BuildTarget(parts[0], parts[1]));
    // Fail fast with the offending id rather than NPE-ing downstream.
    Preconditions.checkNotNull(rule, "No rule for %s", id);
    return rule;
  }

  @Test
  public void testNdkLibraryHasCorrectPath() throws IOException {
    BuildRuleResolver ruleResolver = new BuildRuleResolver();

    // Build up the graph that corresponds to:
    //
    // ndk_library(
    //   name = 'foo-jni'
    // )
    //
    // project_config(
    //   src_target = ':foo-jni',
    // )
    BuildTarget fooJni = BuildTargetFactory.newInstance("//third_party/java/foo/jni:foo-jni");
    BuildRule ndkLibrary = ruleResolver.buildAndAddToIndex(
        NdkLibrary.newNdkLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
        .setBuildTarget(fooJni)
        .addSrc("Android.mk")
        .addVisibilityPattern(new SingletonBuildTargetPattern("//third_party/java/foo:foo")));
    ProjectConfigRule ndkProjectConfig = ruleResolver.buildAndAddToIndex(
        ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams())
        .setBuildTarget(BuildTargetFactory.newInstance("//third_party/java/foo/jni:project_config"))
        .setSrcTarget(Optional.of(fooJni)));

    ProjectWithModules projectWithModules = getModulesForPartialGraph(ruleResolver,
        ImmutableList.of(ndkProjectConfig),
        null /* javaPackageFinder */);
    List<Module> modules = projectWithModules.modules;

    assertEquals("Should be one module for the ndk_library.", 1, modules.size());
    Module androidLibraryModule = Iterables.getOnlyElement(modules);
    assertListEquals(ImmutableList.of(
            DependentModule.newSourceFolder(),
            DependentModule.newInheritedJdk()),
        androidLibraryModule.dependencies);
    assertEquals(
        String.format("../../../../%s", ((NdkLibrary)
ndkLibrary.getBuildable()).getLibraryPath()), androidLibraryModule.nativeLibs); } @Test public void shouldThrowAnExceptionIfAModuleIsMissingADependencyWhenGeneratingProjectFiles() throws IOException { BuildRuleResolver ruleResolver = new BuildRuleResolver(); DefaultJavaLibraryRule ex1 = ruleResolver.buildAndAddToIndex( DefaultJavaLibraryRule.newJavaLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//example/parent:ex1")) .addSrc("DoesNotExist.java") .addVisibilityPattern(BuildTargetPattern.MATCH_ALL)); DefaultJavaLibraryRule ex2 = ruleResolver.buildAndAddToIndex( DefaultJavaLibraryRule.newJavaLibraryRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//example/child:ex2")) .addSrc("AlsoDoesNotExist.java") .addDep(ex1.getBuildTarget()) .addVisibilityPattern(BuildTargetPattern.MATCH_ALL)); DefaultJavaLibraryRule tests = ruleResolver.buildAndAddToIndex( JavaTestRule.newJavaTestRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//example/child:tests")) .addSrc("SomeTestFile.java") .addDep(ex2.getBuildTarget()) .addVisibilityPattern(BuildTargetPattern.MATCH_ALL)); ProjectConfigRule config = ruleResolver.buildAndAddToIndex( ProjectConfigRule.newProjectConfigRuleBuilder(new FakeAbstractBuildRuleBuilderParams()) .setBuildTarget(BuildTargetFactory.newInstance("//example/child:config")) .setSrcTarget(Optional.of(ex2.getBuildTarget())) .setTestTarget(Optional.of(tests.getBuildTarget()))); ProjectWithModules projectWithModules = getModulesForPartialGraph( ruleResolver, ImmutableList.of(config), null); Module module = Iterables.getOnlyElement(projectWithModules.modules); List<Module> modules = projectWithModules.project.createModulesForProjectConfigs(); Map<String, Module> map = projectWithModules.project.buildNameToModuleMap(modules); try { projectWithModules.project.writeProjectDotPropertiesFile(module, map); 
fail("Should have thrown a HumanReadableException"); } catch (HumanReadableException e) { assertEquals("You must define a project_config() in example/child/BUCK containing " + "//example/parent:ex1. The project_config() in //example/child:config transitively " + "depends on it.", e.getHumanReadableErrorMessage().replace("\\", "/")); } } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Random; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.client.HdfsDataInputStream; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates; import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; import org.apache.hadoop.hdfs.server.namenode.HostFileManager; import 
org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.test.PathUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

/**
 * This class tests the decommissioning of nodes.
 */
public class TestDecommission {
  public static final Log LOG = LogFactory.getLog(TestDecommission.class);
  static final long seed = 0xDEADBEEFL;      // fixed seed so file contents are reproducible
  static final int blockSize = 8192;
  static final int fileSize = 16384;         // two blocks at blockSize=8192
  static final int HEARTBEAT_INTERVAL = 1; // heartbeat interval in seconds
  static final int BLOCKREPORT_INTERVAL_MSEC = 1000; //block report in msec
  static final int NAMENODE_REPLICATION_INTERVAL = 1; //replication interval

  // Source of randomness for picking the datanode to decommission.
  Random myrand = new Random();
  // dfs.hosts include file written per-test under the local working dir.
  Path hostsFile;
  // dfs.hosts.exclude file; adding a node here triggers decommissioning.
  Path excludeFile;
  FileSystem localFileSys;
  Configuration conf;
  MiniDFSCluster cluster = null;

  /**
   * Creates empty hosts/exclude files and tunes HDFS timing knobs
   * (heartbeat, block report, replication intervals) so decommissioning
   * progresses quickly enough for the test timeouts.
   */
  @Before
  public void setup() throws IOException {
    conf = new HdfsConfiguration();
    // Set up the hosts/exclude files.
    localFileSys = FileSystem.getLocal(conf);
    Path workingDir = localFileSys.getWorkingDirectory();
    Path dir = new Path(workingDir, PathUtils.getTestDirName(getClass()) + "/work-dir/decommission");
    hostsFile = new Path(dir, "hosts");
    excludeFile = new Path(dir, "exclude");

    // Setup conf
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_REPLICATION_CONSIDERLOAD_KEY, false);
    conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
    conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 2000);
    conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, HEARTBEAT_INTERVAL);
    conf.setInt(DFSConfigKeys.DFS_BLOCKREPORT_INTERVAL_MSEC_KEY, BLOCKREPORT_INTERVAL_MSEC);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_PENDING_TIMEOUT_SEC_KEY, 4);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, NAMENODE_REPLICATION_INTERVAL);

    // Start with empty include/exclude lists (no restrictions, nothing excluded).
    writeConfigFile(hostsFile, null);
    writeConfigFile(excludeFile, null);
  }

  @After
public void teardown() throws IOException { cleanupFile(localFileSys, excludeFile.getParent()); if (cluster != null) { cluster.shutdown(); } } private void writeConfigFile(Path name, ArrayList<String> nodes) throws IOException { // delete if it already exists if (localFileSys.exists(name)) { localFileSys.delete(name, true); } FSDataOutputStream stm = localFileSys.create(name); if (nodes != null) { for (Iterator<String> it = nodes.iterator(); it.hasNext();) { String node = it.next(); stm.writeBytes(node); stm.writeBytes("\n"); } } stm.close(); } private void writeFile(FileSystem fileSys, Path name, int repl) throws IOException { // create and write a file that contains three blocks of data FSDataOutputStream stm = fileSys.create(name, true, fileSys.getConf() .getInt(CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096), (short) repl, blockSize); byte[] buffer = new byte[fileSize]; Random rand = new Random(seed); rand.nextBytes(buffer); stm.write(buffer); stm.close(); LOG.info("Created file " + name + " with " + repl + " replicas."); } /** * Verify that the number of replicas are as expected for each block in * the given file. * For blocks with a decommissioned node, verify that their replication * is 1 more than what is specified. * For blocks without decommissioned nodes, verify their replication is * equal to what is specified. * * @param downnode - if null, there is no decommissioned node for this file. * @return - null if no failure found, else an error message string. 
*/ private String checkFile(FileSystem fileSys, Path name, int repl, String downnode, int numDatanodes) throws IOException { boolean isNodeDown = (downnode != null); // need a raw stream assertTrue("Not HDFS:"+fileSys.getUri(), fileSys instanceof DistributedFileSystem); HdfsDataInputStream dis = (HdfsDataInputStream) ((DistributedFileSystem)fileSys).open(name); Collection<LocatedBlock> dinfo = dis.getAllBlocks(); for (LocatedBlock blk : dinfo) { // for each block int hasdown = 0; DatanodeInfo[] nodes = blk.getLocations(); for (int j = 0; j < nodes.length; j++) { // for each replica if (isNodeDown && nodes[j].getXferAddr().equals(downnode)) { hasdown++; //Downnode must actually be decommissioned if (!nodes[j].isDecommissioned()) { return "For block " + blk.getBlock() + " replica on " + nodes[j] + " is given as downnode, " + "but is not decommissioned"; } //Decommissioned node (if any) should only be last node in list. if (j != nodes.length - 1) { return "For block " + blk.getBlock() + " decommissioned node " + nodes[j] + " was not last node in list: " + (j + 1) + " of " + nodes.length; } LOG.info("Block " + blk.getBlock() + " replica on " + nodes[j] + " is decommissioned."); } else { //Non-downnodes must not be decommissioned if (nodes[j].isDecommissioned()) { return "For block " + blk.getBlock() + " replica on " + nodes[j] + " is unexpectedly decommissioned"; } } } LOG.info("Block " + blk.getBlock() + " has " + hasdown + " decommissioned replica."); if(Math.min(numDatanodes, repl+hasdown) != nodes.length) { return "Wrong number of replicas for block " + blk.getBlock() + ": " + nodes.length + ", expected " + Math.min(numDatanodes, repl+hasdown); } } return null; } private void cleanupFile(FileSystem fileSys, Path name) throws IOException { assertTrue(fileSys.exists(name)); fileSys.delete(name, true); assertTrue(!fileSys.exists(name)); } /* * decommission one random node and wait for each to reach the * given {@code waitForState}. 
   */
  private DatanodeInfo decommissionNode(int nnIndex,
                                  ArrayList<DatanodeInfo>decommissionedNodes,
                                  AdminStates waitForState)
    throws IOException {
    DFSClient client = getDfsClient(cluster.getNameNode(nnIndex), conf);
    DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);

    //
    // pick one datanode randomly.
    //
    // NOTE(review): this loop never terminates if every live node is already
    // decommissioned; callers appear to always leave at least one eligible
    // node — confirm before reusing this helper elsewhere.
    int index = 0;
    boolean found = false;
    while (!found) {
      index = myrand.nextInt(info.length);
      if (!info[index].isDecommissioned()) {
        found = true;
      }
    }
    String nodename = info[index].getXferAddr();
    LOG.info("Decommissioning node: " + nodename);

    // write nodename into the exclude file.
    // NOTE(review): previously-decommissioned nodes are re-added via
    // dn.getName() while the new node uses getXferAddr() — presumably these
    // return the same host:port string here; verify, otherwise earlier
    // exclusions could be silently dropped on rewrite.
    ArrayList<String> nodes = new ArrayList<String>();
    if (decommissionedNodes != null) {
      for (DatanodeInfo dn : decommissionedNodes) {
        nodes.add(dn.getName());
      }
    }
    nodes.add(nodename);
    writeConfigFile(excludeFile, nodes);
    // Tell the namenode to re-read the exclude file, which starts the
    // decommissioning of the chosen node.
    refreshNodes(cluster.getNamesystem(nnIndex), conf);
    DatanodeInfo ret = NameNodeAdapter.getDatanode(
        cluster.getNamesystem(nnIndex), info[index]);
    // Block until the node reaches the requested admin state.
    waitNodeState(ret, waitForState);
    return ret;
  }

  /* stop decommission of the datanode and wait for each to reach the NORMAL state */
  private void recomissionNode(DatanodeInfo decommissionedNode) throws IOException {
    LOG.info("Recommissioning node: " + decommissionedNode);
    // Clearing the exclude file and refreshing cancels decommissioning.
    writeConfigFile(excludeFile, null);
    refreshNodes(cluster.getNamesystem(), conf);
    waitNodeState(decommissionedNode, AdminStates.NORMAL);
  }

  /*
   * Wait till node is fully decommissioned.
   */
  private void waitNodeState(DatanodeInfo node,
                             AdminStates state) {
    // Poll the node's admin state once per heartbeat interval until it
    // matches. There is no timeout here — the @Test(timeout=...) on the
    // callers bounds the wait.
    boolean done = state == node.getAdminState();
    while (!done) {
      LOG.info("Waiting for node " + node + " to change state to "
               + state + " current state: " + node.getAdminState());
      try {
        Thread.sleep(HEARTBEAT_INTERVAL * 1000);
      } catch (InterruptedException e) {
        // nothing
        // NOTE(review): interrupt status is deliberately swallowed so the
        // poll continues; re-interrupting here would make the next sleep
        // throw immediately and busy-spin.
      }
      done = state == node.getAdminState();
    }
    LOG.info("node " + node + " reached the state " + state);
  }

  /* Get DFSClient to the namenode */
  private static DFSClient getDfsClient(NameNode nn,
      Configuration conf) throws IOException {
    return new DFSClient(nn.getNameNodeAddress(), conf);
  }

  /* Validate cluster has expected number of datanodes */
  private static void validateCluster(DFSClient client, int numDNs)
      throws IOException {
    DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
    assertEquals("Number of Datanodes ", numDNs, info.length);
  }

  /** Start a MiniDFSCluster
   * @throws IOException */
  private void startCluster(int numNameNodes, int numDatanodes,
      Configuration conf) throws IOException {
    // Federated topology: numNameNodes independent namespaces sharing the
    // same set of datanodes. Stored in the 'cluster' field for teardown.
    cluster = new MiniDFSCluster.Builder(conf)
      .nnTopology(MiniDFSNNTopology.simpleFederatedTopology(numNameNodes))
        .numDataNodes(numDatanodes).build();
    cluster.waitActive();
    // Every namenode must see all datanodes before the test proceeds.
    for (int i = 0; i < numNameNodes; i++) {
      DFSClient client = getDfsClient(cluster.getNameNode(i), conf);
      validateCluster(client, numDatanodes);
    }
  }

  /* Ask the namenode to re-read the hosts/exclude files. */
  static void refreshNodes(final FSNamesystem ns, final Configuration conf
      ) throws IOException {
    ns.getBlockManager().getDatanodeManager().refreshNodes(conf);
  }

  private void verifyStats(NameNode namenode, FSNamesystem fsn,
      DatanodeInfo node, boolean decommissioning) throws InterruptedException,
      IOException {
    // Do the stats check over 10 iterations
    for (int i = 0; i < 10; i++) {
      long[] newStats = namenode.getRpcServer().getStats();

      // For decommissioning nodes, ensure capacity of the DN is no longer
      // counted. Only used space of the DN is counted in cluster capacity
      assertEquals(newStats[0], decommissioning ?
node.getDfsUsed() : node.getCapacity()); // Ensure cluster used capacity is counted for both normal and // decommissioning nodes assertEquals(newStats[1], node.getDfsUsed()); // For decommissioning nodes, remaining space from the DN is not counted assertEquals(newStats[2], decommissioning ? 0 : node.getRemaining()); // Ensure transceiver count is same as that DN assertEquals(fsn.getTotalLoad(), node.getXceiverCount()); Thread.sleep(HEARTBEAT_INTERVAL * 1000); // Sleep heart beat interval } } /** * Tests decommission for non federated cluster */ @Test(timeout=360000) public void testDecommission() throws IOException { testDecommission(1, 6); } /** * Tests decommission with replicas on the target datanode cannot be migrated * to other datanodes and satisfy the replication factor. Make sure the * datanode won't get stuck in decommissioning state. */ @Test(timeout = 360000) public void testDecommission2() throws IOException { LOG.info("Starting test testDecommission"); int numNamenodes = 1; int numDatanodes = 4; conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 3); startCluster(numNamenodes, numDatanodes, conf); ArrayList<ArrayList<DatanodeInfo>> namenodeDecomList = new ArrayList<ArrayList<DatanodeInfo>>( numNamenodes); namenodeDecomList.add(0, new ArrayList<DatanodeInfo>(numDatanodes)); Path file1 = new Path("testDecommission2.dat"); int replicas = 4; // Start decommissioning one namenode at a time ArrayList<DatanodeInfo> decommissionedNodes = namenodeDecomList.get(0); FileSystem fileSys = cluster.getFileSystem(0); FSNamesystem ns = cluster.getNamesystem(0); writeFile(fileSys, file1, replicas); int deadDecomissioned = ns.getNumDecomDeadDataNodes(); int liveDecomissioned = ns.getNumDecomLiveDataNodes(); // Decommission one node. Verify that node is decommissioned. 
DatanodeInfo decomNode = decommissionNode(0, decommissionedNodes, AdminStates.DECOMMISSIONED); decommissionedNodes.add(decomNode); assertEquals(deadDecomissioned, ns.getNumDecomDeadDataNodes()); assertEquals(liveDecomissioned + 1, ns.getNumDecomLiveDataNodes()); // Ensure decommissioned datanode is not automatically shutdown DFSClient client = getDfsClient(cluster.getNameNode(0), conf); assertEquals("All datanodes must be alive", numDatanodes, client.datanodeReport(DatanodeReportType.LIVE).length); assertNull(checkFile(fileSys, file1, replicas, decomNode.getXferAddr(), numDatanodes)); cleanupFile(fileSys, file1); // Restart the cluster and ensure recommissioned datanodes // are allowed to register with the namenode cluster.shutdown(); startCluster(1, 4, conf); cluster.shutdown(); } /** * Tests recommission for non federated cluster */ @Test(timeout=360000) public void testRecommission() throws IOException { testRecommission(1, 6); } /** * Test decommission for federeated cluster */ @Test(timeout=360000) public void testDecommissionFederation() throws IOException { testDecommission(2, 2); } private void testDecommission(int numNamenodes, int numDatanodes) throws IOException { LOG.info("Starting test testDecommission"); startCluster(numNamenodes, numDatanodes, conf); ArrayList<ArrayList<DatanodeInfo>> namenodeDecomList = new ArrayList<ArrayList<DatanodeInfo>>(numNamenodes); for(int i = 0; i < numNamenodes; i++) { namenodeDecomList.add(i, new ArrayList<DatanodeInfo>(numDatanodes)); } Path file1 = new Path("testDecommission.dat"); for (int iteration = 0; iteration < numDatanodes - 1; iteration++) { int replicas = numDatanodes - iteration - 1; // Start decommissioning one namenode at a time for (int i = 0; i < numNamenodes; i++) { ArrayList<DatanodeInfo> decommissionedNodes = namenodeDecomList.get(i); FileSystem fileSys = cluster.getFileSystem(i); FSNamesystem ns = cluster.getNamesystem(i); writeFile(fileSys, file1, replicas); int deadDecomissioned = 
// NOTE(review): this chunk begins mid-method. The statement below is the tail of a
// decommission-verification loop whose head lies before this view; in particular the
// declaration "int deadDecomissioned = " is cut off immediately before this call, so
// the first line reads as a bare expression here. Reproduced verbatim.
ns.getNumDecomDeadDataNodes();
      int liveDecomissioned = ns.getNumDecomLiveDataNodes();

      // Decommission one node. Verify that node is decommissioned.
      DatanodeInfo decomNode = decommissionNode(i, decommissionedNodes,
          AdminStates.DECOMMISSIONED);
      decommissionedNodes.add(decomNode);
      assertEquals(deadDecomissioned, ns.getNumDecomDeadDataNodes());
      assertEquals(liveDecomissioned + 1, ns.getNumDecomLiveDataNodes());

      // Ensure decommissioned datanode is not automatically shutdown
      DFSClient client = getDfsClient(cluster.getNameNode(i), conf);
      assertEquals("All datanodes must be alive", numDatanodes,
          client.datanodeReport(DatanodeReportType.LIVE).length);
      // wait for the block to be replicated (poll up to 20 times, 1s apart)
      int tries = 0;
      while (tries++ < 20) {
        try {
          Thread.sleep(1000);
          if (checkFile(fileSys, file1, replicas, decomNode.getXferAddr(),
              numDatanodes) == null) {
            break;
          }
        } catch (InterruptedException ie) {
          // interrupt deliberately ignored; loop simply retries until timeout
        }
      }
      assertTrue("Checked if block was replicated after decommission, tried "
          + tries + " times.", tries < 20);
      cleanupFile(fileSys, file1);
    }
  }

  // Restart the cluster and ensure recommissioned datanodes
  // are allowed to register with the namenode
  cluster.shutdown();
  startCluster(numNamenodes, numDatanodes, conf);
  cluster.shutdown();
}

/**
 * Decommissions one datanode per namenode, waits for re-replication, then
 * recommissions the node and waits for the extra replicas to be deleted.
 *
 * @param numNamenodes number of namenodes in the (possibly federated) cluster.
 * @param numDatanodes number of datanodes to start.
 * @throws IOException on cluster or filesystem failure.
 */
private void testRecommission(int numNamenodes, int numDatanodes)
    throws IOException {
  LOG.info("Starting test testRecommission");

  startCluster(numNamenodes, numDatanodes, conf);

  ArrayList<ArrayList<DatanodeInfo>> namenodeDecomList =
      new ArrayList<ArrayList<DatanodeInfo>>(numNamenodes);
  for (int i = 0; i < numNamenodes; i++) {
    namenodeDecomList.add(i, new ArrayList<DatanodeInfo>(numDatanodes));
  }
  Path file1 = new Path("testDecommission.dat");
  // one replica fewer than datanodes, so decommission forces re-replication
  int replicas = numDatanodes - 1;

  for (int i = 0; i < numNamenodes; i++) {
    ArrayList<DatanodeInfo> decommissionedNodes = namenodeDecomList.get(i);
    FileSystem fileSys = cluster.getFileSystem(i);
    writeFile(fileSys, file1, replicas);

    // Decommission one node. Verify that node is decommissioned.
    DatanodeInfo decomNode = decommissionNode(i, decommissionedNodes,
        AdminStates.DECOMMISSIONED);
    decommissionedNodes.add(decomNode);

    // Ensure decommissioned datanode is not automatically shutdown
    DFSClient client = getDfsClient(cluster.getNameNode(i), conf);
    assertEquals("All datanodes must be alive", numDatanodes,
        client.datanodeReport(DatanodeReportType.LIVE).length);

    int tries = 0;
    // wait for the block to be replicated (poll up to 20 times, 1s apart)
    while (tries++ < 20) {
      try {
        Thread.sleep(1000);
        if (checkFile(fileSys, file1, replicas, decomNode.getXferAddr(),
            numDatanodes) == null) {
          break;
        }
      } catch (InterruptedException ie) {
        // interrupt deliberately ignored; loop simply retries until timeout
      }
    }
    assertTrue("Checked if block was replicated after decommission, tried "
        + tries + " times.", tries < 20);

    // stop decommission and check if the new replicas are removed
    recomissionNode(decomNode);
    // wait for the block to be deleted
    tries = 0;
    while (tries++ < 20) {
      try {
        Thread.sleep(1000);
        if (checkFile(fileSys, file1, replicas, null, numDatanodes) == null) {
          break;
        }
      } catch (InterruptedException ie) {
        // interrupt deliberately ignored; loop simply retries until timeout
      }
    }
    cleanupFile(fileSys, file1);
    assertTrue("Checked if node was recommissioned " + tries + " times.",
        tries < 20);
    LOG.info("tried: " + tries + " times before recommissioned");
  }
  cluster.shutdown();
}

/**
 * Tests cluster storage statistics during decommissioning for non
 * federated cluster
 */
@Test(timeout=360000)
public void testClusterStats() throws Exception {
  testClusterStats(1);
}

/**
 * Tests cluster storage statistics during decommissioning for
 * federated cluster
 */
@Test(timeout=360000)
public void testClusterStatsFederation() throws Exception {
  testClusterStats(3);
}

/**
 * Decommissions the single datanode under each namenode and verifies the
 * namenode's capacity/usage statistics before and after the decommission is
 * cancelled.
 *
 * @param numNameNodes number of namenodes to run the check against.
 */
public void testClusterStats(int numNameNodes) throws IOException,
    InterruptedException {
  LOG.info("Starting test testClusterStats");
  int numDatanodes = 1;
  startCluster(numNameNodes, numDatanodes, conf);

  for (int i = 0; i < numNameNodes; i++) {
    FileSystem fileSys = cluster.getFileSystem(i);
    Path file = new Path("testClusterStats.dat");
    writeFile(fileSys, file, 1);

    FSNamesystem fsn = cluster.getNamesystem(i);
    NameNode namenode = cluster.getNameNode(i);
    DatanodeInfo downnode = decommissionNode(i, null,
        AdminStates.DECOMMISSION_INPROGRESS);
    // Check namenode stats for multiple datanode heartbeats
    verifyStats(namenode, fsn, downnode, true);

    // Stop decommissioning and verify stats
    writeConfigFile(excludeFile, null);
    refreshNodes(fsn, conf);
    DatanodeInfo ret = NameNodeAdapter.getDatanode(fsn, downnode);
    waitNodeState(ret, AdminStates.NORMAL);
    verifyStats(namenode, fsn, ret, false);
  }
}

/**
 * Test host/include file functionality. Only datanodes
 * in the include file are allowed to connect to the namenode in a non
 * federated cluster.
 */
@Test(timeout=360000)
public void testHostsFile() throws IOException, InterruptedException {
  // Test for a single namenode cluster
  testHostsFile(1);
}

/**
 * Test host/include file functionality. Only datanodes
 * in the include file are allowed to connect to the namenode in a
 * federated cluster.
 */
@Test(timeout=360000)
public void testHostsFileFederation() throws IOException,
    InterruptedException {
  // Test for 3 namenode federated cluster
  testHostsFile(3);
}

/**
 * Writes an include file containing only a bogus IP, refreshes every
 * namenode, and verifies the real datanode is reported dead alongside the
 * bogus entry.
 *
 * @param numNameNodes number of namenodes in the federated cluster.
 */
public void testHostsFile(int numNameNodes) throws IOException,
    InterruptedException {
  int numDatanodes = 1;
  cluster = new MiniDFSCluster.Builder(conf)
      .nnTopology(MiniDFSNNTopology.simpleFederatedTopology(numNameNodes))
      .numDataNodes(numDatanodes).setupHostsFile(true).build();
  cluster.waitActive();

  // Replace the hosts file with one that excludes the datanode, so the
  // datanode is disallowed from talking to the namenode, resulting in its
  // shutdown.
  ArrayList<String> list = new ArrayList<String>();
  final String bogusIp = "127.0.30.1";
  list.add(bogusIp);
  writeConfigFile(hostsFile, list);

  for (int j = 0; j < numNameNodes; j++) {
    refreshNodes(cluster.getNamesystem(j), conf);

    DFSClient client = getDfsClient(cluster.getNameNode(j), conf);
    DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
    // poll up to 5 heartbeat intervals for the datanode to drop out of LIVE
    for (int i = 0; i < 5 && info.length != 0; i++) {
      LOG.info("Waiting for datanode to be marked dead");
      Thread.sleep(HEARTBEAT_INTERVAL * 1000);
      info = client.datanodeReport(DatanodeReportType.LIVE);
    }
    assertEquals("Number of live nodes should be 0", 0, info.length);

    // Test that non-live and bogus hostnames are considered "dead".
    // The dead report should have an entry for (1) the DN that is
    // now considered dead because it is no longer allowed to connect
    // and (2) the bogus entry in the hosts file (these entries are
    // always added last)
    info = client.datanodeReport(DatanodeReportType.DEAD);
    assertEquals("There should be 2 dead nodes", 2, info.length);
    DatanodeID id = cluster.getDataNodes().get(0).getDatanodeId();
    assertEquals(id.getHostName(), info[0].getHostName());
    assertEquals(bogusIp, info[1].getHostName());
  }
}

/**
 * Verifies that duplicate host-file entries referring to the same
 * IP-address/port combination are de-duplicated by the NameNode, while
 * entries with a distinct port or address are kept.
 */
@Test(timeout=360000)
public void testDuplicateHostsEntries() throws IOException,
    InterruptedException {
  Configuration hdfsConf = new Configuration(conf);
  cluster = new MiniDFSCluster.Builder(hdfsConf)
      .numDataNodes(1).setupHostsFile(true).build();
  cluster.waitActive();
  int dnPort = cluster.getDataNodes().get(0).getXferPort();

  // pick some random ports that don't overlap with our DN's port
  // or with each other.
  Random random = new Random(System.currentTimeMillis());
  int port1 = dnPort;
  while (port1 == dnPort) {
    port1 = random.nextInt(6000) + 1000;
  }
  int port2 = dnPort;
  while ((port2 == dnPort) || (port2 == port1)) {
    port2 = random.nextInt(6000) + 1000;
  }

  // Replace the hosts file so the datanode is disallowed from talking to
  // the namenode, resulting in its shutdown.
  ArrayList<String> nodes = new ArrayList<String>();

  // These entries will be de-duped by the NameNode, since they refer
  // to the same IP address + port combo.
  nodes.add("127.0.0.1:" + port1);
  nodes.add("localhost:" + port1);
  nodes.add("127.0.0.1:" + port1);

  // The following entries should not be de-duped.
  nodes.add("127.0.0.1:" + port2);
  nodes.add("127.0.30.1:" + port1);
  writeConfigFile(hostsFile, nodes);

  refreshNodes(cluster.getNamesystem(0), hdfsConf);

  DFSClient client = getDfsClient(cluster.getNameNode(0), hdfsConf);
  DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
  // poll up to 5 heartbeat intervals for the datanode to drop out of LIVE
  for (int i = 0; i < 5 && info.length != 0; i++) {
    LOG.info("Waiting for datanode to be marked dead");
    Thread.sleep(HEARTBEAT_INTERVAL * 1000);
    info = client.datanodeReport(DatanodeReportType.LIVE);
  }
  assertEquals("Number of live nodes should be 0", 0, info.length);

  // Test that non-live and bogus hostnames are considered "dead".
  // The dead report should have an entry for (1) the DN that is
  // now considered dead because it is no longer allowed to connect
  // and (2) the bogus entries in the hosts file.
  DatanodeInfo deadDns[] = client.datanodeReport(DatanodeReportType.DEAD);
  HashMap<String, DatanodeInfo> deadByXferAddr =
      new HashMap<String, DatanodeInfo>();
  for (DatanodeInfo dn : deadDns) {
    LOG.info("DEAD DatanodeInfo: xferAddr = " + dn.getXferAddr() +
        ", ipAddr = " + dn.getIpAddr() +
        ", hostname = " + dn.getHostName());
    deadByXferAddr.put(dn.getXferAddr(), dn);
  }

  // The real DataNode should be included in the list.
  String realDnIpPort = cluster.getDataNodes().get(0).
      getXferAddress().getAddress().getHostAddress() + ":" +
      cluster.getDataNodes().get(0).getXferPort();
  Assert.assertNotNull("failed to find real datanode IP " + realDnIpPort,
      deadByXferAddr.remove(realDnIpPort));

  // The fake datanode with address 127.0.30.1 should be included in this list.
  Assert.assertNotNull(deadByXferAddr.remove(
      "127.0.30.1:" + port1));

  // Now look for the two copies of 127.0.0.1 with port1 and port2.
  Iterator<Map.Entry<String, DatanodeInfo>> iter =
      deadByXferAddr.entrySet().iterator();
  boolean foundPort1 = false, foundPort2 = false;
  while (iter.hasNext()) {
    Map.Entry<String, DatanodeInfo> entry = iter.next();
    DatanodeInfo dn = entry.getValue();
    if (dn.getXferPort() == port1) {
      foundPort1 = true;
      iter.remove();
    } else if (dn.getXferPort() == port2) {
      foundPort2 = true;
      iter.remove();
    }
  }
  Assert.assertTrue("did not find a dead entry with port " + port1,
      foundPort1);
  Assert.assertTrue("did not find a dead entry with port " + port2,
      foundPort2);
  // nothing else should remain in the dead report
  Assert.assertTrue(deadByXferAddr.isEmpty());
}

/**
 * Verifies that a datanode configured with a registration name can be
 * excluded and then re-included in the hosts file by that registration name
 * rather than by its IP address.
 */
@Test(timeout=360000)
public void testIncludeByRegistrationName() throws IOException,
    InterruptedException {
  Configuration hdfsConf = new Configuration(conf);
  final String registrationName = "--registration-name--";
  final String nonExistentDn = "127.0.0.40";
  hdfsConf.set(DFSConfigKeys.DFS_DATANODE_HOST_NAME_KEY, registrationName);
  cluster = new MiniDFSCluster.Builder(hdfsConf)
      .numDataNodes(1).checkDataNodeHostConfig(true)
      .setupHostsFile(true).build();
  cluster.waitActive();

  // Set up an includes file that doesn't have our datanode.
  ArrayList<String> nodes = new ArrayList<String>();
  nodes.add(nonExistentDn);
  writeConfigFile(hostsFile, nodes);
  refreshNodes(cluster.getNamesystem(0), hdfsConf);

  // Wait for the DN to be marked dead.
  DFSClient client = getDfsClient(cluster.getNameNode(0), hdfsConf);
  while (true) {
    DatanodeInfo info[] = client.datanodeReport(DatanodeReportType.DEAD);
    if (info.length == 1) {
      break;
    }
    LOG.info("Waiting for datanode to be marked dead");
    Thread.sleep(HEARTBEAT_INTERVAL * 1000);
  }

  // Use a non-empty include file with our registration name.
  // It should work.
  int dnPort = cluster.getDataNodes().get(0).getXferPort();
  nodes = new ArrayList<String>();
  nodes.add(registrationName + ":" + dnPort);
  writeConfigFile(hostsFile, nodes);
  refreshNodes(cluster.getNamesystem(0), hdfsConf);
  cluster.restartDataNode(0);

  // Wait for the DN to come back.
  while (true) {
    DatanodeInfo info[] = client.datanodeReport(DatanodeReportType.LIVE);
    if (info.length == 1) {
      Assert.assertFalse(info[0].isDecommissioned());
      Assert.assertFalse(info[0].isDecommissionInProgress());
      assertEquals(registrationName, info[0].getHostName());
      break;
    }
    LOG.info("Waiting for datanode to come back");
    Thread.sleep(HEARTBEAT_INTERVAL * 1000);
  }
}

/**
 * Decommissions datanodes that hold blocks of a file that is still open for
 * append, and verifies they still reach the DECOMMISSIONED state.
 */
@Test(timeout=120000)
public void testDecommissionWithOpenfile() throws IOException,
    InterruptedException {
  LOG.info("Starting test testDecommissionWithOpenfile");

  //At most 4 nodes will be decommissioned
  startCluster(1, 7, conf);

  FileSystem fileSys = cluster.getFileSystem(0);
  FSNamesystem ns = cluster.getNamesystem(0);

  String openFile = "/testDecommissionWithOpenfile.dat";
  writeFile(fileSys, new Path(openFile), (short)3);
  // make sure the file was open for write
  FSDataOutputStream fdos = fileSys.append(new Path(openFile));

  LocatedBlocks lbs = NameNodeAdapter.getBlockLocations(
      cluster.getNameNode(0), openFile, 0, fileSize);

  DatanodeInfo[] dnInfos4LastBlock = lbs.getLastLocatedBlock().getLocations();
  DatanodeInfo[] dnInfos4FirstBlock = lbs.get(0).getLocations();

  ArrayList<String> nodes = new ArrayList<String>();
  ArrayList<DatanodeInfo> dnInfos = new ArrayList<DatanodeInfo>();

  // collect first-block replica holders that do NOT also hold the last block
  for (DatanodeInfo datanodeInfo : dnInfos4FirstBlock) {
    DatanodeInfo found = datanodeInfo;
    for (DatanodeInfo dif : dnInfos4LastBlock) {
      if (datanodeInfo.equals(dif)) {
        found = null;
      }
    }
    if (found != null) {
      nodes.add(found.getXferAddr());
      dnInfos.add(found);
    }
  }
  //decommission one of the 3 nodes which have last block
  nodes.add(dnInfos4LastBlock[0].getXferAddr());
  dnInfos.add(dnInfos4LastBlock[0]);

  writeConfigFile(excludeFile, nodes);
  refreshNodes(ns, conf);
  for (DatanodeInfo dn : dnInfos) {
    waitNodeState(dn, AdminStates.DECOMMISSIONED);
  }

  fdos.close();
}
}
/*
 * Copyright 2018-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.lettuce.core.cluster.api.sync;

import java.util.List;
import java.util.Map;

import io.lettuce.core.*;
import io.lettuce.core.XReadArgs.StreamOffset;
import io.lettuce.core.models.stream.PendingMessage;
import io.lettuce.core.models.stream.PendingMessages;

/**
 * Synchronous executed commands on a node selection for Streams.
 *
 * @param <K> Key type.
 * @param <V> Value type.
 * @author Mark Paluch
 * @since 5.1
 * @generated by io.lettuce.apigenerator.CreateSyncNodeSelectionClusterApi
 */
public interface NodeSelectionStreamCommands<K, V> {

    /**
     * Acknowledge one or more messages as processed.
     *
     * @param key the stream key.
     * @param group name of the consumer group.
     * @param messageIds message Id's to acknowledge.
     * @return simple-reply the length of acknowledged messages.
     */
    Executions<Long> xack(K key, K group, String... messageIds);

    /**
     * Append a message to the stream {@code key}.
     *
     * @param key the stream key.
     * @param body message body.
     * @return simple-reply the message Id.
     */
    Executions<String> xadd(K key, Map<K, V> body);

    /**
     * Append a message to the stream {@code key}.
     *
     * @param key the stream key.
     * @param args arguments to control message Id, trimming and {@code NOMKSTREAM}.
     * @param body message body.
     * @return simple-reply the message Id.
     */
    Executions<String> xadd(K key, XAddArgs args, Map<K, V> body);

    /**
     * Append a message to the stream {@code key}.
     *
     * @param key the stream key.
     * @param keysAndValues message body.
     * @return simple-reply the message Id.
     */
    Executions<String> xadd(K key, Object... keysAndValues);

    /**
     * Append a message to the stream {@code key}.
     *
     * @param key the stream key.
     * @param args arguments to control message Id, trimming and {@code NOMKSTREAM}.
     * @param keysAndValues message body.
     * @return simple-reply the message Id.
     */
    Executions<String> xadd(K key, XAddArgs args, Object... keysAndValues);

    /**
     * Gets ownership of one or multiple messages in the Pending Entries List of a given stream consumer group.
     *
     * @param key the stream key.
     * @param consumer consumer identified by group name and consumer key.
     * @param minIdleTime minimum idle time a message must have to be claimed.
     * @param messageIds message Id's to claim.
     * @return simple-reply the {@link StreamMessage}.
     */
    Executions<List<StreamMessage<K, V>>> xclaim(K key, Consumer<K> consumer, long minIdleTime, String... messageIds);

    /**
     * Gets ownership of one or multiple messages in the Pending Entries List of a given stream consumer group.
     * <p>
     * Note that setting the {@code JUSTID} flag (calling this method with {@link XClaimArgs#justid()}) suppresses the message
     * body and {@link StreamMessage#getBody()} is {@code null}.
     *
     * @param key the stream key.
     * @param consumer consumer identified by group name and consumer key.
     * @param args claim arguments such as idle time, retry count and {@code JUSTID}.
     * @param messageIds message Id's to claim.
     * @return simple-reply the {@link StreamMessage}.
     */
    Executions<List<StreamMessage<K, V>>> xclaim(K key, Consumer<K> consumer, XClaimArgs args, String... messageIds);

    /**
     * Removes the specified entries from the stream. Returns the number of items deleted, that may be different from the number
     * of IDs passed in case certain IDs do not exist.
     *
     * @param key the stream key.
     * @param messageIds stream message Id's.
     * @return simple-reply number of removed entries.
     */
    Executions<Long> xdel(K key, String... messageIds);

    /**
     * Create a consumer group.
     *
     * @param streamOffset name of the stream containing the offset to set.
     * @param group name of the consumer group.
     * @return simple-reply {@code true} if successful.
     */
    Executions<String> xgroupCreate(StreamOffset<K> streamOffset, K group);

    /**
     * Create a consumer group.
     *
     * @param streamOffset name of the stream containing the offset to set.
     * @param group name of the consumer group.
     * @param args arguments such as {@code MKSTREAM} to create the stream if missing.
     * @return simple-reply {@code true} if successful.
     * @since 5.2
     */
    Executions<String> xgroupCreate(StreamOffset<K> streamOffset, K group, XGroupCreateArgs args);

    /**
     * Delete a consumer from a consumer group.
     *
     * @param key the stream key.
     * @param consumer consumer identified by group name and consumer key.
     * @return Long integer-reply number of pending messages.
     */
    Executions<Long> xgroupDelconsumer(K key, Consumer<K> consumer);

    /**
     * Destroy a consumer group.
     *
     * @param key the stream key.
     * @param group name of the consumer group.
     * @return simple-reply {@code true} if successful.
     */
    Executions<Boolean> xgroupDestroy(K key, K group);

    /**
     * Set the current {@code group} id.
     *
     * @param streamOffset name of the stream containing the offset to set.
     * @param group name of the consumer group.
     * @return simple-reply OK.
     */
    Executions<String> xgroupSetid(StreamOffset<K> streamOffset, K group);

    /**
     * Retrieve information about the stream at {@code key}.
     *
     * @param key the stream key.
     * @return List&lt;Object&gt; array-reply.
     * @since 5.2
     */
    Executions<List<Object>> xinfoStream(K key);

    /**
     * Retrieve information about the stream consumer groups at {@code key}.
     *
     * @param key the stream key.
     * @return List&lt;Object&gt; array-reply.
     * @since 5.2
     */
    Executions<List<Object>> xinfoGroups(K key);

    /**
     * Retrieve information about consumer groups of group {@code group} and stream at {@code key}.
     *
     * @param key the stream key.
     * @param group name of the consumer group.
     * @return List&lt;Object&gt; array-reply.
     * @since 5.2
     */
    Executions<List<Object>> xinfoConsumers(K key, K group);

    /**
     * Get the length of a stream.
     *
     * @param key the stream key.
     * @return simple-reply the length of the stream.
     */
    Executions<Long> xlen(K key);

    /**
     * Read pending messages from a stream for a {@code group}.
     *
     * @param key the stream key.
     * @param group name of the consumer group.
     * @return List&lt;Object&gt; array-reply list pending entries.
     */
    Executions<PendingMessages> xpending(K key, K group);

    /**
     * Read pending messages from a stream within a specific {@link Range}.
     *
     * @param key the stream key.
     * @param group name of the consumer group.
     * @param range must not be {@code null}.
     * @param limit must not be {@code null}.
     * @return List&lt;Object&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<PendingMessage>> xpending(K key, K group, Range<String> range, Limit limit);

    /**
     * Read pending messages from a stream within a specific {@link Range}.
     *
     * @param key the stream key.
     * @param consumer consumer identified by group name and consumer key.
     * @param range must not be {@code null}.
     * @param limit must not be {@code null}.
     * @return List&lt;Object&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<PendingMessage>> xpending(K key, Consumer<K> consumer, Range<String> range, Limit limit);

    /**
     * Read messages from a stream within a specific {@link Range}.
     *
     * @param key the stream key.
     * @param range must not be {@code null}.
     * @return List&lt;StreamMessage&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<StreamMessage<K, V>>> xrange(K key, Range<String> range);

    /**
     * Read messages from a stream within a specific {@link Range} applying a {@link Limit}.
     *
     * @param key the stream key.
     * @param range must not be {@code null}.
     * @param limit must not be {@code null}.
     * @return List&lt;StreamMessage&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<StreamMessage<K, V>>> xrange(K key, Range<String> range, Limit limit);

    /**
     * Read messages from one or more {@link StreamOffset}s.
     *
     * @param streams the streams to read from.
     * @return List&lt;StreamMessage&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<StreamMessage<K, V>>> xread(StreamOffset<K>... streams);

    /**
     * Read messages from one or more {@link StreamOffset}s.
     *
     * @param args read arguments.
     * @param streams the streams to read from.
     * @return List&lt;StreamMessage&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<StreamMessage<K, V>>> xread(XReadArgs args, StreamOffset<K>... streams);

    /**
     * Read messages from one or more {@link StreamOffset}s using a consumer group.
     *
     * @param consumer consumer/group.
     * @param streams the streams to read from.
     * @return List&lt;StreamMessage&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<StreamMessage<K, V>>> xreadgroup(Consumer<K> consumer, StreamOffset<K>... streams);

    /**
     * Read messages from one or more {@link StreamOffset}s using a consumer group.
     *
     * @param consumer consumer/group.
     * @param args read arguments.
     * @param streams the streams to read from.
     * @return List&lt;StreamMessage&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<StreamMessage<K, V>>> xreadgroup(Consumer<K> consumer, XReadArgs args, StreamOffset<K>... streams);

    /**
     * Read messages from a stream within a specific {@link Range} in reverse order.
     *
     * @param key the stream key.
     * @param range must not be {@code null}.
     * @return List&lt;StreamMessage&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<StreamMessage<K, V>>> xrevrange(K key, Range<String> range);

    /**
     * Read messages from a stream within a specific {@link Range} applying a {@link Limit} in reverse order.
     *
     * @param key the stream key.
     * @param range must not be {@code null}.
     * @param limit must not be {@code null}.
     * @return List&lt;StreamMessage&gt; array-reply list with members of the resulting stream.
     */
    Executions<List<StreamMessage<K, V>>> xrevrange(K key, Range<String> range, Limit limit);

    /**
     * Trims the stream to {@code count} elements.
     *
     * @param key the stream key.
     * @param count length of the stream.
     * @return simple-reply number of removed entries.
     */
    Executions<Long> xtrim(K key, long count);

    /**
     * Trims the stream to {@code count} elements.
     *
     * @param key the stream key.
     * @param approximateTrimming {@code true} to trim approximately using the {@code ~} flag.
     * @param count length of the stream.
     * @return simple-reply number of removed entries.
     */
    Executions<Long> xtrim(K key, boolean approximateTrimming, long count);
}
/**
 * Licensed to Jasig under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Jasig licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.portal.groups.filesystem;

import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertNull;
import static junit.framework.Assert.assertTrue;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.sql.DataSource;

import junit.textui.TestRunner;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jasig.portal.EntityIdentifier;
import org.jasig.portal.EntityTypes;
import org.jasig.portal.groups.EntityImpl;
import org.jasig.portal.groups.GroupServiceConfiguration;
import org.jasig.portal.groups.GroupsException;
import org.jasig.portal.groups.IEntity;
import org.jasig.portal.groups.IEntityGroup;
import org.jasig.portal.groups.IEntityGroupStore;
import org.jasig.portal.groups.IGroupConstants;
import org.jasig.portal.groups.IGroupMember;
import org.jasig.portal.rdbm.TransientDatasource;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * This class was rewritten to eliminate external dependencies, chiefly
 * on the composite group service. Although this was mostly achieved,
 * 2 dependencies remain. It needs a file system to read via java.io,
 * since this is what is being tested, and it requires a composite
 * group service configuration document. I will eventually remove these
 * dependencies but it seemed better to get the test in now. I was
 * thinking we could eventually use something like Apache Commons VFS
 * to set up a virtual file system (a future enhancement for the
 * FileSystem group service). In the meantime, this class must create a
 * GROUPS_ROOT directory and write to it. The class first tries to
 * create the directory in the user.home. If unsuccessful, it tries to
 * create it in the current directory, and if this is unsuccessful,
 * it dies.
 *
 * @author Dan Ellentuck
 * @version $Revision$
 */
@Ignore
public class FileSystemGroupsTest {
    protected static final Log LOG = LogFactory.getLog(FileSystemGroupsTest.class);
    // resolved lazily in setUp() via Class.forName
    private static Class GROUP_CLASS;
    private static Class IPERSON_CLASS;
    private static String CR = "\n";
    private String[] testEntityKeys;
    private String[] testFileNames;
    private List testGroupKeys;
    private int numTestFiles;
    private int numTestEntities;

    // JUnit-managed scratch directory serving as the groups root
    @Rule
    public final TemporaryFolder groupsRoot = new TemporaryFolder();

    private String GROUPS_ROOT;
    private String IPERSON_GROUPS_ROOT;
    private List allFiles = null, directoryFiles = null, keyFiles = null;
    private final String NON_EXISTENT_ID = "xyzxyzxyz";
    private IEntityGroupStore groupStore;
    private String GROUP_SEPARATOR;
    private DataSource testDataSource;

    /**
     * Writes a small "key file" containing the test entity keys, one per
     * line, with comment lines at the top and bottom. Every {@code div}-th
     * line is indented with a leading space to exercise the parser.
     */
    protected void addIdsToFile(File f) {
        final long now = System.currentTimeMillis() / 10;
        long div = now % 5;
        div += 5;
        try {
            String line = null, start = null;
            final BufferedWriter bw = new BufferedWriter(new FileWriter(f));
            bw.write("# test file written at " + new java.util.Date());
            bw.newLine();
            bw.write("#");
            bw.newLine();
            for (int i = 0; i < this.numTestEntities; i++) {
                start = i > 0 && i % div == 0 ? " " : "";
                line = start + this.testEntityKeys[i] + " is entity " + (i + 1);
                bw.write(line);
                bw.newLine();
            }
            bw.write("# end of test file ");
            bw.newLine();
            bw.write("#");
            bw.newLine();
            bw.close();
        } // end try
        catch (final Exception ex) {
            // best-effort test fixture: failures are logged, not rethrown
            print("FileSystemGroupsTest.addIdsToFile(): " + ex.getMessage());
        }
    }

    /**
     * Finds the group whose key is derived from the given file's path.
     * @return org.jasig.portal.groups.IEntityGroup
     */
    private IEntityGroup findGroup(File file) throws GroupsException {
        final String key = this.getKeyFromFile(file);
        return this.findGroup(key);
    }

    /**
     * Note that this is the local, not composite, key.
     * @return org.jasig.portal.groups.IEntityGroup
     */
    private IEntityGroup findGroup(String key) throws GroupsException {
        return this.getGroupStore().find(key);
    }

    private File getGroupsRoot() {
        return this.groupsRoot.getRoot();
    }

    /**
     * Lazily creates the store under test with a minimal configuration.
     * @return FileSystemGroupStore
     */
    private FileSystemGroupStore getGroupStore() throws GroupsException {
        if (this.groupStore == null) {
            final GroupServiceConfiguration config = new GroupServiceConfiguration();
            final Map atts = config.getAttributes();
            atts.put("nodeSeparator", IGroupConstants.NODE_SEPARATOR);
            this.groupStore = new FileSystemGroupStore(config);
        }
        return (FileSystemGroupStore) this.groupStore;
    }

    /**
     * Derives a group key from a file path by stripping the groups root and,
     * when the separator is a period, substituting periods in the remainder.
     * @return java.lang.String the group key for the file
     */
    private String getKeyFromFile(File file) throws GroupsException {
        String key = file.getPath();
        if (key.startsWith(this.GROUPS_ROOT)) {
            key = key.substring(this.GROUPS_ROOT.length());
            if (this.GROUP_SEPARATOR.equals(String.valueOf(FileSystemGroupStore.PERIOD))) {
                key = key.replace(FileSystemGroupStore.PERIOD, FileSystemGroupStore.SUBSTITUTE_PERIOD);
            }
        }
        return key;
    }

    /**
     * @return org.jasig.portal.groups.IEntity an IPerson-typed test entity
     */
    private IEntity getNewIPersonEntity(String key) throws GroupsException {
        return this.getNewEntity(IPERSON_CLASS, key);
    }

    /**
     * @return org.jasig.portal.groups.IEntity
     */
    private IEntity getNewEntity(Class type, String key) throws GroupsException {
        return new EntityImpl(key, type);
    }

    /**
     * Builds a random string of uppercase letters (A–Y).
     * @return java.lang.String
     * @param length int
     */
    private String getRandomString(java.util.Random r, int length) {
        final char[] chars = new char[length];
        for (int i = 0; i < length; i++) {
            final int diff = r.nextInt(25);
            final int charValue = 'A' + diff;
            chars[i] = (char) charValue;
        }
        return new String(chars);
    }

    /**
     * Starts the application.
     * @param args an array of command-line arguments
     */
    public static void main(java.lang.String[] args) throws Exception {
        final String[] mainArgs = { "org.jasig.portal.groups.filesystem.FileSystemGroupsTest" };
        print("START TESTING FILESYSTEM GROUP STORE" + CR);
        TestRunner.main(mainArgs);
        print(CR + "END TESTING FILESYSTEM GROUP STORE");
    }

    /**
     * @param msg java.lang.String
     */
    private static void print(String msg) {
        LOG.debug(msg);
    }

    /**
     * Builds a 3-level directory tree of group files under the temporary
     * groups root, fills the leaf "key files" with test entity keys, and
     * creates the UP_ENTITY_TYPE table in an in-memory datasource.
     */
    @Before
    public void setUp() throws Exception {
        if (GROUP_CLASS == null) {
            GROUP_CLASS = Class.forName("org.jasig.portal.groups.IEntityGroup");
        }
        if (IPERSON_CLASS == null) {
            IPERSON_CLASS = Class.forName("org.jasig.portal.security.IPerson");
        }
        this.numTestEntities = 10;
        this.numTestFiles = 2;
        this.allFiles = new ArrayList();
        this.directoryFiles = new ArrayList();
        this.keyFiles = new ArrayList();
        final char sep = this.getGroupStore().getGoodSeparator();
        File iPersonGroupsRootDir;
        String fileName = null;
        File f = null, ff = null, fff = null;
        int i = 0, j = 0, k = 0;
        // n + n^2 + n^3 files for an n-wide, 3-deep tree
        final int totalNumTestFiles = this.numTestFiles + this.numTestFiles * this.numTestFiles
                + this.numTestFiles * this.numTestFiles * this.numTestFiles;

        // Entities and their keys:
        this.testEntityKeys = new String[this.numTestEntities];
        java.util.Random random = new java.util.Random();
        for (i = 0; i < this.numTestEntities; i++) {
            this.testEntityKeys[i] = this.getRandomString(random, 3) + i;
        }

        // File names:
        this.testFileNames = new String[totalNumTestFiles];
        random = new java.util.Random();
        for (i = 0; i < totalNumTestFiles; i++) {
            this.testFileNames[i] = this.getRandomString(random, 3) + i;
        }

        // GroupKeys:
        this.testGroupKeys = new ArrayList();

        // Create directory structure:
        final File gr = this.getGroupsRoot();
        if (gr == null) {
            throw new RuntimeException("COULD NOT CREATE GROUPS ROOT DIRECTORY!!!\n"
                    + "You must have WRITE permission on either user.home or the current directory.\n"
                    + "Could not create groups root directory.");
        }
        final String tempGroupsRoot = gr.getAbsolutePath();
        this.getGroupStore().setGroupsRootPath(tempGroupsRoot);
        this.GROUPS_ROOT = this.getGroupStore().getGroupsRootPath();
        this.GROUP_SEPARATOR = IGroupConstants.NODE_SEPARATOR;

        // initialize composite service:
        // GroupService.findGroup("local" + GROUP_SEPARATOR + "0");

        this.IPERSON_GROUPS_ROOT = this.GROUPS_ROOT + IPERSON_CLASS.getName();
        iPersonGroupsRootDir = new File(this.IPERSON_GROUPS_ROOT);
        if (!iPersonGroupsRootDir.exists()) {
            iPersonGroupsRootDir.mkdir();
            this.allFiles.add(iPersonGroupsRootDir);
        }
        int fileNameIdx = 0;
        // two levels of directories; the third level holds key files
        for (i = 0; i < this.numTestFiles; i++) {
            fileName = iPersonGroupsRootDir.getPath() + sep + this.testFileNames[fileNameIdx++];
            f = new File(fileName);
            f.mkdir();
            this.allFiles.add(f);
            this.directoryFiles.add(f);
            for (j = this.numTestFiles; j < this.numTestFiles * 2; j++) {
                fileName = f.getPath() + sep + this.testFileNames[fileNameIdx++];
                ff = new File(fileName);
                ff.mkdir();
                this.allFiles.add(ff);
                this.directoryFiles.add(ff);
                for (k = this.numTestFiles * 2; k < this.numTestFiles * 3; k++) {
                    fileName = ff.getPath() + sep + this.testFileNames[fileNameIdx++];
                    fff = new File(fileName);
                    fff.createNewFile();
                    this.addIdsToFile(fff);
                    this.allFiles.add(fff);
                    this.keyFiles.add(fff);
                }
            }
        }

        this.testDataSource = new TransientDatasource();
        final Connection con = this.testDataSource.getConnection();
        con.prepareStatement("CREATE TABLE UP_ENTITY_TYPE "
                + "(ENTITY_TYPE_ID INTEGER, "
                + "ENTITY_TYPE_NAME VARCHAR(1000), "
                + "DESCRIPTIVE_NAME VARCHAR(1000))").execute();
        con.prepareStatement("INSERT INTO UP_ENTITY_TYPE "
                + "VALUES (1, 'java.lang.Object', 'Generic')").execute();
        con.prepareStatement("INSERT INTO UP_ENTITY_TYPE "
                + "VALUES (2, 'org.jasig.portal.security.IPerson', 'IPerson')").execute();
        con.prepareStatement("INSERT INTO UP_ENTITY_TYPE "
                + "VALUES (3, 'org.jasig.portal.groups.IEntityGroup', 'Group')").execute();
        con.prepareStatement("INSERT INTO UP_ENTITY_TYPE "
                + "VALUES (4, 'org.jasig.portal.ChannelDefinition', 'Channel')").execute();
        con.prepareStatement("INSERT INTO UP_ENTITY_TYPE "
                + "VALUES (5, 'org.jasig.portal.groups.IEntity', 'Grouped Entity')").execute();
        con.close();

        // initialize EntityTypes
        // EntityTypes.singleton(this.testDataSource);
        // print("Leaving FileSystemGroupsTest.setUp()" + CR);
    }

    /**
     * Deletes the fixture files (deepest first), drops the test table and
     * shuts down the in-memory database.
     */
    @After
    public void tearDown() throws Exception {
        this.testEntityKeys = null;
        this.testFileNames = null;
        this.testGroupKeys = null;
        final File[] oldFiles = (File[]) this.allFiles.toArray(new File[this.allFiles.size()]);
        // delete in reverse creation order so children go before parents
        for (int i = oldFiles.length; i > 0; i--) {
            oldFiles[i - 1].delete();
        }
        this.getGroupsRoot().delete();
        this.allFiles = null;
        this.directoryFiles = null;
        this.keyFiles = null;
        this.groupStore = null;

        final Connection con = this.testDataSource.getConnection();
        con.prepareStatement("DROP TABLE UP_ENTITY_TYPE").execute();
        con.prepareStatement("SHUTDOWN").execute();
        con.close();
        this.testDataSource = null;
    }

    /**
     * Tests IEntityGroupStore.find(), which returns an instance of IEntityGroup
     * given a key.
     */
    @Test
    public void testFind() throws Exception {
        print("***** ENTERING FilesyStemGroupsTester.testFind() *****" + CR);
        String msg = null;
        String existingKey = null, nonExistingKey = null;
        IEntityGroup existingGroup = null;
        File f = null;

        msg = "Finding existing groups by key...";
        print(msg);
        for (final Iterator itr = this.directoryFiles.iterator(); itr.hasNext();) {
            f = (File) itr.next();
            existingKey = this.getKeyFromFile(f);
            msg = "Finding group key " + existingKey;
            existingGroup = this.getGroupStore().find(existingKey);
            assertNotNull(msg, existingGroup);
        }

        nonExistingKey = existingKey + "x";
        msg = "Finding non-existing key: " + nonExistingKey;
        print(msg);
        existingGroup = this.getGroupStore().find(nonExistingKey);
        assertNull(msg, existingGroup);

        print("Test completed successfully." + CR);
        print("***** LEAVING FileSystemGroupsTest.testFind() *****" + CR);
    }

    /**
     * Tests IEntityGroupStore.findContainingGroups() for both an IEntity and
     * an IEntityGroup.
     */
    @Test
    public void testFindContainingGroups() throws Exception {
        print("***** ENTERING FileSystemGroupsTest.testFindContainingGroups() *****" + CR);
        String msg = null;
        final String groupKey = null;
        IEntityGroup group = null, containingGroup = null;
        IEntity ent = null;
        File f = null;
        Iterator itr = null;
        final List containingGroups = new ArrayList();

        msg = "Finding containing groups for entity keys...";
        print(msg);
        for (final String testEntityKey : this.testEntityKeys) {
            ent = this.getNewIPersonEntity(testEntityKey);
            msg = "Finding containing groups for " + ent;
            print(msg);
            containingGroups.clear();
            for (itr = this.getGroupStore().findContainingGroups(ent); itr.hasNext();) {
                group = (IEntityGroup) itr.next();
                containingGroups.add(group);
                assertTrue(msg, group instanceof IEntityGroup);
            }
            // every key file contains every test entity key
            assertEquals(msg, this.keyFiles.size(), containingGroups.size());
        }

        ent = this.getNewIPersonEntity(this.NON_EXISTENT_ID);
        msg = "Finding containing groups for non-existent key: " + this.NON_EXISTENT_ID;
        print(msg);
        containingGroups.clear();
        for (itr = this.getGroupStore().findContainingGroups(ent); itr.hasNext();) {
            containingGroups.add(itr.next());
        }
        assertEquals(msg, 0, containingGroups.size());

        msg = "Finding containing groups for groups...";
        print(msg);
        // Each file that contains keys should have 1 and only 1 containing group.
        for (itr = this.keyFiles.iterator(); itr.hasNext();) {
            f = (File) itr.next();
            group = this.findGroup(f);
            assertTrue(msg, group instanceof IEntityGroup);
            containingGroups.clear();
            for (final Iterator cg = this.getGroupStore().findContainingGroups(group); cg.hasNext();) {
                containingGroup = (IEntityGroup) cg.next();
                assertTrue(msg, containingGroup instanceof IEntityGroup);
                containingGroups.add(containingGroup);
            }
            assertEquals(msg, 1, containingGroups.size());
        }

        msg = "Finding containing groups for a non-existent type...";
        print(msg);
        ent = this.getNewEntity(new Object().getClass(), this.testEntityKeys[0]);
        itr = this.getGroupStore().findContainingGroups(ent);
        final boolean hasContainingGroup = itr.hasNext();
        assertTrue(msg, !hasContainingGroup);

        print("Test completed successfully." + CR);
        print("***** LEAVING FileSystemGroupsTest.testFindContainingGroups() *****" + CR);
    }

    /**
     * Tests IEntityGroupStore.findMemberGroups(), findContainingGroups
     */
    @Test
    public void testFindEmbeddedMemberGroups() throws Exception {
        print("***** ENTERING FileSystemGroupsTest.testFindEmbeddedMemberGroups() *****" + CR);
        String msg = null;
        IEntityGroup group = null, memberGroup = null;
        File f = null, f2 = null;
        String memberKeys[] = null;

        f = (File) this.keyFiles.get(this.keyFiles.size() - 1); // member
        f2 = (File) this.keyFiles.get(this.keyFiles.size() - 2); // group
        final String memberKey = this.getKeyFromFile(f);
        final String groupKey = this.getKeyFromFile(f2);

        msg = "Now adding member group key " + memberKey + " to " + groupKey;
        print(msg);
        // append a "group:" line so f2 embeds f as a member group
        final BufferedWriter bw = new BufferedWriter(new FileWriter(f2.getPath(), true));
        bw.write("group:" + memberKey);
        bw.newLine();
        bw.close();

        msg = "Finding member group keys for key file " + groupKey;
        print(msg);
        group = this.findGroup(f2);
        assertTrue(msg, group instanceof IEntityGroup);
        memberKeys = this.getGroupStore().findMemberGroupKeys(group);
        assertEquals(msg, 1, memberKeys.length);
        memberGroup = this.findGroup(memberKeys[0]);
        assertNotNull(msg, memberGroup);
        assertTrue(msg, this.getGroupStore().contains(group, memberGroup));

        print("Test completed successfully." + CR);
        print("***** LEAVING FileSystemGroupsTest.testFindEmbeddedMemberGroups() *****" + CR);
    }

    /**
     * Tests IEntityGroupStore.findEntitiesForGroup().
*/ @Test public void testFindEntitiesForGroup() throws Exception { print("***** ENTERING FileSystemGroupsTest.testFindEntitiesForGroup() *****" + CR); String msg = null; IEntityGroup group = null; String entityKey = null; File f = null, f2 = null; Iterator itr = null; final List memberEntities = new ArrayList(); msg = "Finding entities for files..."; print(msg); for (itr = this.keyFiles.iterator(); itr.hasNext();) { f = (File) itr.next(); msg = "finding group: " + f; group = this.findGroup(f); assertTrue(msg, group instanceof IEntityGroup); memberEntities.clear(); for (final Iterator members = this.getGroupStore().getEntityIdsFromFile(f).iterator(); members.hasNext();) { entityKey = (String) members.next(); assertTrue(msg, entityKey != null); assertTrue(msg, entityKey.length() > 0); memberEntities.add(entityKey); } assertEquals(msg, this.numTestEntities, memberEntities.size()); } f = (File) this.keyFiles.get(0); f2 = f.getParentFile(); msg = "Finding entities for " + f2 + " (should have none)."; group = this.findGroup(f2); assertTrue(msg, group instanceof IEntityGroup); final boolean hasEntities = this.getGroupStore().findEntitiesForGroup(group).hasNext(); assertTrue(msg, !hasEntities); print("Test completed successfully." + CR); print("***** LEAVING FileSystemGroupsTest.testFindEntitiesForGroup() *****" + CR); } /** * Tests IEntityGroupStore.findMemberGroupKeys(). 
*/ @Test public void testFindMemberGroupKeys() throws Exception { print("***** ENTERING FileSystemGroupsTest.testFindMemberGroupKeys() *****" + CR); String msg = null; IEntityGroup group = null, memberGroup = null; File f = null; final File f2 = null; Iterator itr = null; String memberKeys[] = null; msg = "Finding member group keys for directory files..."; print(msg); for (itr = this.directoryFiles.iterator(); itr.hasNext();) { f = (File) itr.next(); msg = "Finding member group keys for group: " + f; group = this.findGroup(f); assertTrue(msg, group instanceof IEntityGroup); memberKeys = this.getGroupStore().findMemberGroupKeys(group); assertEquals(msg, this.numTestFiles, memberKeys.length); for (final String memberKey : memberKeys) { memberGroup = this.findGroup(memberKey); assertNotNull(msg, memberGroup); assertTrue(msg, this.getGroupStore().contains(group, memberGroup)); } } msg = "Finding member group keys for key files..."; print(msg); for (itr = this.keyFiles.iterator(); itr.hasNext();) { f = (File) itr.next(); msg = "Finding member group keys for group: " + f; group = this.findGroup(f); assertTrue(msg, group instanceof IEntityGroup); memberKeys = this.getGroupStore().findMemberGroupKeys(group); assertEquals(msg, 0, memberKeys.length); } print("Test completed successfully." + CR); print("***** LEAVING FileSystemGroupsTest.testFindMemberGroupKeys() *****" + CR); } /** * Tests IEntityGroupStore.findMemberGroups(). 
*/ @Test public void testFindMemberGroups() throws Exception { print("***** ENTERING FileSystemGroupsTest.testFindMemberGroups() *****" + CR); String msg = null, groupKey = null; IEntityGroup group = null, memberGroup = null; File f = null; final File f2 = null; Iterator itr = null; Iterator memberGroups = null; msg = "Finding member groups for directory files..."; print(msg); for (itr = this.directoryFiles.iterator(); itr.hasNext();) { f = (File) itr.next(); msg = "Finding member groups for group: " + f; group = this.findGroup(f); assertTrue(msg, group instanceof IEntityGroup); memberGroups = this.getGroupStore().findMemberGroups(group); while (memberGroups.hasNext()) { memberGroup = (IEntityGroup) memberGroups.next(); assertNotNull(msg, memberGroup); groupKey = memberGroup.getKey(); memberGroup = this.findGroup(groupKey); assertTrue(msg, this.getGroupStore().contains(group, memberGroup)); } } msg = "Finding member groups for key files..."; print(msg); for (itr = this.keyFiles.iterator(); itr.hasNext();) { f = (File) itr.next(); msg = "Finding member groups for group: " + f; group = this.findGroup(f); assertTrue(msg, group instanceof IEntityGroup); memberGroups = this.getGroupStore().findMemberGroups(group); assertTrue(msg, !memberGroups.hasNext()); } print("Test completed successfully." + CR); print("***** LEAVING FileSystemGroupsTest.testFindMemberGroups() *****" + CR); } /** * Tests IEntityGroupStore.searchForGroups(), which returns EntityIdentifier[] given * a search string. 
*/ @Test public void testSearchForGroups() throws Exception { print("***** ENTERING FileSystemGroupsTest.testSearchForGroups() *****" + CR); String msg = null; String is = null, startsWith = null, endsWith = null, contains = null, badQuery = null; final Class type = IPERSON_CLASS; final IEntityGroup existingGroup = null; IGroupMember member = null; EntityIdentifier[] ids = null; msg = "Searching for existing groups..."; print(msg); for (final String testFileName : this.testFileNames) { is = testFileName; startsWith = is.substring(0, (is.length() - 1)); endsWith = is.substring(1); contains = is.substring(1, (is.length() - 1)); badQuery = is + " a b c"; msg = "Searching for IS " + is; ids = this.getGroupStore().searchForGroups(is, IGroupConstants.IS, type); assertEquals(msg, ids.length, 1); member = this.findGroup(ids[0].getKey()); assertTrue(msg, member.isGroup()); msg = "Searching for STARTS WITH " + startsWith; ids = this.getGroupStore().searchForGroups(startsWith, IGroupConstants.STARTS_WITH, type); assertTrue(msg, ids.length > 0); msg = "Searching for ENDS WITH " + endsWith; ids = this.getGroupStore().searchForGroups(endsWith, IGroupConstants.ENDS_WITH, type); assertTrue(msg, ids.length > 0); msg = "Searching for CONTAINS " + contains; ids = this.getGroupStore().searchForGroups(contains, IGroupConstants.CONTAINS, type); assertTrue(msg, ids.length > 0); msg = "Searching for IS " + badQuery; ids = this.getGroupStore().searchForGroups(badQuery, IGroupConstants.IS, type); assertEquals(msg, ids.length, 0); } print("Test completed successfully." + CR); print("***** LEAVING FileSystemGroupsTest.testSearchForGroups() *****" + CR); } }
package com.example.androidthings.EddieBalance.imu;

import android.util.Log;

import com.google.android.things.pio.I2cDevice;
import com.google.android.things.pio.PeripheralManagerService;

import java.io.IOException;
import java.util.List;

import static java.lang.Math.*;
import static java.lang.Thread.sleep;

/**
 * Driver for the STMicroelectronics LSM9DS0 9-axis IMU (gyro at 0x6B,
 * accel/magnetometer "XM" at 0x1D) over Android Things I2C.
 *
 * Reads raw gyro/accel/mag/temperature samples into the public fields and
 * derives roll/pitch/heading from them.
 *
 * Created by anthony on 1/21/2017.
 */
public class imu {

    ////////////////////////////
    // LSM9DS0 Gyro Registers //
    ////////////////////////////
    private static final int WHO_AM_I_G = 0x0F;
    private static final int CTRL_REG1_G = 0x20;
    private static final int CTRL_REG2_G = 0x21;
    private static final int CTRL_REG3_G = 0x22;
    private static final int CTRL_REG4_G = 0x23;
    private static final int CTRL_REG5_G = 0x24;
    private static final int REFERENCE_G = 0x25;
    private static final int STATUS_REG_G = 0x27;
    private static final int OUT_X_L_G = 0x28;
    private static final int OUT_X_H_G = 0x29;
    private static final int OUT_Y_L_G = 0x2A;
    private static final int OUT_Y_H_G = 0x2B;
    private static final int OUT_Z_L_G = 0x2C;
    private static final int OUT_Z_H_G = 0x2D;
    private static final int FIFO_CTRL_REG_G = 0x2E;
    private static final int FIFO_SRC_REG_G = 0x2F;
    private static final int INT1_CFG_G = 0x30;
    private static final int INT1_SRC_G = 0x31;
    private static final int INT1_THS_XH_G = 0x32;
    private static final int INT1_THS_XL_G = 0x33;
    private static final int INT1_THS_YH_G = 0x34;
    private static final int INT1_THS_YL_G = 0x35;
    private static final int INT1_THS_ZH_G = 0x36;
    private static final int INT1_THS_ZL_G = 0x37;
    private static final int INT1_DURATION_G = 0x38;

    //////////////////////////////////////////
    // LSM9DS0 Accel/Magneto (XM) Registers //
    //////////////////////////////////////////
    private static final int OUT_TEMP_L_XM = 0x05;
    private static final int OUT_TEMP_H_XM = 0x06;
    private static final int STATUS_REG_M = 0x07;
    private static final int OUT_X_L_M = 0x08;
    private static final int OUT_X_H_M = 0x09;
    private static final int OUT_Y_L_M = 0x0A;
    private static final int OUT_Y_H_M = 0x0B;
    private static final int OUT_Z_L_M = 0x0C;
    private static final int OUT_Z_H_M = 0x0D;
    private static final int WHO_AM_I_XM = 0x0F;
    private static final int INT_CTRL_REG_M = 0x12;
    private static final int INT_SRC_REG_M = 0x13;
    private static final int INT_THS_L_M = 0x14;
    private static final int INT_THS_H_M = 0x15;
    private static final int OFFSET_X_L_M = 0x16;
    private static final int OFFSET_X_H_M = 0x17;
    private static final int OFFSET_Y_L_M = 0x18;
    private static final int OFFSET_Y_H_M = 0x19;
    private static final int OFFSET_Z_L_M = 0x1A;
    private static final int OFFSET_Z_H_M = 0x1B;
    private static final int REFERENCE_X = 0x1C;
    private static final int REFERENCE_Y = 0x1D;
    private static final int REFERENCE_Z = 0x1E;
    private static final int CTRL_REG0_XM = 0x1F;
    private static final int CTRL_REG1_XM = 0x20;
    private static final int CTRL_REG2_XM = 0x21;
    private static final int CTRL_REG3_XM = 0x22;
    private static final int CTRL_REG4_XM = 0x23;
    private static final int CTRL_REG5_XM = 0x24;
    private static final int CTRL_REG6_XM = 0x25;
    private static final int CTRL_REG7_XM = 0x26;
    private static final int STATUS_REG_A = 0x27;
    private static final int OUT_X_L_A = 0x28;
    private static final int OUT_X_H_A = 0x29;
    private static final int OUT_Y_L_A = 0x2A;
    private static final int OUT_Y_H_A = 0x2B;
    private static final int OUT_Z_L_A = 0x2C;
    private static final int OUT_Z_H_A = 0x2D;
    private static final int FIFO_CTRL_REG = 0x2E;
    private static final int FIFO_SRC_REG = 0x2F;
    private static final int INT_GEN_1_REG = 0x30;
    private static final int INT_GEN_1_SRC = 0x31;
    private static final int INT_GEN_1_THS = 0x32;
    private static final int INT_GEN_1_DURATION = 0x33;
    private static final int INT_GEN_2_REG = 0x34;
    private static final int INT_GEN_2_SRC = 0x35;
    private static final int INT_GEN_2_THS = 0x36;
    private static final int INT_GEN_2_DURATION = 0x37;
    private static final int CLICK_CFG = 0x38;
    private static final int CLICK_SRC = 0x39;
    private static final int CLICK_THS = 0x3A;
    private static final int TIME_LIMIT = 0x3B;
    private static final int TIME_LATENCY = 0x3C;
    private static final int TIME_WINDOW = 0x3D;
    private static final int ACT_THS = 0x3E;
    private static final int ACT_DUR = 0x3F;

    // Latest samples, updated by readGyro()/readAccel()/readMag().
    public double temp;
    public double mx, my, mz;
    public double ax, ay, az;
    public double gx, gy, gz;
    public double i2cHeading, i2cPitch, i2cRoll;

    private static final String TAG = "imu";
    private static final int MAX_BUFFER_LENGTH = 512;
    private static final int GYRO_I2C_ADDR = 0x6B;
    private static final int XM_I2C_ADDR = 0x1D;

    private I2cDevice GYRO_Device;
    private I2cDevice XM_Device;

    // Scratch buffer shared by all register reads (not thread-safe).
    byte[] rx_tx_buf = new byte[MAX_BUFFER_LENGTH];

    /**
     * Opens both LSM9DS0 I2C devices on the given bus and applies the initial
     * register configuration.
     *
     * @param i2cPort name of the I2C bus (e.g. "I2C1")
     * @throws IOException if either device cannot be opened
     */
    public imu(String i2cPort) throws IOException {
        PeripheralManagerService manager = new PeripheralManagerService();
        List<String> deviceList = manager.getI2cBusList();
        if (deviceList.isEmpty()) {
            Log.i(TAG, "No I2C bus available on this device.");
        } else {
            Log.i(TAG, "List of available devices: " + deviceList);
        }
        GYRO_Device = manager.openI2cDevice(i2cPort, GYRO_I2C_ADDR);
        XM_Device = manager.openI2cDevice(i2cPort, XM_I2C_ADDR);

        sendi2c(GYRO_Device, FIFO_CTRL_REG_G, 0);    // FIFO bypass
        sendi2c(GYRO_Device, CTRL_REG1_G, 0x0F);     // Normal mode, enable all axes
        sendi2c(GYRO_Device, CTRL_REG2_G, 0x00);     // Normal mode, high cutoff frequency
        sendi2c(GYRO_Device, CTRL_REG4_G, 0x10);     // Set scale to 500 dps
        sendi2c(GYRO_Device, CTRL_REG5_G, 0x00);     // FIFO Disabled, HPF Disabled

        sendi2c(XM_Device, FIFO_CTRL_REG, 0);        // FIFO bypass
        sendi2c(XM_Device, CTRL_REG1_XM, 0xFF);      // Accel data rate + all axes enabled
        sendi2c(XM_Device, CTRL_REG2_XM, 0x00);      // Set accel scale +/-2g
        sendi2c(XM_Device, CTRL_REG4_XM, 0x30);
        sendi2c(XM_Device, CTRL_REG5_XM, 0x94);
        sendi2c(XM_Device, CTRL_REG6_XM, 0x00);
        sendi2c(XM_Device, CTRL_REG7_XM, 0x00);
    }

    /**
     * Debug helper: repeatedly samples all sensors and logs them once a second.
     * NOTE(review): the loop runs only while temp == 0.0, i.e. it stops after
     * the first nonzero temperature sample — presumably intended as "until
     * first valid reading"; confirm before relying on it.
     */
    public Thread debugInfo = new Thread(new Runnable() {
        @Override
        public void run() {
            while (temp == 0.0) {
                readSensors();
                String result = String.format("gx:%6.2f gy:%6.2f gz:%6.2f ax:%6.2f ay:%6.2f az:%6.2f mx:%6.2f my:%6.2f mz:%6.2f temp:%6.2f\n",
                        gx, gy, gz, ax, ay, az, mx, my, mz, temp);
                Log.d(TAG, result);
                try {
                    sleep(1000);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        }
    });

    /** Writes a single byte to a device register; I/O errors are logged only. */
    private void sendi2c(I2cDevice mDevice, int reg, int tosend) {
        try {
            mDevice.writeRegByte(reg, (byte) tosend);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Reads {@code count} bytes starting at {@code reg} into rx_tx_buf.
     * Returns the first byte when count == 1, otherwise 0; I/O errors are
     * logged only, leaving stale buffer contents in place.
     */
    private char readi2c(I2cDevice mDevice, int reg, int count) {
        try {
            mDevice.readRegBuffer(reg, rx_tx_buf, count);
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (count == 1) {
            return (char) rx_tx_buf[0];
        }
        return 0;
    }

    /**
     * Combines a high/low byte pair into a signed 16-bit sample.
     *
     * BUGFIX: the original wrote {@code (short)(hi << 8) | lo}, which
     * sign-extends the low byte during int promotion — any sample whose low
     * byte is >= 0x80 was corrupted (e.g. hi=0x01, lo=0xFF produced -1 instead
     * of 511). Masking the low byte with 0xFF before the OR fixes it.
     */
    private static short combineBytes(byte hi, byte lo) {
        return (short) ((hi << 8) | (lo & 0xFF));
    }

    /**
     * Reads the three gyro axes and stores them, scaled to degrees/second
     * (+/-500 dps full scale), into gx/gy/gz.
     */
    public void readGyro() {
        readi2c(GYRO_Device, OUT_X_L_G, 6); // Read 6 bytes, beginning at OUT_X_L_G
        final double scale = 500.0 /*dps*/ / 32768.0;
        double tgx = combineBytes(rx_tx_buf[1], rx_tx_buf[0]) * scale;
        double tgy = combineBytes(rx_tx_buf[3], rx_tx_buf[2]) * scale;
        double tgz = combineBytes(rx_tx_buf[5], rx_tx_buf[4]) * scale;
        // x/z swapped — presumably a board-mounting axis remap; confirm.
        gx = tgz;
        gy = tgy;
        gz = tgx;
    }

    /**
     * Reads the three accelerometer axes (scaled to g at +/-2 g full scale:
     * 2/32768 = 0.00006103515625 g per LSB) into ax/ay/az, then reads the raw
     * temperature registers into temp.
     */
    public void readAccel() {
        readi2c(XM_Device, OUT_X_L_A, 6); // Read 6 bytes, beginning at OUT_X_L_A
        final double scale = 0.00006103515625; // 2 g / 32768 LSB
        double tax = combineBytes(rx_tx_buf[1], rx_tx_buf[0]) * scale;
        double tay = combineBytes(rx_tx_buf[3], rx_tx_buf[2]) * scale;
        double taz = combineBytes(rx_tx_buf[5], rx_tx_buf[4]) * scale;
        // x/z swapped — presumably a board-mounting axis remap; confirm.
        ax = taz;
        ay = tay;
        az = tax;
        readi2c(XM_Device, OUT_TEMP_L_XM, 2);
        // Raw temperature counts; no datasheet scaling applied here.
        temp = combineBytes(rx_tx_buf[1], rx_tx_buf[0]);
    }

    /**
     * Reads the three magnetometer axes into mx/my/mz using the same
     * 0.00006103515625 scale factor as the accelerometer.
     */
    public void readMag() {
        readi2c(XM_Device, OUT_X_L_M, 6); // Read 6 bytes, beginning at OUT_X_L_M
        final double scale = 0.00006103515625;
        double tmx = combineBytes(rx_tx_buf[1], rx_tx_buf[0]) * scale;
        double tmy = combineBytes(rx_tx_buf[3], rx_tx_buf[2]) * scale;
        double tmz = combineBytes(rx_tx_buf[5], rx_tx_buf[4]) * scale;
        // x/z swapped — presumably a board-mounting axis remap; confirm.
        mx = tmz;
        my = tmy;
        mz = tmx;
    }

    /** Refreshes gyro, accelerometer (plus temperature) and magnetometer samples. */
    public void readSensors() {
        readGyro();
        readAccel();
        readMag();
    }

    /**
     * Samples all sensors and computes tilt-compensated orientation:
     * i2cRoll/i2cPitch from the accelerometer, i2cHeading from the
     * magnetometer, all converted to degrees.
     */
    public void getOrientation() {
        readSensors();
        float PI_F = 3.14159265F;

        // i2cRoll: Rotation around the X-axis. -180 <= i2cRoll <= 180
        // a positive i2cRoll angle is defined to be a clockwise rotation about the positive X-axis
        //         y
        // i2cRoll = atan2(---)
        //         z
        // where: y, z are returned value from accelerometer sensor
        i2cRoll = (float) atan2(ay, az);

        // i2cPitch: Rotation around the Y-axis. -180 <= i2cPitch <= 180
        // a positive i2cPitch angle is defined to be a clockwise rotation about the positive Y-axis
        //                     -x
        // i2cPitch = atan(-------------------------------)
        //            y * sin(i2cRoll) + z * cos(i2cRoll)
        // where: x, y, z are returned value from accelerometer sensor
        if (ay * sin(i2cRoll) + az * cos(i2cRoll) == 0) {
            // Degenerate case: denominator is zero, pitch is +/-90 degrees.
            i2cPitch = ax > 0 ? (PI_F / 2) : (-PI_F / 2);
        } else {
            i2cPitch = (float) atan(-ax / (ay * sin(i2cRoll) + az * cos(i2cRoll)));
        }

        // i2cHeading: Rotation around the Z-axis. -180 <= i2cHeading <= 180
        // a positive i2cHeading angle is defined to be a clockwise rotation about the positive Z-axis
        //                         z * sin(i2cRoll) - y * cos(i2cRoll)
        // i2cHeading = atan2(--------------------------------------------------------------------------)
        //              x * cos(i2cPitch) + y * sin(i2cPitch) * sin(i2cRoll) + z * sin(i2cPitch) * cos(i2cRoll)
        // where: x, y, z are returned value from magnetometer sensor
        i2cHeading = (float) atan2(mz * sin(i2cRoll) - my * cos(i2cRoll),
                mx * cos(i2cPitch) + my * sin(i2cPitch) * sin(i2cRoll) + mz * sin(i2cPitch) * cos(i2cRoll));

        // Convert angular data to degrees (roll and heading are negated).
        i2cRoll = -i2cRoll * 180.0 / PI_F;
        i2cPitch = i2cPitch * 180.0 / PI_F;
        i2cHeading = -i2cHeading * 180.0 / PI_F;
    }
}
/*
 * Copyright 2015-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.ovsdb.rfc.table;

import java.util.Map;
import java.util.Set;

import org.onosproject.ovsdb.rfc.notation.Column;
import org.onosproject.ovsdb.rfc.notation.Row;
import org.onosproject.ovsdb.rfc.notation.Uuid;
import org.onosproject.ovsdb.rfc.schema.DatabaseSchema;
import org.onosproject.ovsdb.rfc.tableservice.AbstractOvsdbTableService;
import org.onosproject.ovsdb.rfc.tableservice.ColumnDescription;

/**
 * This class provides operations of Mirror Table.
 */
public class Mirror extends AbstractOvsdbTableService {

    /**
     * Mirror table column name.
     */
    public enum MirrorColumn {
        NAME("name"), SELECTSRCPORT("select_src_port"), SELECTDSTPORT("select_dst_port"),
        SELECTVLAN("select_vlan"), OUTPUTPORT("output_port"), EXTERNALIDS("external_ids"),
        OUTPUTVLAN("output_vlan"), STATISTICS("statistics"), SELECTALL("select_all");

        private final String columnName;

        MirrorColumn(String columnName) {
            this.columnName = columnName;
        }

        /**
         * Returns the table column name for MirrorColumn.
         * @return the table column name
         */
        public String columnName() {
            return columnName;
        }
    }

    /**
     * Constructs a Mirror object. Generate Mirror Table Description.
     * @param dbSchema DatabaseSchema
     * @param row Row
     */
    public Mirror(DatabaseSchema dbSchema, Row row) {
        super(dbSchema, row, OvsdbTable.MIRROR, VersionNum.VERSION100);
    }

    /**
     * Retrieves the "name" Column entity from this row.
     * @return the Column entity which column name is "name"
     */
    public Column getNameColumn() {
        return (Column) super.getColumnHandler(
                new ColumnDescription(MirrorColumn.NAME.columnName(),
                                      "getNameColumn", VersionNum.VERSION100));
    }

    /**
     * Stores the given value into the "name" column of this row.
     * @param name the column data which column name is "name"
     */
    public void setName(String name) {
        super.setDataHandler(
                new ColumnDescription(MirrorColumn.NAME.columnName(),
                                      "setName", VersionNum.VERSION100),
                name);
    }

    /**
     * Retrieves the "name" column data from this row.
     * @return the column data which column name is "name"
     */
    public String getName() {
        return (String) super.getDataHandler(
                new ColumnDescription(MirrorColumn.NAME.columnName(),
                                      "getName", VersionNum.VERSION100));
    }

    /**
     * Retrieves the "select_src_port" Column entity from this row.
     * @return the Column entity which column name is "select_src_port"
     */
    public Column getSelectSrcPortColumn() {
        return (Column) super.getColumnHandler(
                new ColumnDescription(MirrorColumn.SELECTSRCPORT.columnName(),
                                      "getSelectSrcPortColumn", VersionNum.VERSION100));
    }

    /**
     * Stores the given set of port UUIDs into the "select_src_port" column of
     * this row.
     * @param selectSrcPort the column data which column name is
     *            "select_src_port"
     */
    public void setSelectSrcPort(Set<Uuid> selectSrcPort) {
        super.setDataHandler(
                new ColumnDescription(MirrorColumn.SELECTSRCPORT.columnName(),
                                      "setSelectSrcPort", VersionNum.VERSION100),
                selectSrcPort);
    }

    /**
     * Retrieves the "select_dst_port" Column entity from this row.
     * @return the Column entity which column name is "select_dst_port"
     */
    public Column getSelectDstPortColumn() {
        return (Column) super.getColumnHandler(
                new ColumnDescription(MirrorColumn.SELECTDSTPORT.columnName(),
                                      "getSelectDstPortColumn", VersionNum.VERSION100));
    }

    /**
     * Stores the given set of port UUIDs into the "select_dst_port" column of
     * this row.
     * @param selectDstPrt the column data which column name is
     *            "select_dst_port"
     */
    public void setSelectDstPort(Set<Uuid> selectDstPrt) {
        super.setDataHandler(
                new ColumnDescription(MirrorColumn.SELECTDSTPORT.columnName(),
                                      "setSelectDstPort", VersionNum.VERSION100),
                selectDstPrt);
    }

    /**
     * Retrieves the "select_vlan" Column entity from this row.
     * @return the Column entity which column name is "select_vlan"
     */
    public Column getSelectVlanColumn() {
        return (Column) super.getColumnHandler(
                new ColumnDescription(MirrorColumn.SELECTVLAN.columnName(),
                                      "getSelectVlanColumn", VersionNum.VERSION100));
    }

    /**
     * Stores the given VLAN set into the "select_vlan" column of this row.
     * @param selectVlan the column data which column name is "select_vlan"
     */
    public void setSelectVlan(Set<Short> selectVlan) {
        super.setDataHandler(
                new ColumnDescription(MirrorColumn.SELECTVLAN.columnName(),
                                      "setSelectVlan", VersionNum.VERSION100),
                selectVlan);
    }

    /**
     * Retrieves the "output_port" Column entity from this row.
     * @return the Column entity which column name is "output_port"
     */
    public Column getOutputPortColumn() {
        return (Column) super.getColumnHandler(
                new ColumnDescription(MirrorColumn.OUTPUTPORT.columnName(),
                                      "getOutputPortColumn", VersionNum.VERSION100));
    }

    /**
     * Stores the given port UUID into the "output_port" column of this row.
     * @param outputPort the column data which column name is "output_port"
     */
    public void setOutputPort(Uuid outputPort) {
        super.setDataHandler(
                new ColumnDescription(MirrorColumn.OUTPUTPORT.columnName(),
                                      "setOutputPort", VersionNum.VERSION100),
                outputPort);
    }

    /**
     * Retrieves the "output_vlan" Column entity from this row.
     * @return the Column entity which column name is "output_vlan"
     */
    public Column getOutputVlanColumn() {
        return (Column) super.getColumnHandler(
                new ColumnDescription(MirrorColumn.OUTPUTVLAN.columnName(),
                                      "getOutputVlanColumn", VersionNum.VERSION100));
    }

    /**
     * Stores the given VLAN id into the "output_vlan" column of this row.
     * @param outputVlan the column data which column name is "output_vlan"
     */
    public void setOutputVlan(Short outputVlan) {
        super.setDataHandler(
                new ColumnDescription(MirrorColumn.OUTPUTVLAN.columnName(),
                                      "setOutputVlan", VersionNum.VERSION100),
                outputVlan);
    }

    /**
     * Retrieves the "statistics" Column entity from this row.
     * @return the Column entity which column name is "statistics"
     */
    public Column getStatisticsColumn() {
        return (Column) super.getColumnHandler(
                new ColumnDescription(MirrorColumn.STATISTICS.columnName(),
                                      "getStatisticsColumn", VersionNum.VERSION640));
    }

    /**
     * Stores the given counter map into the "statistics" column of this row.
     * @param statistics the column data which column name is "statistics"
     */
    public void setStatistics(Map<String, Long> statistics) {
        super.setDataHandler(
                new ColumnDescription(MirrorColumn.STATISTICS.columnName(),
                                      "setStatistics", VersionNum.VERSION640),
                statistics);
    }

    /**
     * Retrieves the "external_ids" Column entity from this row.
     * @return the Column entity which column name is "external_ids"
     */
    public Column getExternalIdsColumn() {
        return (Column) super.getColumnHandler(
                new ColumnDescription(MirrorColumn.EXTERNALIDS.columnName(),
                                      "getExternalIdsColumn", VersionNum.VERSION100));
    }

    /**
     * Stores the given id map into the "external_ids" column of this row.
     * @param externalIds the column data which column name is "external_ids"
     */
    public void setExternalIds(Map<String, String> externalIds) {
        super.setDataHandler(
                new ColumnDescription(MirrorColumn.EXTERNALIDS.columnName(),
                                      "setExternalIds", VersionNum.VERSION100),
                externalIds);
    }

    /**
     * Retrieves the "select_all" Column entity from this row.
     * @return the Column entity which column name is "select_all"
     */
    public Column getSelectAllColumn() {
        return (Column) super.getColumnHandler(
                new ColumnDescription(MirrorColumn.SELECTALL.columnName(),
                                      "getSelectAllColumn", VersionNum.VERSION620));
    }

    /**
     * Stores the given flag into the "select_all" column of this row.
     * @param selectAll the column data which column name is "select_all"
     */
    public void setSelectAll(Boolean selectAll) {
        super.setDataHandler(
                new ColumnDescription(MirrorColumn.SELECTALL.columnName(),
                                      "setSelectAll", VersionNum.VERSION620),
                selectAll);
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: grpc/file_system_master.proto package alluxio.grpc; /** * Protobuf type {@code alluxio.grpc.file.MountPRequest} */ public final class MountPRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:alluxio.grpc.file.MountPRequest) MountPRequestOrBuilder { private static final long serialVersionUID = 0L; // Use MountPRequest.newBuilder() to construct. private MountPRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private MountPRequest() { alluxioPath_ = ""; ufsPath_ = ""; } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private MountPRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } case 10: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000001; alluxioPath_ = bs; break; } case 18: { com.google.protobuf.ByteString bs = input.readBytes(); bitField0_ |= 0x00000002; ufsPath_ = bs; break; } case 26: { alluxio.grpc.MountPOptions.Builder subBuilder = null; if (((bitField0_ & 0x00000004) == 0x00000004)) { subBuilder = options_.toBuilder(); } options_ = input.readMessage(alluxio.grpc.MountPOptions.PARSER, extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(options_); options_ = subBuilder.buildPartial(); } bitField0_ |= 0x00000004; break; } 
} } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return alluxio.grpc.FileSystemMasterProto.internal_static_alluxio_grpc_file_MountPRequest_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return alluxio.grpc.FileSystemMasterProto.internal_static_alluxio_grpc_file_MountPRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( alluxio.grpc.MountPRequest.class, alluxio.grpc.MountPRequest.Builder.class); } private int bitField0_; public static final int ALLUXIOPATH_FIELD_NUMBER = 1; private volatile java.lang.Object alluxioPath_; /** * <pre> ** the path of alluxio mount point * </pre> * * <code>optional string alluxioPath = 1;</code> */ public boolean hasAlluxioPath() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> ** the path of alluxio mount point * </pre> * * <code>optional string alluxioPath = 1;</code> */ public java.lang.String getAlluxioPath() { java.lang.Object ref = alluxioPath_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { alluxioPath_ = s; } return s; } } /** * <pre> ** the path of alluxio mount point * </pre> * * <code>optional string alluxioPath = 1;</code> */ public com.google.protobuf.ByteString getAlluxioPathBytes() { java.lang.Object ref = alluxioPath_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); alluxioPath_ = b; return b; } else { return 
(com.google.protobuf.ByteString) ref; } } public static final int UFSPATH_FIELD_NUMBER = 2; private volatile java.lang.Object ufsPath_; /** * <pre> ** the path of the under file system * </pre> * * <code>optional string ufsPath = 2;</code> */ public boolean hasUfsPath() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> ** the path of the under file system * </pre> * * <code>optional string ufsPath = 2;</code> */ public java.lang.String getUfsPath() { java.lang.Object ref = ufsPath_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { ufsPath_ = s; } return s; } } /** * <pre> ** the path of the under file system * </pre> * * <code>optional string ufsPath = 2;</code> */ public com.google.protobuf.ByteString getUfsPathBytes() { java.lang.Object ref = ufsPath_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); ufsPath_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int OPTIONS_FIELD_NUMBER = 3; private alluxio.grpc.MountPOptions options_; /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public alluxio.grpc.MountPOptions getOptions() { return options_ == null ? alluxio.grpc.MountPOptions.getDefaultInstance() : options_; } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public alluxio.grpc.MountPOptionsOrBuilder getOptionsOrBuilder() { return options_ == null ? 
alluxio.grpc.MountPOptions.getDefaultInstance() : options_; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) == 0x00000001)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, alluxioPath_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, ufsPath_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeMessage(3, getOptions()); } unknownFields.writeTo(output); } public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, alluxioPath_); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, ufsPath_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(3, getOptions()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof alluxio.grpc.MountPRequest)) { return super.equals(obj); } alluxio.grpc.MountPRequest other = (alluxio.grpc.MountPRequest) obj; boolean result = true; result = result && (hasAlluxioPath() == other.hasAlluxioPath()); if (hasAlluxioPath()) { result = result && getAlluxioPath() .equals(other.getAlluxioPath()); } result = result && (hasUfsPath() == other.hasUfsPath()); if (hasUfsPath()) { result = result && getUfsPath() .equals(other.getUfsPath()); } result = result && (hasOptions() == other.hasOptions()); if (hasOptions()) 
{ result = result && getOptions() .equals(other.getOptions()); } result = result && unknownFields.equals(other.unknownFields); return result; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasAlluxioPath()) { hash = (37 * hash) + ALLUXIOPATH_FIELD_NUMBER; hash = (53 * hash) + getAlluxioPath().hashCode(); } if (hasUfsPath()) { hash = (37 * hash) + UFSPATH_FIELD_NUMBER; hash = (53 * hash) + getUfsPath().hashCode(); } if (hasOptions()) { hash = (37 * hash) + OPTIONS_FIELD_NUMBER; hash = (53 * hash) + getOptions().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static alluxio.grpc.MountPRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static alluxio.grpc.MountPRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static alluxio.grpc.MountPRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static alluxio.grpc.MountPRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static alluxio.grpc.MountPRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static alluxio.grpc.MountPRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); 
} public static alluxio.grpc.MountPRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static alluxio.grpc.MountPRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static alluxio.grpc.MountPRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static alluxio.grpc.MountPRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static alluxio.grpc.MountPRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static alluxio.grpc.MountPRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(alluxio.grpc.MountPRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * Protobuf type {@code alluxio.grpc.file.MountPRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:alluxio.grpc.file.MountPRequest) alluxio.grpc.MountPRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return alluxio.grpc.FileSystemMasterProto.internal_static_alluxio_grpc_file_MountPRequest_descriptor; } protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return alluxio.grpc.FileSystemMasterProto.internal_static_alluxio_grpc_file_MountPRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( alluxio.grpc.MountPRequest.class, alluxio.grpc.MountPRequest.Builder.class); } // Construct using alluxio.grpc.MountPRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { getOptionsFieldBuilder(); } } public Builder clear() { super.clear(); alluxioPath_ = ""; bitField0_ = (bitField0_ & ~0x00000001); ufsPath_ = ""; bitField0_ = (bitField0_ & ~0x00000002); if (optionsBuilder_ == null) { options_ = null; } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return alluxio.grpc.FileSystemMasterProto.internal_static_alluxio_grpc_file_MountPRequest_descriptor; } public alluxio.grpc.MountPRequest getDefaultInstanceForType() { return 
alluxio.grpc.MountPRequest.getDefaultInstance(); } public alluxio.grpc.MountPRequest build() { alluxio.grpc.MountPRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } public alluxio.grpc.MountPRequest buildPartial() { alluxio.grpc.MountPRequest result = new alluxio.grpc.MountPRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.alluxioPath_ = alluxioPath_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.ufsPath_ = ufsPath_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } if (optionsBuilder_ == null) { result.options_ = options_; } else { result.options_ = optionsBuilder_.build(); } result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder clone() { return (Builder) super.clone(); } public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.setField(field, value); } public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return (Builder) super.clearField(field); } public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return (Builder) super.clearOneof(oneof); } public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return (Builder) super.setRepeatedField(field, index, value); } public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return (Builder) super.addRepeatedField(field, value); } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof alluxio.grpc.MountPRequest) { return mergeFrom((alluxio.grpc.MountPRequest)other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(alluxio.grpc.MountPRequest other) { if (other == alluxio.grpc.MountPRequest.getDefaultInstance()) return this; if (other.hasAlluxioPath()) { bitField0_ |= 0x00000001; alluxioPath_ = other.alluxioPath_; onChanged(); } if (other.hasUfsPath()) { bitField0_ |= 0x00000002; ufsPath_ = other.ufsPath_; onChanged(); } if (other.hasOptions()) { mergeOptions(other.getOptions()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } public final boolean isInitialized() { return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { alluxio.grpc.MountPRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (alluxio.grpc.MountPRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.lang.Object alluxioPath_ = ""; /** * <pre> ** the path of alluxio mount point * </pre> * * <code>optional string alluxioPath = 1;</code> */ public boolean hasAlluxioPath() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** * <pre> ** the path of alluxio mount point * </pre> * * <code>optional string alluxioPath = 1;</code> */ public java.lang.String getAlluxioPath() { java.lang.Object ref = alluxioPath_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { alluxioPath_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> ** the path of alluxio mount point * </pre> * * <code>optional string alluxioPath = 1;</code> */ public com.google.protobuf.ByteString getAlluxioPathBytes() { java.lang.Object ref = alluxioPath_; if (ref instanceof String) { 
com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); alluxioPath_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> ** the path of alluxio mount point * </pre> * * <code>optional string alluxioPath = 1;</code> */ public Builder setAlluxioPath( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; alluxioPath_ = value; onChanged(); return this; } /** * <pre> ** the path of alluxio mount point * </pre> * * <code>optional string alluxioPath = 1;</code> */ public Builder clearAlluxioPath() { bitField0_ = (bitField0_ & ~0x00000001); alluxioPath_ = getDefaultInstance().getAlluxioPath(); onChanged(); return this; } /** * <pre> ** the path of alluxio mount point * </pre> * * <code>optional string alluxioPath = 1;</code> */ public Builder setAlluxioPathBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; alluxioPath_ = value; onChanged(); return this; } private java.lang.Object ufsPath_ = ""; /** * <pre> ** the path of the under file system * </pre> * * <code>optional string ufsPath = 2;</code> */ public boolean hasUfsPath() { return ((bitField0_ & 0x00000002) == 0x00000002); } /** * <pre> ** the path of the under file system * </pre> * * <code>optional string ufsPath = 2;</code> */ public java.lang.String getUfsPath() { java.lang.Object ref = ufsPath_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { ufsPath_ = s; } return s; } else { return (java.lang.String) ref; } } /** * <pre> ** the path of the under file system * </pre> * * <code>optional string ufsPath = 2;</code> */ public com.google.protobuf.ByteString getUfsPathBytes() { java.lang.Object ref = ufsPath_; if (ref instanceof String) { com.google.protobuf.ByteString b = 
com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); ufsPath_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> ** the path of the under file system * </pre> * * <code>optional string ufsPath = 2;</code> */ public Builder setUfsPath( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; ufsPath_ = value; onChanged(); return this; } /** * <pre> ** the path of the under file system * </pre> * * <code>optional string ufsPath = 2;</code> */ public Builder clearUfsPath() { bitField0_ = (bitField0_ & ~0x00000002); ufsPath_ = getDefaultInstance().getUfsPath(); onChanged(); return this; } /** * <pre> ** the path of the under file system * </pre> * * <code>optional string ufsPath = 2;</code> */ public Builder setUfsPathBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; ufsPath_ = value; onChanged(); return this; } private alluxio.grpc.MountPOptions options_ = null; private com.google.protobuf.SingleFieldBuilderV3< alluxio.grpc.MountPOptions, alluxio.grpc.MountPOptions.Builder, alluxio.grpc.MountPOptionsOrBuilder> optionsBuilder_; /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public boolean hasOptions() { return ((bitField0_ & 0x00000004) == 0x00000004); } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public alluxio.grpc.MountPOptions getOptions() { if (optionsBuilder_ == null) { return options_ == null ? 
alluxio.grpc.MountPOptions.getDefaultInstance() : options_; } else { return optionsBuilder_.getMessage(); } } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public Builder setOptions(alluxio.grpc.MountPOptions value) { if (optionsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } options_ = value; onChanged(); } else { optionsBuilder_.setMessage(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public Builder setOptions( alluxio.grpc.MountPOptions.Builder builderForValue) { if (optionsBuilder_ == null) { options_ = builderForValue.build(); onChanged(); } else { optionsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public Builder mergeOptions(alluxio.grpc.MountPOptions value) { if (optionsBuilder_ == null) { if (((bitField0_ & 0x00000004) == 0x00000004) && options_ != null && options_ != alluxio.grpc.MountPOptions.getDefaultInstance()) { options_ = alluxio.grpc.MountPOptions.newBuilder(options_).mergeFrom(value).buildPartial(); } else { options_ = value; } onChanged(); } else { optionsBuilder_.mergeFrom(value); } bitField0_ |= 0x00000004; return this; } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public Builder clearOptions() { if (optionsBuilder_ == null) { options_ = null; onChanged(); } else { optionsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000004); return this; } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public alluxio.grpc.MountPOptions.Builder getOptionsBuilder() { bitField0_ |= 0x00000004; onChanged(); return getOptionsFieldBuilder().getBuilder(); } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ public alluxio.grpc.MountPOptionsOrBuilder getOptionsOrBuilder() { if (optionsBuilder_ != null) { return 
optionsBuilder_.getMessageOrBuilder(); } else { return options_ == null ? alluxio.grpc.MountPOptions.getDefaultInstance() : options_; } } /** * <code>optional .alluxio.grpc.file.MountPOptions options = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< alluxio.grpc.MountPOptions, alluxio.grpc.MountPOptions.Builder, alluxio.grpc.MountPOptionsOrBuilder> getOptionsFieldBuilder() { if (optionsBuilder_ == null) { optionsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< alluxio.grpc.MountPOptions, alluxio.grpc.MountPOptions.Builder, alluxio.grpc.MountPOptionsOrBuilder>( getOptions(), getParentForChildren(), isClean()); options_ = null; } return optionsBuilder_; } public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:alluxio.grpc.file.MountPRequest) } // @@protoc_insertion_point(class_scope:alluxio.grpc.file.MountPRequest) private static final alluxio.grpc.MountPRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new alluxio.grpc.MountPRequest(); } public static alluxio.grpc.MountPRequest getDefaultInstance() { return DEFAULT_INSTANCE; } @java.lang.Deprecated public static final com.google.protobuf.Parser<MountPRequest> PARSER = new com.google.protobuf.AbstractParser<MountPRequest>() { public MountPRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new MountPRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<MountPRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<MountPRequest> getParserForType() { return PARSER; } public alluxio.grpc.MountPRequest 
getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Distributable under LGPL license. * See terms of license at gnu.org. */ package net.java.sip.communicator.impl.gui.main.account; import java.awt.*; import java.awt.event.*; import java.util.*; import javax.swing.*; import net.java.sip.communicator.impl.gui.*; import net.java.sip.communicator.impl.gui.event.*; import net.java.sip.communicator.impl.gui.utils.*; import net.java.sip.communicator.plugin.desktoputil.*; import net.java.sip.communicator.service.gui.*; import net.java.sip.communicator.service.gui.Container; import net.java.sip.communicator.util.*; import net.java.sip.communicator.util.skin.*; import org.jitsi.service.resources.*; import org.osgi.framework.*; /** * AccountRightButtonMenu is the menu that opens when user right clicks on any * of his accounts in the account list section. * * @author Marin Dzhigarov */ public class AccountRightButtonMenu extends SIPCommPopupMenu implements ActionListener, PluginComponentListener, Skinnable { /** * The serial version UID. */ private static final long serialVersionUID = 1L; /** * The logger of this class. */ private final Logger logger = Logger.getLogger(AccountRightButtonMenu.class); /** * The Account that is clicked on. */ private Account account = null; /** * A list of all PluginComponents that are registered through the OSGi * bundle context. */ private java.util.List<PluginComponent> pluginComponents = new ArrayList<PluginComponent>(); /** * The edit item. */ private final JMenuItem editItem = new JMenuItem( GuiActivator.getResources().getI18NString( "service.gui.EDIT")); /** * Creates an instance of AccountRightButtonMenu */ public AccountRightButtonMenu() { super(); this.setLocation(getLocation()); this.init(); loadSkin(); } /** * Sets the current Account that is clicked on. * @param account the Account that is clicked on. 
*/ public void setAccount(Account account) { this.account = account; editItem.setEnabled(account != null && this.account.isEnabled()); for (PluginComponent pluginComponent : pluginComponents) pluginComponent.setCurrentAccountID(account.getAccountID()); } /** * Returns the Account that was last clicked on. * @return the Account that was last clicked on. */ public Account getAccount() { return account; } /** * Initialized the menu by adding all containing menu items. */ private void init() { initPluginComponents(); add(editItem); editItem.addActionListener(this); } /** * Initializes plug-in components for this container. */ private void initPluginComponents() { // Search for plugin components registered through the OSGI bundle // context. Collection<ServiceReference<PluginComponentFactory>> serRefs; String osgiFilter = "(" + Container.CONTAINER_ID + "=" + Container.CONTAINER_ACCOUNT_RIGHT_BUTTON_MENU.getID() + ")"; try { serRefs = GuiActivator.bundleContext.getServiceReferences( PluginComponentFactory.class, osgiFilter); } catch (InvalidSyntaxException ex) { serRefs = null; logger.error("Could not obtain plugin reference.", ex); } if ((serRefs != null) && !serRefs.isEmpty()) { for (ServiceReference<PluginComponentFactory> serRef : serRefs) { PluginComponentFactory factory = GuiActivator.bundleContext.getService(serRef); PluginComponent component = factory.getPluginComponentInstance(this); if (component.getComponent() == null) continue; pluginComponents.add(component); int positionIndex = factory.getPositionIndex(); if (positionIndex != -1) add((Component)component.getComponent(), positionIndex); else add((Component)component.getComponent()); } } GuiActivator.getUIService().addPluginComponentListener(this); } /** * Reloads skin related information. */ public void loadSkin() { editItem.setIcon(new ImageIcon( ImageLoader.getImage(ImageLoader.ACCOUNT_EDIT_ICON))); } /** * Indicates that a plugin component has been added to this container. 
* * @param event the <tt>PluginComponentEvent</tt> that notified us */ /** * Indicates that a new plugin component has been added. Adds it to this * container if it belongs to it. * * @param event the <tt>PluginComponentEvent</tt> that notified us */ public void pluginComponentAdded(PluginComponentEvent event) { PluginComponentFactory factory = event.getPluginComponentFactory(); if (!factory.getContainer().equals( Container.CONTAINER_ACCOUNT_RIGHT_BUTTON_MENU)) return; PluginComponent c = factory.getPluginComponentInstance(this); this.add((Component) c.getComponent()); this.repaint(); } /** * Removes the according plug-in component from this container. * @param event the <tt>PluginComponentEvent</tt> that notified us */ public void pluginComponentRemoved(PluginComponentEvent event) { PluginComponentFactory factory = event.getPluginComponentFactory(); if(factory.getContainer() .equals(Container.CONTAINER_ACCOUNT_RIGHT_BUTTON_MENU)) { Component c = (Component)factory.getPluginComponentInstance(this) .getComponent(); this.remove(c); pluginComponents.remove(c); } } /** * Handles the <tt>ActionEvent</tt>. Determines which menu item was * selected and performs the appropriate operations. 
* @param e the <tt>ActionEvent</tt>, which notified us of the action */ public void actionPerformed(ActionEvent e) { JMenuItem menuItem = (JMenuItem) e.getSource(); if (menuItem.equals(editItem)) { if (account == null) return; AccountRegWizardContainerImpl wizard = (AccountRegWizardContainerImpl) GuiActivator.getUIService() .getAccountRegWizardContainer(); AccountRegistrationWizard protocolWizard = wizard.getProtocolWizard(account.getProtocolProvider()); ResourceManagementService resources = GuiActivator.getResources(); if (protocolWizard != null) { wizard.setTitle(resources.getI18NString( "service.gui.ACCOUNT_REGISTRATION_WIZARD")); wizard.modifyAccount(account.getProtocolProvider()); wizard.showDialog(false); } else { // There is no wizard for this account - just show an error // dialog: String title = resources.getI18NString("service.gui.ERROR"); String message = resources.getI18NString("service.gui.EDIT_NOT_SUPPORTED"); ErrorDialog dialog = new ErrorDialog(null, title, message); dialog.setVisible(true); } } } }
/* * Copyright (c) 1997, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.sun.tools.internal.ws.wsdl.parser; import com.sun.tools.internal.ws.api.wsdl.TWSDLExtensible; import com.sun.tools.internal.ws.api.wsdl.TWSDLParserContext; import com.sun.tools.internal.ws.util.xml.XmlUtil; import com.sun.tools.internal.ws.wsdl.document.soap.*; import com.sun.tools.internal.ws.wsdl.framework.TWSDLParserContextImpl; import org.w3c.dom.Element; import org.xml.sax.Locator; import javax.xml.namespace.QName; import java.util.Iterator; import java.util.Map; /** * The SOAP extension handler for WSDL. 
* * @author WS Development Team */ public class SOAPExtensionHandler extends AbstractExtensionHandler { public SOAPExtensionHandler(Map<String, AbstractExtensionHandler> extensionHandlerMap) { super(extensionHandlerMap); } public String getNamespaceURI() { return Constants.NS_WSDL_SOAP; } public boolean handleDefinitionsExtension( TWSDLParserContext context, TWSDLExtensible parent, Element e) { Util.fail( "parsing.invalidExtensionElement", e.getTagName(), e.getNamespaceURI()); return false; // keep compiler happy } public boolean handleTypesExtension( com.sun.tools.internal.ws.api.wsdl.TWSDLParserContext context, TWSDLExtensible parent, Element e) { Util.fail( "parsing.invalidExtensionElement", e.getTagName(), e.getNamespaceURI()); return false; // keep compiler happy } protected SOAPBinding getSOAPBinding(Locator location){ return new SOAPBinding(location); } public boolean handleBindingExtension( TWSDLParserContext context, TWSDLExtensible parent, Element e) { if (XmlUtil.matchesTagNS(e, getBindingQName())) { context.push(); context.registerNamespaces(e); SOAPBinding binding = getSOAPBinding(context.getLocation(e)); // NOTE - the "transport" attribute is required according to section 3.3 of the WSDL 1.1 spec, // but optional according to the schema in appendix A 4.2 of the same document! 
String transport = Util.getRequiredAttribute(e, Constants.ATTR_TRANSPORT); binding.setTransport(transport); String style = XmlUtil.getAttributeOrNull(e, Constants.ATTR_STYLE); if (style != null) { if (style.equals(Constants.ATTRVALUE_RPC)) { binding.setStyle(SOAPStyle.RPC); } else if (style.equals(Constants.ATTRVALUE_DOCUMENT)) { binding.setStyle(SOAPStyle.DOCUMENT); } else { Util.fail( "parsing.invalidAttributeValue", Constants.ATTR_STYLE, style); } } parent.addExtension(binding); context.pop(); // context.fireDoneParsingEntity(getBindingQName(), binding); return true; } else { Util.fail( "parsing.invalidExtensionElement", e.getTagName(), e.getNamespaceURI()); return false; // keep compiler happy } } public boolean handleOperationExtension( TWSDLParserContext context, TWSDLExtensible parent, Element e) { if (XmlUtil.matchesTagNS(e, getOperationQName())) { context.push(); context.registerNamespaces(e); SOAPOperation operation = new SOAPOperation(context.getLocation(e)); String soapAction = XmlUtil.getAttributeOrNull(e, Constants.ATTR_SOAP_ACTION); if (soapAction != null) { operation.setSOAPAction(soapAction); } String style = XmlUtil.getAttributeOrNull(e, Constants.ATTR_STYLE); if (style != null) { if (style.equals(Constants.ATTRVALUE_RPC)) { operation.setStyle(SOAPStyle.RPC); } else if (style.equals(Constants.ATTRVALUE_DOCUMENT)) { operation.setStyle(SOAPStyle.DOCUMENT); } else { Util.fail( "parsing.invalidAttributeValue", Constants.ATTR_STYLE, style); } } parent.addExtension(operation); context.pop(); // context.fireDoneParsingEntity( // getOperationQName(), // operation); return true; } else { Util.fail( "parsing.invalidExtensionElement", e.getTagName(), e.getNamespaceURI()); return false; // keep compiler happy } } public boolean handleInputExtension( TWSDLParserContext context, TWSDLExtensible parent, Element e) { return handleInputOutputExtension(context, parent, e); } public boolean handleOutputExtension( TWSDLParserContext context, TWSDLExtensible parent, 
Element e) { return handleInputOutputExtension(context, parent, e); } @Override protected boolean handleMIMEPartExtension( TWSDLParserContext context, TWSDLExtensible parent, Element e) { return handleInputOutputExtension(context, parent, e); } protected boolean handleInputOutputExtension( TWSDLParserContext contextif, TWSDLExtensible parent, Element e) { TWSDLParserContextImpl context = (TWSDLParserContextImpl)contextif; if (XmlUtil.matchesTagNS(e, getBodyQName())) { context.push(); context.registerNamespaces(e); SOAPBody body = new SOAPBody(context.getLocation(e)); String use = XmlUtil.getAttributeOrNull(e, Constants.ATTR_USE); if (use != null) { if (use.equals(Constants.ATTRVALUE_LITERAL)) { body.setUse(SOAPUse.LITERAL); } else if (use.equals(Constants.ATTRVALUE_ENCODED)) { body.setUse(SOAPUse.ENCODED); } else { Util.fail( "parsing.invalidAttributeValue", Constants.ATTR_USE, use); } } String namespace = XmlUtil.getAttributeOrNull(e, Constants.ATTR_NAMESPACE); if (namespace != null) { body.setNamespace(namespace); } String encodingStyle = XmlUtil.getAttributeOrNull(e, Constants.ATTR_ENCODING_STYLE); if (encodingStyle != null) { body.setEncodingStyle(encodingStyle); } String parts = XmlUtil.getAttributeOrNull(e, Constants.ATTR_PARTS); if (parts != null) { body.setParts(parts); } parent.addExtension(body); context.pop(); // context.fireDoneParsingEntity(getBodyQName(), body); return true; } else if (XmlUtil.matchesTagNS(e, getHeaderQName())) { return handleHeaderElement(parent, e, context); } else { Util.fail("parsing.invalidExtensionElement", e.getTagName(), e.getNamespaceURI()); return false; // keep compiler happy } } private boolean handleHeaderElement(TWSDLExtensible parent, Element e, TWSDLParserContextImpl context) { context.push(); context.registerNamespaces(e); SOAPHeader header = new SOAPHeader(context.getLocation(e)); String use = XmlUtil.getAttributeOrNull(e, Constants.ATTR_USE); if (use != null) { if (use.equals(Constants.ATTRVALUE_LITERAL)) { 
header.setUse(SOAPUse.LITERAL); } else if (use.equals(Constants.ATTRVALUE_ENCODED)) { header.setUse(SOAPUse.ENCODED); } else { Util.fail("parsing.invalidAttributeValue", Constants.ATTR_USE, use); } } String namespace = XmlUtil.getAttributeOrNull(e, Constants.ATTR_NAMESPACE); if (namespace != null) { header.setNamespace(namespace); } String encodingStyle = XmlUtil.getAttributeOrNull(e, Constants.ATTR_ENCODING_STYLE); if (encodingStyle != null) { header.setEncodingStyle(encodingStyle); } String part = XmlUtil.getAttributeOrNull(e, Constants.ATTR_PART); if (part != null) { header.setPart(part); } String messageAttr = XmlUtil.getAttributeOrNull(e, Constants.ATTR_MESSAGE); if (messageAttr != null) { header.setMessage(context.translateQualifiedName(context.getLocation(e), messageAttr)); } for (Iterator iter = XmlUtil.getAllChildren(e); iter.hasNext();) { Element e2 = Util.nextElement(iter); if (e2 == null) break; if (XmlUtil.matchesTagNS(e2, getHeaderfaultQName())) { handleHeaderFaultElement(e, context, header, use, e2); } else { Util.fail("parsing.invalidElement", e2.getTagName(), e2.getNamespaceURI()); } } parent.addExtension(header); context.pop(); context.fireDoneParsingEntity(getHeaderQName(), header); return true; } private void handleHeaderFaultElement(Element e, TWSDLParserContextImpl context, SOAPHeader header, String use, Element e2) { context.push(); context.registerNamespaces(e); SOAPHeaderFault headerfault = new SOAPHeaderFault(context.getLocation(e)); String use2 = XmlUtil.getAttributeOrNull(e2, Constants.ATTR_USE); if (use2 != null) { if (use2.equals(Constants.ATTRVALUE_LITERAL)) { headerfault.setUse(SOAPUse.LITERAL); } else if (use.equals(Constants.ATTRVALUE_ENCODED)) { headerfault.setUse(SOAPUse.ENCODED); } else { Util.fail("parsing.invalidAttributeValue", Constants.ATTR_USE, use2); } } String namespace2 = XmlUtil.getAttributeOrNull(e2, Constants.ATTR_NAMESPACE); if (namespace2 != null) { headerfault.setNamespace(namespace2); } String encodingStyle2 = 
XmlUtil.getAttributeOrNull(e2, Constants.ATTR_ENCODING_STYLE); if (encodingStyle2 != null) { headerfault.setEncodingStyle(encodingStyle2); } String part2 = XmlUtil.getAttributeOrNull(e2, Constants.ATTR_PART); if (part2 != null) { headerfault.setPart(part2); } String messageAttr2 = XmlUtil.getAttributeOrNull(e2, Constants.ATTR_MESSAGE); if (messageAttr2 != null) { headerfault.setMessage( context.translateQualifiedName(context.getLocation(e2), messageAttr2)); } header.add(headerfault); context.pop(); } public boolean handleFaultExtension( TWSDLParserContext context, TWSDLExtensible parent, Element e) { if (XmlUtil.matchesTagNS(e, getFaultQName())) { context.push(); context.registerNamespaces(e); SOAPFault fault = new SOAPFault(context.getLocation(e)); String name = XmlUtil.getAttributeOrNull(e, Constants.ATTR_NAME); if (name != null) { fault.setName(name); } String use = XmlUtil.getAttributeOrNull(e, Constants.ATTR_USE); if (use != null) { if (use.equals(Constants.ATTRVALUE_LITERAL)) { fault.setUse(SOAPUse.LITERAL); } else if (use.equals(Constants.ATTRVALUE_ENCODED)) { fault.setUse(SOAPUse.ENCODED); } else { Util.fail( "parsing.invalidAttributeValue", Constants.ATTR_USE, use); } } String namespace = XmlUtil.getAttributeOrNull(e, Constants.ATTR_NAMESPACE); if (namespace != null) { fault.setNamespace(namespace); } String encodingStyle = XmlUtil.getAttributeOrNull(e, Constants.ATTR_ENCODING_STYLE); if (encodingStyle != null) { fault.setEncodingStyle(encodingStyle); } parent.addExtension(fault); context.pop(); // context.fireDoneParsingEntity(getFaultQName(), fault); return true; } else if (XmlUtil.matchesTagNS(e, getHeaderQName())) { // although SOAP spec doesn't define meaning of this extension; it is allowed // to be here, so we have to accept it, not fail (bug 13576977) return handleHeaderElement(parent, e, (TWSDLParserContextImpl) context); } else { Util.fail( "parsing.invalidExtensionElement", e.getTagName(), e.getNamespaceURI()); return false; // keep compiler 
happy } } public boolean handleServiceExtension( TWSDLParserContext context, TWSDLExtensible parent, Element e) { Util.fail( "parsing.invalidExtensionElement", e.getTagName(), e.getNamespaceURI()); return false; // keep compiler happy } @Override public boolean handlePortExtension( TWSDLParserContext context, TWSDLExtensible parent, Element e) { if (XmlUtil.matchesTagNS(e, getAddressQName())) { context.push(); context.registerNamespaces(e); SOAPAddress address = new SOAPAddress(context.getLocation(e)); String location = Util.getRequiredAttribute(e, Constants.ATTR_LOCATION); address.setLocation(location); parent.addExtension(address); context.pop(); // context.fireDoneParsingEntity(getAddressQName(), address); return true; } else { Util.fail( "parsing.invalidExtensionElement", e.getTagName(), e.getNamespaceURI()); return false; // keep compiler happy } } public boolean handlePortTypeExtension(TWSDLParserContext context, TWSDLExtensible parent, Element e) { Util.fail( "parsing.invalidExtensionElement", e.getTagName(), e.getNamespaceURI()); return false; // keep compiler happy } protected QName getBodyQName(){ return SOAPConstants.QNAME_BODY; } protected QName getHeaderQName(){ return SOAPConstants.QNAME_HEADER; } protected QName getHeaderfaultQName(){ return SOAPConstants.QNAME_HEADERFAULT; } protected QName getOperationQName(){ return SOAPConstants.QNAME_OPERATION; } protected QName getFaultQName(){ return SOAPConstants.QNAME_FAULT; } protected QName getAddressQName(){ return SOAPConstants.QNAME_ADDRESS; } protected QName getBindingQName(){ return SOAPConstants.QNAME_BINDING; } }
/*
 * Copyright 2016, gRPC Authors All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.grpc.testing.integration;

import static com.google.common.base.Preconditions.checkArgument;

import io.netty.util.concurrent.DefaultThreadFactory;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.logging.Logger;

/**
 * A localhost TCP proxy that simulates limited bandwidth and added latency
 * between a client and a server. Each direction is modeled as a
 * {@link MessageQueue}: a Reader thread pulls bytes off one socket and queues
 * them with a delayed "send time", and a Writer thread drains them to the
 * other socket once that time has passed.
 */
public final class TrafficControlProxy {

  private static final int DEFAULT_BAND_BPS = 1024 * 1024;
  private static final int DEFAULT_DELAY_NANOS = 200 * 1000 * 1000;
  private static final Logger logger = Logger.getLogger(TrafficControlProxy.class.getName());

  // TODO: make host and ports arguments
  private final String localhost = "localhost";
  private final int serverPort;
  // Maximum number of bytes allowed to sit in a MessageQueue at once
  // (bandwidth-delay product, floored at 1 byte).
  private final int queueLength;
  private final int chunkSize;
  private final int bandwidth;
  // One-way delay in nanoseconds (half of the requested round-trip latency).
  private final long latency;
  // Written by any Reader/Writer on error and by shutDown(); volatile so all
  // worker threads observe it and exit their loops.
  private volatile boolean shutDown;
  private ServerSocket clientAcceptor;
  private Socket serverSock;
  private Socket clientSock;
  private final ThreadPoolExecutor executor =
      new ThreadPoolExecutor(5, 10, 1, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(),
          new DefaultThreadFactory("proxy-pool", true));

  /**
   * Returns a new TrafficControlProxy with default bandwidth and latency.
   */
  public TrafficControlProxy(int serverPort) {
    this(serverPort, DEFAULT_BAND_BPS, DEFAULT_DELAY_NANOS, TimeUnit.NANOSECONDS);
  }

  /**
   * Returns a new TrafficControlProxy with bandwidth set to targetBPS, and latency set to
   * targetLatency in latencyUnits.
   */
  public TrafficControlProxy(int serverPort, int targetBps, int targetLatency,
      TimeUnit latencyUnits) {
    checkArgument(targetBps > 0);
    checkArgument(targetLatency > 0);
    this.serverPort = serverPort;
    bandwidth = targetBps;
    // divide by 2 because latency is applied in both directions
    latency = latencyUnits.toNanos(targetLatency) / 2;
    queueLength = (int) Math.max(bandwidth * latency / TimeUnit.SECONDS.toNanos(1), 1);
    // NOTE(review): Math.max(1, queueLength) always equals queueLength here
    // since queueLength >= 1 — chunkSize ends up identical to queueLength.
    // Preserved as-is; confirm whether Math.min was intended.
    chunkSize = Math.max(1, queueLength);
  }

  /**
   * Starts a new thread that waits for client and server and start reader/writer threads.
   */
  public void start() throws IOException {
    // ClientAcceptor uses a ServerSocket server so that the client can connect to the proxy as it
    // normally would a server. serverSock then connects the server using a regular Socket as a
    // client normally would.
    clientAcceptor = new ServerSocket();
    // Port 0 => OS picks an ephemeral port; retrieve it via getPort().
    clientAcceptor.bind(new InetSocketAddress(localhost, 0));
    executor.execute(new Runnable() {
      @Override
      public void run() {
        try {
          clientSock = clientAcceptor.accept();
          serverSock = new Socket();
          serverSock.connect(new InetSocketAddress(localhost, serverPort));
          startWorkers();
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }
    });
    logger.info("Started new proxy on port " + clientAcceptor.getLocalPort()
        + " with Queue Length " + queueLength);
  }

  /** Returns the ephemeral port the proxy is listening on for client connections. */
  public int getPort() {
    return clientAcceptor.getLocalPort();
  }

  /** Interrupt all workers and close sockets. */
  public void shutDown() throws IOException {
    // TODO: Handle case where a socket fails to close, therefore blocking the others from closing
    logger.info("Proxy shutting down... ");
    shutDown = true;
    executor.shutdown();
    clientAcceptor.close();
    clientSock.close();
    serverSock.close();
    logger.info("Shutdown Complete");
  }

  /**
   * Wires up both directions: client->server and server->client each get a
   * MessageQueue with a dedicated Reader and Writer running on the executor.
   */
  private void startWorkers() throws IOException {
    DataInputStream clientIn = new DataInputStream(clientSock.getInputStream());
    DataOutputStream clientOut = new DataOutputStream(serverSock.getOutputStream());
    DataInputStream serverIn = new DataInputStream(serverSock.getInputStream());
    DataOutputStream serverOut = new DataOutputStream(clientSock.getOutputStream());

    MessageQueue clientPipe = new MessageQueue(clientIn, clientOut);
    MessageQueue serverPipe = new MessageQueue(serverIn, serverOut);

    executor.execute(new Reader(clientPipe));
    executor.execute(new Writer(clientPipe));
    executor.execute(new Reader(serverPipe));
    executor.execute(new Writer(serverPipe));
  }

  /** Pulls bytes from a socket into its queue until shutdown or I/O failure. */
  private final class Reader implements Runnable {

    private final MessageQueue queue;

    Reader(MessageQueue queue) {
      this.queue = queue;
    }

    @Override
    public void run() {
      while (!shutDown) {
        try {
          queue.readIn();
        } catch (IOException e) {
          // Treat any I/O or interrupt as a signal to stop all workers.
          shutDown = true;
        } catch (InterruptedException e) {
          shutDown = true;
        }
      }
    }
  }

  /** Drains delayed messages from a queue onto a socket until shutdown or failure. */
  private final class Writer implements Runnable {

    private final MessageQueue queue;

    Writer(MessageQueue queue) {
      this.queue = queue;
    }

    @Override
    public void run() {
      while (!shutDown) {
        try {
          queue.writeOut();
        } catch (IOException e) {
          shutDown = true;
        } catch (InterruptedException e) {
          shutDown = true;
        }
      }
    }
  }

  /**
   * A Delay Queue that counts by number of bytes instead of the number of elements.
   */
  private class MessageQueue {
    DataInputStream inStream;
    DataOutputStream outStream;
    // Bytes currently buffered; guarded by this MessageQueue's monitor.
    int bytesQueued;
    BlockingQueue<Message> queue = new DelayQueue<Message>();

    MessageQueue(DataInputStream inputStream, DataOutputStream outputStream) {
      inStream = inputStream;
      outStream = outputStream;
    }

    /**
     * Take a message off the queue and write it to an endpoint. Blocks until a message becomes
     * available.
     */
    void writeOut() throws InterruptedException, IOException {
      Message next = queue.take();
      outStream.write(next.message, 0, next.messageLength);
      incrementBytes(-next.messageLength);
    }

    /**
     * Read bytes from an endpoint and add them as a message to the queue. Blocks if the queue is
     * full.
     */
    void readIn() throws InterruptedException, IOException {
      byte[] request = new byte[getNextChunk()];
      // NOTE(review): read() returns -1 at end-of-stream; that value is used
      // as messageLength below without a check — confirm EOF is expected to
      // surface as an exception in writeOut instead.
      int readableBytes = inStream.read(request);
      long sendTime = System.nanoTime() + latency;
      queue.put(new Message(sendTime, request, readableBytes));
      incrementBytes(readableBytes);
    }

    /**
     * Block until space on the queue becomes available. Returns how many bytes can be read on to
     * the queue
     */
    synchronized int getNextChunk() throws InterruptedException {
      while (bytesQueued == queueLength) {
        wait();
      }
      return Math.max(0, Math.min(chunkSize, queueLength - bytesQueued));
    }

    synchronized void incrementBytes(int delta) {
      bytesQueued += delta;
      if (bytesQueued < queueLength) {
        // Wake any reader blocked in getNextChunk().
        notifyAll();
      }
    }
  }

  /** A byte chunk plus the nanoTime at which it becomes eligible for delivery. */
  private static class Message implements Delayed {
    long sendTime;
    byte[] message;
    int messageLength;

    Message(long sendTime, byte[] message, int messageLength) {
      this.sendTime = sendTime;
      this.message = message;
      this.messageLength = messageLength;
    }

    @Override
    public int compareTo(Delayed o) {
      return ((Long) sendTime).compareTo(((Message) o).sendTime);
    }

    @Override
    public long getDelay(TimeUnit unit) {
      return unit.convert(sendTime - System.nanoTime(), TimeUnit.NANOSECONDS);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.resource.bundles; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.Serializable; import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.MissingResourceException; import jakarta.servlet.http.HttpServletResponse; import org.apache.wicket.Application; import org.apache.wicket.markup.head.IReferenceHeaderItem; import org.apache.wicket.request.resource.AbstractResource; import org.apache.wicket.request.resource.IResource; import org.apache.wicket.request.resource.ResourceReference; import org.apache.wicket.request.resource.caching.IStaticCacheableResource; import org.apache.wicket.resource.ITextResourceCompressor; import org.apache.wicket.util.io.ByteArrayOutputStream; import org.apache.wicket.util.io.IOUtils; import org.apache.wicket.util.lang.Args; import org.apache.wicket.util.lang.Bytes; import org.apache.wicket.util.lang.Classes; import org.apache.wicket.util.resource.AbstractResourceStream; import org.apache.wicket.util.resource.IResourceStream; import org.apache.wicket.util.resource.ResourceStreamNotFoundException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** 
 * A {@linkplain IResource resource} that concatenates several resources into one download. This
 * resource can only bundle {@link IStaticCacheableResource}s. The content type of the resource will
 * be that of the first resource that specifies its content type.
 *
 * @author papegaaij
 */
public class ConcatBundleResource extends AbstractResource implements IStaticCacheableResource
{
	private static final Logger log = LoggerFactory.getLogger(ConcatBundleResource.class);

	private static final long serialVersionUID = 1L;

	// The header items whose underlying resources are concatenated, in order.
	private final List<? extends IReferenceHeaderItem> providedResources;

	// Whether the bundle participates in static resource caching (default true).
	private boolean cachingEnabled;

	/**
	 * An optional compressor that will be used to compress the bundle resources
	 */
	private ITextResourceCompressor compressor;

	/**
	 * Construct.
	 *
	 * @param providedResources
	 *            the non-null list of header items to bundle
	 */
	public ConcatBundleResource(List<? extends IReferenceHeaderItem> providedResources)
	{
		this.providedResources = Args.notNull(providedResources, "providedResources");

		cachingEnabled = true;
	}

	/**
	 * Builds the HTTP response: resolves all member streams, concatenates their
	 * bytes and sets content type, Last-Modified and Content-Length headers.
	 */
	@Override
	protected ResourceResponse newResourceResponse(Attributes attributes)
	{
		final ResourceResponse resourceResponse = new ResourceResponse();

		if (resourceResponse.dataNeedsToBeWritten(attributes))
		{
			try
			{
				List<IResourceStream> resources = collectResourceStreams();
				if (resources == null)
					return sendResourceError(resourceResponse, HttpServletResponse.SC_NOT_FOUND,
						"Unable to find resource");

				// content type is taken from the first member that declares one
				resourceResponse.setContentType(findContentType(resources));

				// add Last-Modified header (to support HEAD requests and If-Modified-Since)
				final Instant lastModified = findLastModified(resources);

				if (lastModified != null)
					resourceResponse.setLastModified(lastModified);

				// read resource data
				final byte[] bytes = readAllResources(resources);

				// send Content-Length header
				resourceResponse.setContentLength(bytes.length);

				// send response body with resource data
				resourceResponse.setWriteCallback(new WriteCallback()
				{
					@Override
					public void writeData(Attributes attributes)
					{
						attributes.getResponse().write(bytes);
					}
				});
			}
			catch (IOException e)
			{
				log.debug(e.getMessage(), e);
				return sendResourceError(resourceResponse, 500, "Unable to read resource stream");
			}
			catch (ResourceStreamNotFoundException e)
			{
				log.debug(e.getMessage(), e);
				return sendResourceError(resourceResponse, 500, "Unable to open resource stream");
			}
		}

		return resourceResponse;
	}

	/**
	 * Resolves the resource stream of every bundled item. Returns {@code null}
	 * (after reporting) if any member cannot provide a stream.
	 */
	private List<IResourceStream> collectResourceStreams()
	{
		List<IResourceStream> ret = new ArrayList<>(providedResources.size());
		for (IReferenceHeaderItem curItem : providedResources)
		{
			IResourceStream stream = ((IStaticCacheableResource)curItem.getReference()
				.getResource()).getResourceStream();
			if (stream == null)
			{
				reportError(curItem.getReference(), "Cannot get resource stream for ");
				return null;
			}

			ret.add(stream);
		}
		return ret;
	}

	/** Returns the content type of the first stream that declares one, else null. */
	protected String findContentType(List<IResourceStream> resources)
	{
		for (IResourceStream curStream : resources)
			if (curStream.getContentType() != null)
				return curStream.getContentType();
		return null;
	}

	/**
	 * Returns the latest modification time across all member streams.
	 * NOTE(review): assumes lastModifiedTime() is non-null for each stream —
	 * a null return would NPE in isAfter(); confirm the contract.
	 */
	protected Instant findLastModified(List<IResourceStream> resources)
	{
		Instant ret = null;
		for (IResourceStream curStream : resources)
		{
			Instant curLastModified = curStream.lastModifiedTime();
			if (ret == null || curLastModified.isAfter(ret))
				ret = curLastModified;
		}
		return ret;
	}

	/** Concatenates the bytes of all member streams, in order. */
	protected byte[] readAllResources(List<IResourceStream> resources) throws IOException,
		ResourceStreamNotFoundException
	{
		try (ByteArrayOutputStream output = new ByteArrayOutputStream())
		{
			for (IResourceStream curStream : resources)
			{
				IOUtils.copy(curStream.getInputStream(), output);
			}

			return output.toByteArray();
		}
	}

	/** Logs the failure and marks the response with the given error status. */
	private ResourceResponse sendResourceError(ResourceResponse resourceResponse, int errorCode,
		String errorMessage)
	{
		if (log.isWarnEnabled())
		{
			String msg = String.format("Bundled resource: %s (status=%d)", errorMessage, errorCode);
			log.warn(msg);
		}

		resourceResponse.setError(errorCode, errorMessage);
		return resourceResponse;
	}

	@Override
	public boolean isCachingEnabled()
	{
		return cachingEnabled;
	}

	public void setCachingEnabled(final boolean enabled)
	{
		cachingEnabled = enabled;
	}

	/**
	 * The cache key is the list of the members' cache keys; {@code null}
	 * (caching disabled) if any member has no key.
	 */
	@Override
	public Serializable getCacheKey()
	{
		ArrayList<Serializable> key = new ArrayList<>(providedResources.size());
		for (IReferenceHeaderItem curItem : providedResources)
		{
			Serializable curKey = ((IStaticCacheableResource)curItem.getReference().getResource()).getCacheKey();
			if (curKey == null)
			{
				reportError(curItem.getReference(), "Unable to get cache key for ");
				return null;
			}

			key.add(curKey);
		}
		return key;
	}

	/**
	 * If a bundle resource is missing then throws a {@link MissingResourceException} if
	 * {@link org.apache.wicket.settings.ResourceSettings#getThrowExceptionOnMissingResource()}
	 * says so, or logs a warning message if the logging level allows
	 * @param reference
	 *          The resource reference to the missing resource
	 * @param prefix
	 *          The error message prefix
	 */
	private void reportError(ResourceReference reference, String prefix)
	{
		String scope = Classes.name(reference.getScope());
		String name = reference.getName();
		String message = prefix + reference.toString();

		if (getThrowExceptionOnMissingResource())
		{
			throw new MissingResourceException(message, scope, name);
		}
		else if (log.isWarnEnabled())
		{
			log.warn(message);
		}
	}

	/**
	 * Exposes the concatenated bundle as a single lazily-read resource stream;
	 * the bytes are read once on first access and cached in the stream object.
	 */
	@Override
	public IResourceStream getResourceStream()
	{
		List<IResourceStream> streams = collectResourceStreams();
		if (streams == null)
		{
			return null;
		}
		final String contentType = findContentType(streams);
		final Instant lastModified = findLastModified(streams);
		AbstractResourceStream ret = new AbstractResourceStream()
		{
			private static final long serialVersionUID = 1L;

			private byte[] bytes;

			private ByteArrayInputStream inputStream;

			// Lazily concatenate the member resources; null on any failure.
			private byte[] getBytes()
			{
				if (bytes == null)
				{
					try
					{
						bytes = readAllResources(streams);
					}
					catch (IOException e)
					{
						return null;
					}
					catch (ResourceStreamNotFoundException e)
					{
						return null;
					}
				}
				return bytes;
			}

			@Override
			public InputStream getInputStream() throws ResourceStreamNotFoundException
			{
				if (inputStream == null)
				{
					inputStream = new ByteArrayInputStream(getBytes());
				}
				return inputStream;
			}

			@Override
			public Bytes length()
			{
				return Bytes.bytes(getBytes().length);
			}

			@Override
			public String getContentType()
			{
				return contentType;
			}

			@Override
			public Instant lastModifiedTime()
			{
				return lastModified;
			}

			@Override
			public void close() throws IOException
			{
				if (inputStream != null)
				{
					inputStream.close();
				}
			}
		};

		return ret;
	}

	// NOTE(review): the compressor is stored and exposed but not applied in
	// readAllResources() within this class; presumably subclasses use it.
	public void setCompressor(ITextResourceCompressor compressor)
	{
		this.compressor = compressor;
	}

	public ITextResourceCompressor getCompressor()
	{
		return compressor;
	}

	/**
	 * @return the result of {@link org.apache.wicket.settings.ResourceSettings#getThrowExceptionOnMissingResource()}
	 */
	protected boolean getThrowExceptionOnMissingResource()
	{
		return Application.get().getResourceSettings().getThrowExceptionOnMissingResource();
	}
}
package crazypants.enderio.base.machine.baselegacy;

import java.util.ArrayList;
import java.util.List;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import com.enderio.core.api.common.util.IProgressTile;
import com.enderio.core.common.util.NNList;

import crazypants.enderio.api.capacitor.ICapacitorKey;
import crazypants.enderio.base.config.config.MachineConfig;
import crazypants.enderio.base.machine.interfaces.IPoweredTask;
import crazypants.enderio.base.machine.task.PoweredTask;
import crazypants.enderio.base.machine.task.PoweredTaskProgress;
import crazypants.enderio.base.recipe.IMachineRecipe;
import crazypants.enderio.base.recipe.IMachineRecipe.ResultStack;
import crazypants.enderio.base.recipe.MachineRecipeInput;
import crazypants.enderio.base.recipe.MachineRecipeRegistry;
import crazypants.enderio.util.Prep;
import info.loenwind.autosave.annotations.Storable;
import info.loenwind.autosave.annotations.Store;
import info.loenwind.autosave.util.NBTAction;
import net.minecraft.block.Block;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;

/**
 * Base tile entity for machines that consume energy to run recipe-driven
 * tasks: each tick it advances the current task (if any) and, when idle,
 * looks up the next matching recipe for the input slots and tries to start
 * it, provided the outputs fit and power is available.
 */
@Storable
public abstract class AbstractPoweredTaskEntity extends AbstractPowerConsumerEntity implements IProgressTile {

  // The task currently being processed; persisted (SAVE/ITEM) so progress
  // survives chunk unload and item-form pickup. null when idle.
  @Store({ NBTAction.SAVE, NBTAction.ITEM })
  protected IPoweredTask currentTask = null;
  // Recipe of the last finished task; used e.g. to compute XP for outputs.
  @Store({ NBTAction.SAVE, NBTAction.ITEM })
  protected IMachineRecipe lastCompletedRecipe;
  // Memoized recipe lookup for the current input slots; invalidated whenever
  // the inputs or outputs change (not persisted).
  protected IMachineRecipe cachedNextRecipe;

  // Throttle counter so a machine whose last start attempt failed does not
  // re-check the recipe registry every tick.
  protected int ticksSinceCheckedRecipe = 0;
  protected boolean startFailed = false;
  // Random seed reserved for the next task attempt. Kept until a task
  // actually starts so a failed attempt retries with the same "chance" value.
  private Long theNextSeed = null;

  protected AbstractPoweredTaskEntity(@Nonnull SlotDefinition slotDefinition, @Nonnull ICapacitorKey maxEnergyRecieved, @Nonnull ICapacitorKey maxEnergyStored,
      @Nonnull ICapacitorKey maxEnergyUsed) {
    super(slotDefinition, maxEnergyRecieved, maxEnergyStored, maxEnergyUsed);
  }

  // Active means: a task exists, it has begun (progress >= 0), there is
  // power, and the redstone mode allows running.
  @Override
  public boolean isActive() {
    return currentTask == null ? false : currentTask.getProgress() >= 0 && hasPower() && redstoneCheckPassed;
  }

  // Progress in [0,1] of the current task, or -1 when idle.
  @Override
  public float getProgress() {
    return currentTask == null ? -1 : currentTask.getProgress();
  }

  @Override
  public @Nonnull TileEntity getTileEntity() {
    return this;
  }

  // Client-side sync hook: a negative progress clears the task, otherwise a
  // lightweight progress-only task is substituted for display purposes.
  @Override
  public void setProgress(float progress) {
    this.currentTask = progress < 0 ? null : new PoweredTaskProgress(progress);
  }

  public IPoweredTask getCurrentTask() {
    return currentTask;
  }

  // XP granted for the given output, based on the last completed recipe
  // (0 when nothing has completed yet).
  public float getExperienceForOutput(@Nonnull ItemStack output) {
    if (lastCompletedRecipe == null) {
      return 0;
    }
    return lastCompletedRecipe.getExperienceForOutput(output);
  }

  public boolean getRedstoneChecksPassed() {
    return redstoneCheckPassed;
  }

  /**
   * Per-tick driver: advances the running task, then — when idle, powered and
   * supplied with inputs — tries to start the next recipe, with a back-off
   * delay after failed attempts.
   */
  @Override
  protected void processTasks(boolean redstoneChecksPassed) {
    if (!redstoneChecksPassed) {
      return;
    }

    // Process any current items
    checkProgress(redstoneChecksPassed);

    if (currentTask != null || !hasPower() || !hasInputStacks()) {
      return;
    }

    // Back off after a failed start so we don't hammer the recipe registry.
    if (startFailed) {
      ticksSinceCheckedRecipe++;
      if (ticksSinceCheckedRecipe < MachineConfig.sleepBetweenFailedTries.get()) {
        return;
      }
    }
    ticksSinceCheckedRecipe = 0;

    // Get a new chance when we don't have one yet
    // If a recipe could not be started we will try with the same chance next time
    if (theNextSeed == null) {
      theNextSeed = random.nextLong();
    }

    // Then see if we need to start a new one
    IMachineRecipe nextRecipe = canStartNextTask(theNextSeed);
    if (nextRecipe != null) {
      boolean started = startNextTask(nextRecipe, theNextSeed);
      if (started) {
        // this chance value has been used up
        theNextSeed = null;
      }
      startFailed = !started;
    } else {
      startFailed = true;
    }
  }

  // Advances the current task by one tick of power use (possibly doubled),
  // and finalizes it when complete.
  protected void checkProgress(boolean redstoneChecksPassed) {
    if (currentTask == null || !hasPower()) {
      return;
    }
    if (redstoneChecksPassed && !currentTask.isComplete()) {
      usePower();
      if (shouldDoubleTick(currentTask, getPowerUsePerTick())) {
        usePower();
      }
    }

    // then check if we are done
    if (currentTask.isComplete()) {
      taskComplete();
      return;
    }

    return;
  }

  // Drains power and credits the consumed amount (scaled by efficiency)
  // toward the current task's progress.
  @Override
  protected int usePower(int wantToUse) {
    int used = super.usePower(wantToUse);
    if (currentTask != null) {
      currentTask.update(used * getEfficiencyMultiplier());
    }
    return used;
  }

  // Finalizes the current task: records the recipe, pushes results to the
  // output slots (or the world), wears the capacitor and clears the task.
  protected void taskComplete() {
    if (currentTask != null) {
      lastCompletedRecipe = currentTask.getRecipe();
      ResultStack[] output = currentTask.getCompletedResult();
      if (output.length > 0) {
        mergeResults(output);
      }
    }
    damageCapacitor();
    markDirty();
    currentTask = null;
  }

  /**
   * Merges task results into the output slots. Works on copies of the slot
   * contents and writes them back at the end; with no output slots, item
   * results are dropped into the world instead.
   */
  protected void mergeResults(@Nonnull ResultStack[] results) {
    final int numOutputSlots = slotDefinition.getNumOutputSlots();
    if (numOutputSlots > 0) {

      // Snapshot the output slots so merging can be done without touching
      // the live inventory until we are done.
      List<ItemStack> outputStacks = new ArrayList<ItemStack>(numOutputSlots);
      for (int i = slotDefinition.minOutputSlot; i <= slotDefinition.maxOutputSlot; i++) {
        ItemStack it = inventory[i];
        if (it != null && Prep.isValid(it)) {
          it = it.copy();
        }
        outputStacks.add(it);
      }

      for (ResultStack result : results) {
        if (Prep.isValid(result.item)) {
          int numMerged = mergeItemResult(result.item, outputStacks);
          if (numMerged > 0) {
            result.item.shrink(numMerged);
          }
        } else if (result.fluid != null) {
          mergeFluidResult(result);
        }
      }

      // Write the merged snapshot back into the real output slots.
      int listIndex = 0;
      for (int i = slotDefinition.minOutputSlot; i <= slotDefinition.maxOutputSlot; i++) {
        ItemStack st = outputStacks.get(listIndex);
        if (st != null && Prep.isValid(st)) {
          st = st.copy();
        }
        inventory[i] = st;
        listIndex++;
      }
    } else {
      // No output slots: spit item results into the world as entities.
      for (ResultStack result : results) {
        if (Prep.isValid(result.item)) {
          Block.spawnAsEntity(world, pos, result.item.copy());
          result.item.setCount(0);
        } else if (result.fluid != null) {
          mergeFluidResult(result);
        }
      }
    }
    // Output contents changed, so the memoized recipe may no longer apply.
    cachedNextRecipe = null;
  }

  // Hook for machines with fluid outputs; default machines have none.
  protected void mergeFluidResult(@Nonnull ResultStack result) {
  }

  // Hook for machines with fluid inputs; default machines have none.
  protected void drainInputFluid(@Nonnull MachineRecipeInput fluid) {
  }

  // Hook for machines with fluid outputs; default machines accept none.
  protected boolean canInsertResultFluid(@Nonnull ResultStack fluid) {
    return false;
  }

  /**
   * Merges {@code item} into the given (snapshot) output stacks, topping up
   * matching stacks first and then using the first free slot.
   *
   * @return the number of items merged; 0 means nothing fit (partial top-ups
   *         are only committed when the whole stack fits)
   */
  protected int mergeItemResult(@Nonnull ItemStack item, @Nonnull List<ItemStack> outputStacks) {

    ItemStack copy = item.copy();
    if (Prep.isInvalid(copy)) {
      return 0;
    }
    int firstFreeSlot = -1;

    // try to add it to existing stacks first
    for (int i = 0; i < outputStacks.size(); i++) {
      ItemStack outStack = outputStacks.get(i);
      if (outStack != null && Prep.isValid(outStack)) {
        int num = getNumCanMerge(outStack, copy);
        outStack.grow(num);
        copy.shrink(num);
        if (Prep.isInvalid(copy)) {
          return item.getCount();
        }
      } else if (firstFreeSlot < 0) {
        firstFreeSlot = i;
      }
    }

    // Try and add it to an empty slot
    if (firstFreeSlot >= 0) {
      outputStacks.set(firstFreeSlot, copy);
      return item.getCount();
    }

    // NOTE(review): when the stack only partially fit into existing stacks
    // and no empty slot remains, the partial merge was already applied to
    // the snapshot but 0 is returned — confirm callers treat 0 as "retry
    // with full stack" against a fresh snapshot.
    return 0;
  }

  // Collects all non-empty input slots as recipe inputs.
  protected @Nonnull NNList<MachineRecipeInput> getRecipeInputs() {
    NNList<MachineRecipeInput> res = new NNList<>();
    for (int slot = slotDefinition.minInputSlot; slot <= slotDefinition.maxInputSlot; slot++) {
      final ItemStack item = getStackInSlot(slot);
      if (Prep.isValid(item)) {
        res.add(new MachineRecipeInput(slot, item));
      }
    }
    return res;
  }

  // Memoized registry lookup for the recipe matching the current inputs.
  protected @Nullable IMachineRecipe getNextRecipe() {
    if (cachedNextRecipe == null) {
      cachedNextRecipe = MachineRecipeRegistry.instance.getRecipeForInputs(getMachineLevel(), getMachineName(), getRecipeInputs());
    }
    return cachedNextRecipe;
  }

  // Returns the next recipe if one matches the inputs AND its outputs fit,
  // otherwise null.
  protected IMachineRecipe canStartNextTask(long nextSeed) {
    IMachineRecipe nextRecipe = getNextRecipe();
    if (nextRecipe == null) {
      return null; // no template
    }
    // make sure we have room for the next output
    return canInsertResult(nextSeed, nextRecipe) ? nextRecipe : null;
  }

  /**
   * Dry-run check that the results of {@code nextRecipe} (rolled with
   * {@code nextSeed}) can be inserted into the output slots. Works entirely
   * on copies; the real inventory is not modified.
   */
  protected boolean canInsertResult(long nextSeed, @Nonnull IMachineRecipe nextRecipe) {
    final IPoweredTask task = createTask(nextRecipe, nextSeed);
    if (task == null) {
      return false;
    }
    ResultStack[] nextResults = task.getCompletedResult();

    List<ItemStack> outputStacks = null;
    final int numOutputSlots = slotDefinition.getNumOutputSlots();
    if (numOutputSlots > 0) {

      outputStacks = new ArrayList<ItemStack>(numOutputSlots);
      boolean allFull = true;
      for (int i = slotDefinition.minOutputSlot; i <= slotDefinition.maxOutputSlot; i++) {
        ItemStack st = inventory[i];
        if (st != null && Prep.isValid(st)) {
          st = st.copy();
          if (allFull && st.getCount() < st.getMaxStackSize()) {
            allFull = false;
          }
        } else {
          allFull = false;
        }
        outputStacks.add(st);
      }
      // Fast path: every output slot is occupied and maxed out.
      if (allFull) {
        return false;
      }
    }

    for (ResultStack result : nextResults) {
      if (Prep.isValid(result.item)) {
        if (outputStacks == null || mergeItemResult(result.item, outputStacks) == 0) {
          return false;
        }
      } else if (result.fluid != null) {
        if (!canInsertResultFluid(result)) {
          return false;
        }
      }
    }

    return true;
  }

  // True when at least one input slot holds a valid stack.
  protected boolean hasInputStacks() {
    int fromSlot = slotDefinition.minInputSlot;
    for (int i = 0; i < slotDefinition.getNumInputSlots(); i++) {
      final ItemStack itemStack = inventory[fromSlot];
      if (itemStack != null && Prep.isValid(itemStack)) {
        return true;
      }
      fromSlot++;
    }
    return false;
  }

  /**
   * Creates the task for {@code nextRecipe} and consumes the recipe's input
   * quantities from the input slots. Returns false (and starts nothing) when
   * power is missing or the inputs no longer match the recipe.
   */
  protected boolean startNextTask(@Nonnull IMachineRecipe nextRecipe, long nextSeed) {
    if (hasPower() && nextRecipe.isRecipe(getMachineLevel(), getRecipeInputs())) {
      // then get our recipe and take away the source items
      currentTask = createTask(nextRecipe, nextSeed);
      List<MachineRecipeInput> consumed = nextRecipe.getQuantitiesConsumed(getRecipeInputs());
      for (MachineRecipeInput item : consumed) {
        if (item != null) {
          if (Prep.isValid(item.item)) {
            getStackInSlot(item.slotNumber).shrink(item.item.getCount());
          } else if (item.fluid != null) {
            drainInputFluid(item);
          }
        }
      }
      return true;
    }
    return false;
  }

  // Factory for the task object; subclasses may return null to veto a start.
  protected @Nullable IPoweredTask createTask(@Nonnull IMachineRecipe nextRecipe, long nextSeed) {
    return new PoweredTask(nextRecipe, nextSeed, getRecipeInputs());
  }

  @Override
  protected void onAfterNbtRead() {
    super.onAfterNbtRead();
    // The memoized recipe is transient; recompute after loading from NBT.
    cachedNextRecipe = null;
  }

  @Override
  public void setInventorySlotContents(int slot, @Nonnull ItemStack contents) {
    super.setInventorySlotContents(slot, contents);
    if (slotDefinition.isInputSlot(slot)) {
      cachedNextRecipe = null;
      startFailed = false; // skip re-check delay, see #ticksSinceCheckedRecipe
    }
  }

  // task machines need to return a valid constant from MachineRecipeRegistry
  @Override
  public abstract @Nonnull String getMachineName();

  // Hook for machines that can consume double power for double speed.
  protected boolean shouldDoubleTick(@Nonnull IPoweredTask task, int usedEnergy) {
    return false;
  }

}
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.cli.fs.command; import alluxio.AlluxioURI; import alluxio.Configuration; import alluxio.Constants; import alluxio.PropertyKey; import alluxio.client.ReadType; import alluxio.client.file.FileInStream; import alluxio.client.file.FileOutStream; import alluxio.client.file.FileSystem; import alluxio.client.file.URIStatus; import alluxio.client.file.options.CreateFileOptions; import alluxio.client.file.options.OpenFileOptions; import alluxio.client.file.policy.FileWriteLocationPolicy; import alluxio.exception.AlluxioException; import alluxio.exception.ExceptionMessage; import alluxio.exception.FileAlreadyExistsException; import alluxio.exception.FileDoesNotExistException; import alluxio.exception.InvalidPathException; import alluxio.cli.fs.FileSystemShellUtils; import alluxio.util.CommonUtils; import alluxio.util.io.PathUtils; import com.google.common.base.Joiner; import com.google.common.io.Closer; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.RandomStringUtils; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.util.ArrayList; import java.util.List; import javax.annotation.concurrent.ThreadSafe; /** * Copies a file or a directory in the Alluxio filesystem. 
*/
@ThreadSafe
public final class CpCommand extends AbstractFileSystemCommand {
  private static final Option RECURSIVE_OPTION =
      Option.builder("R")
          .required(false)
          .hasArg(false)
          .desc("copy files in subdirectories recursively")
          .build();

  /**
   * @param fs the filesystem of Alluxio
   */
  public CpCommand(FileSystem fs) {
    super(fs);
  }

  @Override
  public String getCommandName() {
    return "cp";
  }

  @Override
  protected int getNumOfArgs() {
    return 2;
  }

  @Override
  public Options getOptions() {
    return new Options().addOption(RECURSIVE_OPTION);
  }

  /**
   * Dispatches on the src/dst schemes: file->alluxio, alluxio->file, or
   * alluxio->alluxio. A missing scheme is treated as alluxio; any other
   * combination is rejected.
   */
  @Override
  public int run(CommandLine cl) throws AlluxioException, IOException {
    String[] args = cl.getArgs();
    AlluxioURI srcPath = new AlluxioURI(args[0]);
    AlluxioURI dstPath = new AlluxioURI(args[1]);
    if ((dstPath.getScheme() == null || isAlluxio(dstPath.getScheme()))
        && isFile(srcPath.getScheme())) {
      // local -> Alluxio
      List<File> srcFiles = FileSystemShellUtils.getFiles(srcPath.getPath());
      if (srcFiles.size() == 0) {
        throw new IOException(ExceptionMessage.PATH_DOES_NOT_EXIST.getMessage(srcPath));
      }
      if (srcPath.containsWildcard()) {
        List<AlluxioURI> srcPaths = new ArrayList<>();
        for (File srcFile : srcFiles) {
          srcPaths.add(
              new AlluxioURI(srcPath.getScheme(), srcPath.getAuthority(), srcFile.getPath()));
        }
        copyFromLocalWildcard(srcPaths, dstPath);
      } else {
        copyFromLocal(srcPath, dstPath);
      }
    } else if ((srcPath.getScheme() == null || isAlluxio(srcPath.getScheme()))
        && isFile(dstPath.getScheme())) {
      // Alluxio -> local
      List<AlluxioURI> srcPaths = FileSystemShellUtils.getAlluxioURIs(mFileSystem, srcPath);
      if (srcPaths.size() == 0) {
        throw new IOException(ExceptionMessage.PATH_DOES_NOT_EXIST.getMessage(srcPath));
      }
      if (srcPath.containsWildcard()) {
        copyWildcardToLocal(srcPaths, dstPath);
      } else {
        copyToLocal(srcPath, dstPath);
      }
    } else if ((srcPath.getScheme() == null || isAlluxio(srcPath.getScheme()))
        && (dstPath.getScheme() == null || isAlluxio(dstPath.getScheme()))) {
      // Alluxio -> Alluxio
      List<AlluxioURI> srcPaths = FileSystemShellUtils.getAlluxioURIs(mFileSystem, srcPath);
      if (srcPaths.size() == 0) {
        throw new FileDoesNotExistException(
            ExceptionMessage.PATH_DOES_NOT_EXIST.getMessage(srcPath.getPath()));
      }
      if (srcPath.containsWildcard()) {
        copyWildcard(srcPaths, dstPath, cl.hasOption("R"));
      } else {
        copy(srcPath, dstPath, cl.hasOption("R"));
      }
    } else {
      throw new InvalidPathException(
          "Schemes must be either file or alluxio, and at most one file scheme is allowed.");
    }
    return 0;
  }

  /**
   * Copies a list of files or directories specified by srcPaths to the destination specified by
   * dstPath. This method is used when the original source path contains wildcards.
   *
   * @param srcPaths a list of files or directories in the Alluxio filesystem
   * @param dstPath the destination in the Alluxio filesystem
   * @param recursive indicates whether directories should be copied recursively
   */
  private void copyWildcard(List<AlluxioURI> srcPaths, AlluxioURI dstPath, boolean recursive)
      throws AlluxioException, IOException {
    URIStatus dstStatus = null;
    try {
      dstStatus = mFileSystem.getStatus(dstPath);
    } catch (FileDoesNotExistException e) {
      // if the destination does not exist, it will be created
    }

    // A wildcard expansion yields several sources, so the destination must
    // be (or become) a directory.
    if (dstStatus != null && !dstStatus.isFolder()) {
      throw new InvalidPathException(ExceptionMessage.DESTINATION_CANNOT_BE_FILE.getMessage());
    }
    if (dstStatus == null) {
      mFileSystem.createDirectory(dstPath);
      System.out.println("Created directory: " + dstPath);
    }
    // Best-effort: copy everything, collect failures, report them together.
    List<String> errorMessages = new ArrayList<>();
    for (AlluxioURI srcPath : srcPaths) {
      try {
        copy(srcPath, new AlluxioURI(dstPath.getScheme(), dstPath.getAuthority(),
            PathUtils.concatPath(dstPath.getPath(), srcPath.getName())), recursive);
      } catch (AlluxioException | IOException e) {
        errorMessages.add(e.getMessage());
      }
    }
    if (errorMessages.size() != 0) {
      throw new IOException(Joiner.on('\n').join(errorMessages));
    }
  }

  /**
   * Copies a file or a directory in the Alluxio filesystem.
   *
   * @param srcPath the source {@link AlluxioURI} (could be a file or a directory)
   * @param dstPath the {@link AlluxioURI} of the destination path in the Alluxio filesystem
   * @param recursive indicates whether directories should be copied recursively
   */
  private void copy(AlluxioURI srcPath, AlluxioURI dstPath, boolean recursive)
      throws AlluxioException, IOException {
    URIStatus srcStatus = mFileSystem.getStatus(srcPath);

    URIStatus dstStatus = null;
    try {
      dstStatus = mFileSystem.getStatus(dstPath);
    } catch (FileDoesNotExistException e) {
      // if the destination does not exist, it will be created
    }

    if (!srcStatus.isFolder()) {
      // Copying a file into an existing directory places it inside.
      if (dstStatus != null && dstStatus.isFolder()) {
        dstPath = new AlluxioURI(PathUtils.concatPath(dstPath.getPath(), srcPath.getName()));
      }
      copyFile(srcPath, dstPath);
    } else {
      if (!recursive) {
        throw new IOException(
            srcPath.getPath() + " is a directory, to copy it please use \"cp -R <src> <dst>\"");
      }

      List<URIStatus> statuses;
      statuses = mFileSystem.listStatus(srcPath);

      if (dstStatus != null) {
        if (!dstStatus.isFolder()) {
          throw new InvalidPathException(ExceptionMessage.DESTINATION_CANNOT_BE_FILE.getMessage());
        }
        // if copying a directory to an existing directory, the copied directory will become a
        // subdirectory of the destination
        if (srcStatus.isFolder()) {
          dstPath = new AlluxioURI(PathUtils.concatPath(dstPath.getPath(), srcPath.getName()));
          mFileSystem.createDirectory(dstPath);
          System.out.println("Created directory: " + dstPath);
        }
      }

      if (dstStatus == null) {
        mFileSystem.createDirectory(dstPath);
        System.out.println("Created directory: " + dstPath);
      }

      // Best-effort copy of each child, aggregating failures.
      // NOTE(review): only IOException is caught here, while copyWildcard
      // catches AlluxioException as well — an AlluxioException from one child
      // aborts the remaining children. Confirm whether this asymmetry is
      // intentional.
      List<String> errorMessages = new ArrayList<>();
      for (URIStatus status : statuses) {
        try {
          copy(new AlluxioURI(srcPath.getScheme(), srcPath.getAuthority(), status.getPath()),
              new AlluxioURI(dstPath.getScheme(), dstPath.getAuthority(),
                  PathUtils.concatPath(dstPath.getPath(), status.getName())),
              recursive);
        } catch (IOException e) {
          errorMessages.add(e.getMessage());
        }
      }
      if (errorMessages.size() != 0) {
        throw new IOException(Joiner.on('\n').join(errorMessages));
      }
    }
  }

  /**
   * Copies a file in the Alluxio filesystem.
   *
   * @param srcPath the source {@link AlluxioURI} (has to be a file)
   * @param dstPath the destination path in the Alluxio filesystem
   */
  private void copyFile(AlluxioURI srcPath, AlluxioURI dstPath)
      throws AlluxioException, IOException {
    try (Closer closer = Closer.create()) {
      // NO_CACHE: don't pollute worker caches with a one-off read.
      OpenFileOptions openFileOptions = OpenFileOptions.defaults().setReadType(ReadType.NO_CACHE);
      FileInStream is = closer.register(mFileSystem.openFile(srcPath, openFileOptions));
      CreateFileOptions createFileOptions = CreateFileOptions.defaults();
      FileOutStream os = closer.register(mFileSystem.createFile(dstPath, createFileOptions));
      try {
        IOUtils.copy(is, os);
      } catch (Exception e) {
        // Cancel the out stream so no partial file is left behind.
        os.cancel();
        throw e;
      }
      System.out.println("Copied " + srcPath + " to " + dstPath);
    }
  }

  /**
   * Copies a directory from local to Alluxio filesystem. The destination directory structure
   * maintained as local directory. This method is used when input path is a directory.
   *
   * @param srcPath the {@link AlluxioURI} of the source directory in the local filesystem
   * @param dstPath the {@link AlluxioURI} of the destination
   */
  private void copyFromLocalDir(AlluxioURI srcPath, AlluxioURI dstPath)
      throws AlluxioException, IOException {
    File srcDir = new File(srcPath.getPath());
    // Remember whether we created dstPath so it can be rolled back when
    // every single child copy fails.
    boolean dstExistedBefore = mFileSystem.exists(dstPath);
    createDstDir(dstPath);
    List<String> errorMessages = new ArrayList<>();
    File[] fileList = srcDir.listFiles();
    if (fileList == null) {
      String errMsg = String.format("Failed to list files for directory %s", srcDir);
      errorMessages.add(errMsg);
      fileList = new File[0];
    }
    int misFiles = 0;
    for (File srcFile : fileList) {
      AlluxioURI newURI = new AlluxioURI(dstPath, new AlluxioURI(srcFile.getName()));
      try {
        copyPath(
            new AlluxioURI(srcPath.getScheme(), srcPath.getAuthority(), srcFile.getPath()),
            newURI);
      } catch (AlluxioException | IOException e) {
        errorMessages.add(e.getMessage());
        if (!mFileSystem.exists(newURI)) {
          misFiles++;
        }
      }
    }
    if (errorMessages.size() != 0) {
      if (misFiles == fileList.length) {
        // If the directory doesn't exist and no files were created, then delete the directory
        if (!dstExistedBefore && mFileSystem.exists(dstPath)) {
          mFileSystem.delete(dstPath);
        }
      }
      throw new IOException(Joiner.on('\n').join(errorMessages));
    }
  }

  /**
   * Copies a list of files or directories specified by srcPaths from the local filesystem to
   * dstPath in the Alluxio filesystem space. This method is used when the input path contains
   * wildcards.
   *
   * @param srcPaths a list of files or directories in the local filesystem
   * @param dstPath the {@link AlluxioURI} of the destination
   */
  private void copyFromLocalWildcard(List<AlluxioURI> srcPaths, AlluxioURI dstPath)
      throws AlluxioException, IOException {
    boolean dstExistedBefore = mFileSystem.exists(dstPath);
    createDstDir(dstPath);
    List<String> errorMessages = new ArrayList<>();
    int misFiles = 0;
    for (AlluxioURI srcPath : srcPaths) {
      AlluxioURI newURI = new AlluxioURI(dstPath, new AlluxioURI(srcPath.getName()));
      try {
        copyPath(srcPath, newURI);
        System.out.println("Copied " + srcPath + " to " + dstPath);
      } catch (AlluxioException | IOException e) {
        errorMessages.add(e.getMessage());
        if (!mFileSystem.exists(newURI)) {
          misFiles++;
        }
      }
    }
    if (errorMessages.size() != 0) {
      if (misFiles == srcPaths.size()) {
        // If the directory doesn't exist and no files were created, then delete the directory
        if (!dstExistedBefore && mFileSystem.exists(dstPath)) {
          mFileSystem.delete(dstPath);
        }
      }
      throw new IOException(Joiner.on('\n').join(errorMessages));
    }
  }

  /**
   * Creates a directory in the Alluxio filesystem space. It will not throw any exception if the
   * destination directory already exists.
   *
   * @param dstPath the {@link AlluxioURI} of the destination directory which will be created
   */
  private void createDstDir(AlluxioURI dstPath) throws AlluxioException, IOException {
    try {
      mFileSystem.createDirectory(dstPath);
    } catch (FileAlreadyExistsException e) {
      // it's fine if the directory already exists
    }

    URIStatus dstStatus = mFileSystem.getStatus(dstPath);
    if (!dstStatus.isFolder()) {
      throw new InvalidPathException(ExceptionMessage.DESTINATION_CANNOT_BE_FILE.getMessage());
    }
  }

  /**
   * Copies a file or directory specified by srcPath from the local filesystem to dstPath in the
   * Alluxio filesystem space.
   *
   * @param srcPath the {@link AlluxioURI} of the source in the local filesystem
   * @param dstPath the {@link AlluxioURI} of the destination
   */
  private void copyFromLocal(AlluxioURI srcPath, AlluxioURI dstPath)
      throws AlluxioException, IOException {
    File srcFile = new File(srcPath.getPath());
    if (srcFile.isDirectory()) {
      copyFromLocalDir(srcPath, dstPath);
    } else {
      copyPath(srcPath, dstPath);
    }
    System.out.println("Copied " + srcPath + " to " + dstPath);
  }

  /**
   * Copies a file or directory specified by srcPath from the local filesystem to dstPath in the
   * Alluxio filesystem space.
   *
   * @param srcPath the {@link AlluxioURI} of the source file in the local filesystem
   * @param dstPath the {@link AlluxioURI} of the destination
   */
  private void copyPath(AlluxioURI srcPath, AlluxioURI dstPath)
      throws AlluxioException, IOException {
    File src = new File(srcPath.getPath());
    if (!src.isDirectory()) {
      // If the dstPath is a directory, then it should be updated to be the path of the file where
      // src will be copied to.
      if (mFileSystem.exists(dstPath) && mFileSystem.getStatus(dstPath).isFolder()) {
        dstPath = dstPath.join(src.getName());
      }

      FileOutStream os = null;
      try (Closer closer = Closer.create()) {
        // Write location policy is configurable; instantiated reflectively
        // from USER_FILE_COPY_FROM_LOCAL_WRITE_LOCATION_POLICY.
        FileWriteLocationPolicy locationPolicy;
        locationPolicy = CommonUtils.createNewClassInstance(
            Configuration.<FileWriteLocationPolicy>getClass(
                PropertyKey.USER_FILE_COPY_FROM_LOCAL_WRITE_LOCATION_POLICY),
            new Class[] {}, new Object[] {});
        os = closer.register(mFileSystem.createFile(dstPath,
            CreateFileOptions.defaults().setLocationPolicy(locationPolicy)));
        FileInputStream in = closer.register(new FileInputStream(src));
        FileChannel channel = closer.register(in.getChannel());
        ByteBuffer buf = ByteBuffer.allocate(8 * Constants.MB);
        while (channel.read(buf) != -1) {
          buf.flip();
          os.write(buf.array(), 0, buf.limit());
        }
      } catch (Exception e) {
        // Close the out stream and delete the file, so we don't have an incomplete file lying
        // around.
        if (os != null) {
          os.cancel();
          if (mFileSystem.exists(dstPath)) {
            mFileSystem.delete(dstPath);
          }
        }
        throw e;
      }
    } else {
      mFileSystem.createDirectory(dstPath);
      List<String> errorMessages = new ArrayList<>();
      File[] fileList = src.listFiles();
      if (fileList == null) {
        String errMsg = String.format("Failed to list files for directory %s", src);
        errorMessages.add(errMsg);
        fileList = new File[0];
      }
      int misFiles = 0;
      for (File srcFile : fileList) {
        AlluxioURI newURI = new AlluxioURI(dstPath, new AlluxioURI(srcFile.getName()));
        try {
          copyPath(
              new AlluxioURI(srcPath.getScheme(), srcPath.getAuthority(), srcFile.getPath()),
              newURI);
        } catch (IOException e) {
          // NOTE(review): unlike copyFromLocalDir this catches only
          // IOException, so an AlluxioException from one child aborts the
          // remaining children — confirm intentional.
          errorMessages.add(e.getMessage());
          if (!mFileSystem.exists(newURI)) {
            misFiles++;
          }
        }
      }
      if (errorMessages.size() != 0) {
        if (misFiles == fileList.length) {
          // If the directory doesn't exist and no files were created, then delete the directory
          if (mFileSystem.exists(dstPath)) {
            mFileSystem.delete(dstPath);
          }
        }
        throw new IOException(Joiner.on('\n').join(errorMessages));
      }
    }
  }

  /**
   * Copies a list of files or directories specified by srcPaths from the Alluxio filesystem to
   * dstPath in the local filesystem. This method is used when the input path contains wildcards.
   *
   * @param srcPaths the list of files in the Alluxio filesystem
   * @param dstPath the {@link AlluxioURI} of the destination directory in the local filesystem
   */
  private void copyWildcardToLocal(List<AlluxioURI> srcPaths, AlluxioURI dstPath)
      throws AlluxioException, IOException {
    File dstFile = new File(dstPath.getPath());
    if (dstFile.exists() && !dstFile.isDirectory()) {
      throw new InvalidPathException(ExceptionMessage.DESTINATION_CANNOT_BE_FILE.getMessage());
    }
    if (!dstFile.exists()) {
      if (!dstFile.mkdirs()) {
        throw new IOException("Fail to create directory: " + dstPath);
      } else {
        System.out.println("Create directory: " + dstPath);
      }
    }
    List<String> errorMessages = new ArrayList<>();
    for (AlluxioURI srcPath : srcPaths) {
      try {
        File dstSubFile = new File(dstFile.getAbsoluteFile(), srcPath.getName());
        copyToLocal(srcPath,
            new AlluxioURI(dstPath.getScheme(), dstPath.getAuthority(), dstSubFile.getPath()));
      } catch (IOException e) {
        errorMessages.add(e.getMessage());
      }
    }
    if (errorMessages.size() != 0) {
      throw new IOException(Joiner.on('\n').join(errorMessages));
    }
  }

  /**
   * Copies a file or a directory from the Alluxio filesystem to the local filesystem.
   *
   * @param srcPath the source {@link AlluxioURI} (could be a file or a directory)
   * @param dstPath the {@link AlluxioURI} of the destination in the local filesystem
   */
  private void copyToLocal(AlluxioURI srcPath, AlluxioURI dstPath)
      throws AlluxioException, IOException {
    URIStatus srcStatus = mFileSystem.getStatus(srcPath);
    File dstFile = new File(dstPath.getPath());
    if (srcStatus.isFolder()) {
      // make a local directory
      if (!dstFile.exists()) {
        if (!dstFile.mkdirs()) {
          throw new IOException("mkdir failure for directory: " + dstPath);
        } else {
          System.out.println("Create directory: " + dstPath);
        }
      }

      List<URIStatus> statuses;
      try {
        statuses = mFileSystem.listStatus(srcPath);
      } catch (AlluxioException e) {
        throw new IOException(e.getMessage());
      }

      List<String> errorMessages = new ArrayList<>();
      for (URIStatus status : statuses) {
        try {
          File subDstFile = new File(dstFile.getAbsolutePath(), status.getName());
          copyToLocal(
              new AlluxioURI(srcPath.getScheme(), srcPath.getAuthority(), status.getPath()),
              new AlluxioURI(dstPath.getScheme(), dstPath.getAuthority(), subDstFile.getPath()));
        } catch (IOException e) {
          errorMessages.add(e.getMessage());
        }
      }

      if (errorMessages.size() != 0) {
        throw new IOException(Joiner.on('\n').join(errorMessages));
      }
    } else {
      copyFileToLocal(srcPath, dstPath);
    }
  }

  /**
   * Copies a file specified by argv from the filesystem to the local filesystem. This is the
   * utility function.
   *
   * @param srcPath The source {@link AlluxioURI} (has to be a file)
   * @param dstPath The {@link AlluxioURI} of the destination in the local filesystem
   */
  private void copyFileToLocal(AlluxioURI srcPath, AlluxioURI dstPath)
      throws AlluxioException, IOException {
    File dstFile = new File(dstPath.getPath());
    // Download into a uniquely-suffixed temp file first, then rename, so a
    // failed transfer never leaves a truncated destination file.
    String randomSuffix =
        String.format(".%s_copyToLocal_", RandomStringUtils.randomAlphanumeric(8));
    File outputFile;
    if (dstFile.isDirectory()) {
      outputFile = new File(PathUtils.concatPath(dstFile.getAbsolutePath(), srcPath.getName()));
    } else {
      outputFile = dstFile;
    }
    File tmpDst = new File(outputFile.getPath() + randomSuffix);

    try (Closer closer = Closer.create()) {
      OpenFileOptions options = OpenFileOptions.defaults().setReadType(ReadType.NO_CACHE);
      FileInStream is = closer.register(mFileSystem.openFile(srcPath, options));
      FileOutputStream out = closer.register(new FileOutputStream(tmpDst));
      byte[] buf = new byte[64 * Constants.MB];
      int t = is.read(buf);
      while (t != -1) {
        out.write(buf, 0, t);
        t = is.read(buf);
      }
      if (!tmpDst.renameTo(outputFile)) {
        throw new IOException(
            "Failed to rename " + tmpDst.getPath() + " to destination " + outputFile.getPath());
      }
      System.out.println("Copied " + srcPath + " to " + "file://" + outputFile.getPath());
    } finally {
      // Always remove the temp file; a no-op after a successful rename.
      tmpDst.delete();
    }
  }

  @Override
  public String getUsage() {
    return "cp [-R] <src> <dst>";
  }

  @Override
  public String getDescription() {
    return "Copies a file or a directory in the Alluxio filesystem or between local filesystem "
        + "and Alluxio filesystem. The -R flag is needed to copy directories in the Alluxio "
        + "filesystem. Local Path with schema \"file\".";
  }

  // True when the scheme denotes the Alluxio filesystem.
  private static boolean isAlluxio(String scheme) {
    return Constants.SCHEME.equals(scheme);
  }

  // True when the scheme denotes the local filesystem.
  private static boolean isFile(String scheme) {
    return "file".equals(scheme);
  }
}
/**
 *
 */
package ua.com.fielden.platform.example.swing.treemenu.withtabs;

import java.awt.event.ActionEvent;

import javax.swing.JButton;
import javax.swing.JPanel;

import net.miginfocom.swing.MigLayout;
import ua.com.fielden.platform.example.swing.treemenu.DemoUiModel;
import ua.com.fielden.platform.swing.actions.BlockingLayerCommand;
import ua.com.fielden.platform.swing.actions.Command;
import ua.com.fielden.platform.swing.components.blocking.BlockingIndefiniteProgressPane;
import ua.com.fielden.platform.swing.menu.SimpleInfoPanel;
import ua.com.fielden.platform.swing.menu.TreeMenuItem;
import ua.com.fielden.platform.swing.menu.TreeMenuPanel;
import ua.com.fielden.platform.swing.menu.TreeMenuWithTabs;
import ua.com.fielden.platform.swing.menu.filter.WordFilter;
import ua.com.fielden.platform.swing.model.ICloseGuard;
import ua.com.fielden.platform.swing.view.BaseNotifPanel;
import ua.com.fielden.platform.swing.view.MasterPanel;

/**
 * Panel used for demonstration purposes. Builds a small tree menu with tab
 * support: a "Master" item with a nested "Cost" item (demonstrating a view
 * that can refuse to close) and an "Indirect Charges" item with a sub-item.
 *
 * @author Yura
 * @author 01es
 */
public class TreeMenuWithTabsDemoPanel extends MasterPanel {

    private static final long serialVersionUID = 6487516858880182913L;

    @SuppressWarnings("unchecked")
    public TreeMenuWithTabsDemoPanel(final BlockingIndefiniteProgressPane blockingPane) {
        // Root node of the demo tree menu.
        final TreeMenuItem menu = new TreeMenuItem("root", "root panel");

        // A plain view that can always be closed (canClose() == null).
        final BaseNotifPanel masterPanel = new BaseNotifPanel("Master Panel", new DemoUiModel(true)) {
            private static final long serialVersionUID = 1L;

            @Override
            public String getInfo() {
                return "<html>" + "<h3>Master Panel</h3>" + "Used for Work Order editing." + "</html>";
            }

            @Override
            public ICloseGuard canClose() {
                return null;
            }

            @Override
            public String whyCannotClose() {
                return "can reason";
            }
        };

        // A view whose closability is toggled by two buttons: "Cannot Close"
        // makes canClose() return this guard (blocking navigation), and
        // "Can Close" (with a simulated one-second delay behind a blocking
        // layer) makes it closable again.
        final BaseNotifPanel costPanel = new BaseNotifPanel("Cost Panel", new DemoUiModel(true)) {
            private static final long serialVersionUID = 1L;

            // null means "closable"; set to thisGuard to veto closing.
            private ICloseGuard canClose = null;
            private ICloseGuard thisGuard = this;

            @Override
            public String getInfo() {
                return "<html>" + "<h3>Cost</h3>" + "A facility for reviewing work order cost." + "</html>";
            }

            @Override
            public ICloseGuard canClose() {
                return canClose;
            }

            @Override
            public String whyCannotClose() {
                return "Costs are not saved";
            }

            @Override
            protected void layoutComponents() {
                super.layoutComponents();
                final JPanel bodyPanel = new JPanel(new MigLayout("fill, insets 0", "[fill, grow]", "[c,grow,fill][]"));
                bodyPanel.add(new JPanel(), "wrap");
                final JPanel actionPanel = new JPanel(new MigLayout("fill, insets 0", "[c, fill, grow][c, fill, grow]", "[]"));
                // "Can Close" runs behind the blocking layer to emulate a slow action.
                actionPanel.add(new JButton(new BlockingLayerCommand<Void>("Can Close", getBlockingLayer()) {
                    private static final long serialVersionUID = 1L;

                    @Override
                    protected boolean preAction() {
                        super.preAction();
                        setBlockingMessage("Making can close... one sec delay");
                        return true;
                    }

                    @Override
                    protected Void action(final ActionEvent e) throws Exception {
                        Thread.sleep(1000);
                        canClose = null;
                        return null;
                    }

                    @Override
                    protected void postAction(final Void value) {
                        setBlockingMessage("Completed");
                        super.postAction(value);
                    }
                }), "w :100:200");
                // "Cannot Close" flips the guard so the view vetoes closing.
                actionPanel.add(new JButton(new Command<Void>("Cannot Close") {
                    private static final long serialVersionUID = 1L;

                    @Override
                    protected Void action(final ActionEvent e) throws Exception {
                        canClose = thisGuard;
                        return null;
                    }
                }), "w :100:200");
                bodyPanel.add(actionPanel);
                add(bodyPanel);
            }
        };

        final String costPanelInfo = "<html>" + "<h3>Cost Info</h3>"
                + "Cost menu item demonstrates a case where a menu item cannot be closed without first finalising some action." + "<br/><br/> "
                + "By default the menu's view is closable. Button <i>Cannot Close</i> should be pressed to emulate some unclosable state. "
                + "Once pressed no other menu item can be selected or tab closed until the state is changed to closable. "
                + "This can be achieved by pressing button <i>Can Close</i>." + "</html>";

        // Master item with the cost panel nested beneath it.
        final TreeMenuItem masterMenu = new TreeMenuItem(masterPanel).//
        addItem(new TreeMenuItem(costPanel, costPanel.toString(), new SimpleInfoPanel(costPanelInfo), false));

        final String indChargesPanelInfo = "<html>" + "<h3>Indirect Charges Info</h3>"
                + "Unlike other menu items, this menu item does not require a long initialisation (e.g. no db requests) and thus its activation does not involve UI blocking."
                + "</html>";

        // Non-blocking item (DemoUiModel(false)) with one nested sub-item.
        final TreeMenuItem indChargeMenu = new TreeMenuItem(new BaseNotifPanel("Indirect Charges Panel", new DemoUiModel(false)) {
            private static final long serialVersionUID = 1L;

            @Override
            public String getInfo() {
                return "<html>" + "<h3>Indirect Charges</h3>" + "Something to do with wokr order cost entry..." + "</html>";
            }

            @Override
            public ICloseGuard canClose() {
                return null;
            }

            @Override
            public String whyCannotClose() {
                return "can change";
            }
        },//
        "Indirect Charges Panel",//
        new SimpleInfoPanel(indChargesPanelInfo), false).addItem(new TreeMenuItem(new BaseNotifPanel("Ind sub Panel", new DemoUiModel(true)) {
            private static final long serialVersionUID = 1L;

            @Override
            public String getInfo() {
                return "<html>" + "<h3>Indirect Charges Subpanel</h3>" + "Something to do with wokr order cost entry..." + "</html>";
            }

            @Override
            public ICloseGuard canClose() {
                return null;
            }

            @Override
            public String whyCannotClose() {
                return "can change";
            }
        }));

        menu.add(masterMenu);
        menu.add(indChargeMenu);

        setOneTouchExpandable(true);

        // Default info panel shown when a menu item has no info panel of its own.
        final String defaultInfo = "<html>" + "<h2>Information</h2>"
                + "This application demonstrates features of the tree menu controls with tab support where each menu item has a designated tab." + "<br/><br/> "
                + "Simple selection of a menu item does not open its content (the view), but instead displays its information panel. "
                + "Some menu items may not be provided with an information panel. " + "In this case a general window information panel is displayed." + "<br/><br/>"
                + "Double clicking a menu item or selecting it and pressing the enter key loads item's content (also known as item's view) on a separate tab. "
                + "This way navigation between menu items can be done using tabs or the tree menu."
                + "Please note that opening a menu item automatically bring the input focus to its view."
                + "Once a menu is open selecting it in the tree automatically activates a corresponding tab. Otherwise, its information panel is displayed."
                + "<br/><br/>" + "<h3>Hot Keys</h3>" + "The following hot keys are supported:"//
                + "<ul>"//
                + " <li>CTRL+1 -- focuses the tree menu."//
                + " <li>CTRL+2 -- focuses the tree filter."//
                + " <li>CTRL+3 -- focuses the tabbed pane."//
                + " <li>CRTL+I -- activates info tab."//
                + " <li>CRTL+W -- closes the current tab."//
                + " <li>CTRL+PAGE_DOWN -- moves to the next tab (circular action)."//
                + " <li>CTRL+PAGE_UP -- moves to the previous tab (circular action)."//
                + " <li>ENTER -- when applied to a closed menu item loads a corresponding view into a sepeare tab, or simply focuses a corresponding view in an open tab."//
                + "</ul>"//
                + "</html>";
        addTreeMenuPanel(new TreeMenuPanel(new TreeMenuWithTabs(menu, new WordFilter(), new SimpleInfoPanel(defaultInfo), blockingPane)));
    }

    @Override
    public String getInfo() {
        return "Tree Menu With Tabs Demo Panel";
    }
}
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.stunner.project.client.editor;

import java.util.function.Consumer;

import javax.enterprise.event.Event;

import com.google.gwtmockito.GwtMockitoTestRunner;
import com.google.gwtmockito.WithClassesToStub;
import org.guvnor.common.services.shared.metadata.model.Metadata;
import org.guvnor.common.services.shared.metadata.model.Overview;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.ioc.client.api.ManagedInstance;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.stunner.client.widgets.presenters.Viewer;
import org.kie.workbench.common.stunner.client.widgets.presenters.session.SessionPresenter;
import org.kie.workbench.common.stunner.client.widgets.presenters.session.impl.SessionEditorPresenter;
import org.kie.workbench.common.stunner.client.widgets.presenters.session.impl.SessionViewerPresenter;
import org.kie.workbench.common.stunner.core.client.ManagedInstanceStub;
import org.kie.workbench.common.stunner.core.client.api.SessionManager;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.error.DiagramClientErrorHandler;
import org.kie.workbench.common.stunner.core.client.i18n.ClientTranslationService;
import org.kie.workbench.common.stunner.core.client.preferences.StunnerPreferencesRegistry;
import org.kie.workbench.common.stunner.core.client.service.ClientRuntimeError;
import org.kie.workbench.common.stunner.core.client.service.ServiceCallback;
import org.kie.workbench.common.stunner.core.client.session.command.ClientSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.command.impl.ValidateSessionCommand;
import org.kie.workbench.common.stunner.core.client.session.impl.EditorSession;
import org.kie.workbench.common.stunner.core.client.session.impl.ViewerSession;
import org.kie.workbench.common.stunner.core.definition.exception.DefinitionNotFoundException;
import org.kie.workbench.common.stunner.core.diagram.Diagram;
import org.kie.workbench.common.stunner.core.documentation.DocumentationView;
import org.kie.workbench.common.stunner.core.graph.Graph;
import org.kie.workbench.common.stunner.core.graph.content.definition.DefinitionSet;
import org.kie.workbench.common.stunner.core.preferences.StunnerDiagramEditorPreferences;
import org.kie.workbench.common.stunner.core.preferences.StunnerPreferences;
import org.kie.workbench.common.stunner.kogito.client.editor.AbstractDiagramEditorMenuSessionItems;
import org.kie.workbench.common.stunner.kogito.client.editor.event.OnDiagramFocusEvent;
import org.kie.workbench.common.stunner.kogito.client.editor.event.OnDiagramLoseFocusEvent;
import org.kie.workbench.common.stunner.kogito.client.session.EditorSessionCommands;
import org.kie.workbench.common.stunner.project.client.resources.i18n.StunnerProjectClientConstants;
import org.kie.workbench.common.stunner.project.client.screens.ProjectMessagesListener;
import org.kie.workbench.common.stunner.project.client.service.ClientProjectDiagramService;
import org.kie.workbench.common.stunner.project.diagram.ProjectDiagram;
import org.kie.workbench.common.stunner.project.diagram.ProjectMetadata;
import org.kie.workbench.common.stunner.project.diagram.editor.ProjectDiagramResource;
import org.kie.workbench.common.stunner.project.diagram.impl.ProjectDiagramImpl;
import org.kie.workbench.common.stunner.project.service.ProjectDiagramResourceService;
import org.kie.workbench.common.widgets.client.docks.DefaultEditorDock;
import org.kie.workbench.common.widgets.client.resources.i18n.CommonConstants;
import org.kie.workbench.common.widgets.metadata.client.KieEditorWrapperView;
import org.kie.workbench.common.widgets.metadata.client.widget.OverviewWidgetPresenter;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.client.mvp.PerspectiveManager;
import org.uberfire.client.mvp.PlaceManager;
import org.uberfire.client.workbench.events.ChangeTitleWidgetEvent;
import org.uberfire.client.workbench.type.ClientResourceType;
import org.uberfire.client.workbench.widgets.common.ErrorPopupPresenter;
import org.uberfire.ext.editor.commons.client.file.popups.SavePopUpPresenter;
import org.uberfire.ext.editor.commons.client.history.VersionRecordManager;
import org.uberfire.ext.widgets.core.client.editors.texteditor.TextEditorView;
import org.uberfire.mocks.EventSourceMock;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.ParameterizedCommand;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.mvp.impl.PathPlaceRequest;
import org.uberfire.workbench.events.NotificationEvent;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentCaptor.forClass;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit tests for the project diagram editor presenter, exercised through
 * {@code ProjectDiagramEditorStub} with a fully mocked editor environment
 * (session presenters, diagram services, version management, popups).
 */
@RunWith(GwtMockitoTestRunner.class)
@WithClassesToStub(PathPlaceRequest.class)
public class ProjectDiagramEditorTest {

    // Commit message passed through the save popup in save-path tests.
    private static final String SAVE_MESSAGE = "save";
    // Error text routed through the client error handler in failure-path tests.
    private static final String ERROR_MESSAGE = "error";

    @Mock
    private PerspectiveManager perspectiveManager;

    @Mock
    private VersionRecordManager versionRecordManager;

    @Mock
    private PlaceRequest placeRequest;

    @Mock
    private AbstractProjectDiagramEditor.View view;

    @Mock
    private PlaceManager placeManager;

    @Mock
    private ErrorPopupPresenter errorPopupPresenter;

    @Mock
    private EventSourceMock<ChangeTitleWidgetEvent> changeTitleNotificationEvent;

    @Mock
    private SavePopUpPresenter savePopUpPresenter;

    @Mock
    private ClientResourceType resourceType;

    @Mock
    private ClientProjectDiagramService projectDiagramServices;

    @Mock
    private SessionManager clientSessionManager;

    @Mock
    private SessionEditorPresenter<EditorSession> sessionEditorPresenter;

    // Stubbed ManagedInstance wrapping sessionEditorPresenter (built in setup()).
    private ManagedInstance<SessionEditorPresenter<EditorSession>> sessionEditorPresenters;

    @Mock
    private SessionViewerPresenter<ViewerSession> sessionViewerPresenter;

    // Stubbed ManagedInstance wrapping sessionViewerPresenter (built in setup()).
    private ManagedInstance<SessionViewerPresenter<ViewerSession>> sessionViewerPresenters;

    @Mock
    private SessionPresenter.View presenterView;

    @Mock
    private AbstractDiagramEditorMenuSessionItems sessionItems;

    @Mock
    private EditorSessionCommands editorSessionCommands;

    @Mock
    private ValidateSessionCommand sessionValidateCommand;

    @Mock
    private EditorSession fullSession;

    @Mock
    private ObservablePath path;

    @Mock
    private ProjectDiagramImpl diagram;

    @Mock
    private Graph graph;

    @Mock
    private DefinitionSet definitionSetContent;

    @Mock
    private ProjectMetadata metadata;

    @Mock
    private Overview overview;

    @Mock
    private Metadata kieMetadata;

    @Mock
    private OverviewWidgetPresenter overviewWidgetMock;

    @Mock
    private AbstractCanvasHandler canvasHandler;

    @Mock
    private EventSourceMock<OnDiagramFocusEvent> onDiagramFocusEvent;

    @Mock
    private EventSourceMock<OnDiagramLoseFocusEvent> onDiagramLostFocusEvent;

    @Mock
    private EventSourceMock<NotificationEvent> notificationEvent;

    @Mock
    private ProjectMessagesListener projectMessagesListener;

    @Mock
    private DiagramClientErrorHandler diagramClientErrorHandler;

    @Mock
    private ClientTranslationService translationService;

    @Captor
    private ArgumentCaptor<ServiceCallback<ProjectDiagram>> serviceCallbackCaptor;

    @Captor
    private ArgumentCaptor<Consumer<String>> consumerCaptor;

    @Captor
    private ArgumentCaptor<ParameterizedCommand<String>> savePopupCommandCaptor;

    @Mock
    private TextEditorView xmlEditorView;

    @Mock
    private StunnerPreferences preferences;

    @Mock
    private StunnerDiagramEditorPreferences diagramEditorPreferences;

    // NOTE(review): field name is missing the trailing "y" ("...Registry") — kept as-is, it is an identifier.
    @Mock
    private StunnerPreferencesRegistry stunnerPreferencesRegistr;

    @Mock
    private Caller<ProjectDiagramResourceService> projectDiagramResourceServiceCaller;

    // System under test: stub subclass of the editor, created in setup().
    private ProjectDiagramEditorStub presenter;

    // Spy on the editor core created by the stub's makeCore(), so core interactions can be verified.
    private AbstractProjectDiagramEditorCore<ProjectMetadata, ProjectDiagram, ProjectDiagramResource, ProjectDiagramEditorProxy<ProjectDiagramResource>> presenterCore;

    @Mock
    private DocumentationView documentationView;

    /**
     * Wires the mocked collaborators together, builds the presenter stub (injecting mocked
     * docks/overview/version-manager/place via an instance-initializer block) and initializes it.
     */
    @Before
    @SuppressWarnings("unchecked")
    public void setup() {
        sessionEditorPresenters = new ManagedInstanceStub<>(sessionEditorPresenter);
        sessionViewerPresenters = new ManagedInstanceStub<>(sessionViewerPresenter);
        // Effectively-final alias so the anonymous subclass below can assign it to its field.
        VersionRecordManager versionRecordManagerMock = versionRecordManager;
        when(versionRecordManager.getCurrentPath()).thenReturn(path);
        // Menu session items use a fluent builder style — each setter returns the mock itself.
        when(sessionItems.setErrorConsumer(any(Consumer.class))).thenReturn(sessionItems);
        when(sessionItems.setLoadingStarts(any(Command.class))).thenReturn(sessionItems);
        when(sessionItems.setLoadingCompleted(any(Command.class))).thenReturn(sessionItems);
        when(sessionItems.getCommands()).thenReturn(editorSessionCommands);
        when(editorSessionCommands.getValidateSessionCommand()).thenReturn(sessionValidateCommand);
        when(sessionEditorPresenter.getInstance()).thenReturn(fullSession);
        // Session presenter is also fluent.
        when(sessionEditorPresenter.withToolbar(anyBoolean())).thenReturn(sessionEditorPresenter);
        when(sessionEditorPresenter.withPalette(anyBoolean())).thenReturn(sessionEditorPresenter);
        when(sessionEditorPresenter.displayNotifications(any())).thenReturn(sessionEditorPresenter);
        when(sessionEditorPresenter.getView()).thenReturn(presenterView);
        // Opening a diagram immediately drives the full callback lifecycle, synchronously.
        doAnswer(new Answer() {
            @Override
            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
                Diagram diagram = (Diagram) invocationOnMock.getArguments()[0];
                SessionPresenter.SessionPresenterCallback callback = (SessionPresenter.SessionPresenterCallback) invocationOnMock.getArguments()[1];
                callback.onOpen(diagram);
                callback.afterCanvasInitialized();
                callback.afterSessionOpened();
                callback.onSuccess();
                return null;
            }
        }).when(sessionEditorPresenter).open(any(Diagram.class),
                                             any(SessionPresenter.SessionPresenterCallback.class));
        when(diagram.getMetadata()).thenReturn(metadata);
        when(diagram.getName()).thenReturn("Title");
        when(metadata.getOverview()).thenReturn(overview);
        when(overview.getMetadata()).thenReturn(kieMetadata);
        when(fullSession.getCanvasHandler()).thenReturn(canvasHandler);
        when(canvasHandler.getDiagram()).thenReturn(diagram);
        when(resourceType.getSuffix()).thenReturn("bpmn");
        when(resourceType.getShortName()).thenReturn("bpmn");
        when(placeRequest.getIdentifier()).thenReturn(ProjectDiagramEditorStub.EDITOR_ID);
        when(diagram.getGraph()).thenReturn(graph);
        when(graph.getContent()).thenReturn(definitionSetContent);
        when(stunnerPreferencesRegistr.get(StunnerPreferences.class)).thenReturn(preferences);
        when(preferences.getDiagramEditorPreferences()).thenReturn(diagramEditorPreferences);
        this.presenter = new ProjectDiagramEditorStub(view,
                                                      xmlEditorView,
                                                      sessionEditorPresenters,
                                                      sessionViewerPresenters,
                                                      onDiagramFocusEvent,
                                                      onDiagramLostFocusEvent,
                                                      notificationEvent,
                                                      errorPopupPresenter,
                                                      diagramClientErrorHandler,
                                                      documentationView,
                                                      resourceType,
                                                      sessionItems,
                                                      projectMessagesListener,
                                                      translationService,
                                                      projectDiagramServices,
                                                      projectDiagramResourceServiceCaller,
                                                      placeManager,
                                                      changeTitleNotificationEvent,
                                                      savePopUpPresenter) {
            {
                // Instance initializer: inject inherited protected collaborators with mocks.
                docks = mock(DefaultEditorDock.class);
                perspectiveManager = ProjectDiagramEditorTest.this.perspectiveManager;
                overviewWidget = overviewWidgetMock;
                versionRecordManager = versionRecordManagerMock;
                place = placeRequest;
                kieView = mock(KieEditorWrapperView.class);
            }

            // Intercept core creation so tests can capture and spy on the real core.
            @Override
            protected AbstractProjectDiagramEditorCore<ProjectMetadata, ProjectDiagram, ProjectDiagramResource, ProjectDiagramEditorProxy<ProjectDiagramResource>> makeCore(final AbstractProjectDiagramEditor.View view,
                                                                                                                                                                            final TextEditorView xmlEditorView,
                                                                                                                                                                            final Event<NotificationEvent> notificationEvent,
                                                                                                                                                                            final ManagedInstance<SessionEditorPresenter<EditorSession>> editorSessionPresenterInstances,
                                                                                                                                                                            final ManagedInstance<SessionViewerPresenter<ViewerSession>> viewerSessionPresenterInstances,
                                                                                                                                                                            final AbstractDiagramEditorMenuSessionItems<?> menuSessionItems,
                                                                                                                                                                            final ErrorPopupPresenter errorPopupPresenter,
                                                                                                                                                                            final DiagramClientErrorHandler diagramClientErrorHandler,
                                                                                                                                                                            final ClientTranslationService translationService) {
                presenterCore = spy(super.makeCore(view,
                                                   xmlEditorView,
                                                   notificationEvent,
                                                   editorSessionPresenterInstances,
                                                   viewerSessionPresenterInstances,
                                                   menuSessionItems,
                                                   errorPopupPresenter,
                                                   diagramClientErrorHandler,
                                                   translationService));
                return presenterCore;
            }
        };
        presenter.init();
        presenterCore.setEditorSessionPresenter(sessionEditorPresenter);
        // Translation service echoes back the requested key.
        when(translationService.getValue(anyString())).thenAnswer(i -> i.getArguments()[0]);
    }

    // init() alone must not bind the session to the menu items yet.
    @Test
    @SuppressWarnings("unchecked")
    public void testInit() {
        verify(sessionItems,
               times(0)).bind(eq(fullSession));
    }

    // Saving must run session validation before persisting.
    // TODO
    @Test
    @SuppressWarnings("unchecked")
    public void testValidateBeforeSave() {
        presenter.save();
        verify(sessionValidateCommand,
               times(1)).execute(any(ClientSessionCommand.Callback.class));
    }

    // TODO: @Test - versionRecordManager is not being set.
    @SuppressWarnings("unchecked")
    public void testLoadContent() {
        presenter.loadContent();
        verify(projectDiagramServices,
               times(1)).getByPath(eq(path),
                                   any(ServiceCallback.class));
    }

    // A load failure must close the place and surface the error through the popup presenter.
    @Test
    @SuppressWarnings("unchecked")
    public void testLoadContentError() {
        ArgumentCaptor<ServiceCallback> callbackArgumentCaptor = forClass(ServiceCallback.class);
        presenter.loadContent();
        verify(projectDiagramServices,
               times(1)).getByPath(eq(path),
                                   callbackArgumentCaptor.capture());
        callbackArgumentCaptor.getValue().onError(new ClientRuntimeError(new DefinitionNotFoundException()));
        verify(placeManager,
               times(1)).forceClosePlace(any(PathPlaceRequest.class));
        ArgumentCaptor<Consumer> consumerArgumentCaptor = forClass(Consumer.class);
        verify(diagramClientErrorHandler,
               times(1)).handleError(any(ClientRuntimeError.class),
                                     consumerArgumentCaptor.capture());
        consumerArgumentCaptor.getValue().accept("error message");
        verify(errorPopupPresenter,
               times(1)).showMessage("error message");
    }

    // Dirtiness is detected by comparing the current diagram hash against the stored original hash.
    @Test
    public void testIsDirty() {
        presenter.init();
        presenter.open(diagram,
                       mock(Viewer.Callback.class));
        assertFalse(presenter.isDirty(presenter.getCurrentDiagramHash()));
        presenter.setOriginalHash(~~(presenter.getCurrentDiagramHash() + 1));
        assertTrue(presenter.isDirty(presenter.getCurrentDiagramHash()));
    }

    // hasUnsavedChanges() mirrors isDirty() and flips back once the hashes match again.
    @Test
    public void testHasChanges() {
        presenter.init();
        presenter.open(diagram,
                       mock(Viewer.Callback.class));
        assertFalse(presenter.hasUnsavedChanges());
        presenter.setOriginalHash(~~(presenter.getCurrentDiagramHash() + 1));
        assertTrue(presenter.hasUnsavedChanges());
        presenter.setOriginalHash(~~(presenter.getCurrentDiagramHash()));
        assertFalse(presenter.hasUnsavedChanges());
    }

    // Saving a clean, latest-version diagram only reports "no changes".
    @Test
    public void testOnSaveWithoutChanges() {
        presenter.open(diagram,
                       mock(Viewer.Callback.class));
        when(versionRecordManager.isCurrentLatest()).thenReturn(true);
        presenter.onSave();
        verify(presenterView).showMessage(CommonConstants.INSTANCE.NoChangesSinceLastSave());
    }

    // Saving a dirty diagram validates first, then goes through the save popup flow.
    @Test
    @SuppressWarnings("unchecked")
    public void testOnSaveWithChanges() {
        presenter.open(diagram,
                       mock(Viewer.Callback.class));
        presenter.setOriginalHash(diagram.hashCode() + 1);
        doAnswer(i -> {
            ((ClientSessionCommand.Callback) i.getArguments()[0]).onSuccess();
            return null;
        }).when(sessionValidateCommand).execute(any(ClientSessionCommand.Callback.class));
        presenter.onSave();
        assertOnSaveSavedDiagram();
    }

    // Even without hash changes, an explicit save on a restored version persists the diagram.
    @Test
    @SuppressWarnings("unchecked")
    public void testOnSaveRestore() {
        presenter.open(diagram,
                       mock(Viewer.Callback.class));
        doAnswer(i -> {
            ((ClientSessionCommand.Callback) i.getArguments()[0]).onSuccess();
            return null;
        }).when(sessionValidateCommand).execute(any(ClientSessionCommand.Callback.class));
        presenter.onSave();
        assertOnSaveSavedDiagram();
    }

    // Shared assertion: save popup shown, its command executed with SAVE_MESSAGE, service invoked.
    private void assertOnSaveSavedDiagram() {
        verify(savePopUpPresenter).show(eq(path),
                                        savePopupCommandCaptor.capture());
        final ParameterizedCommand<String> savePopupCommand = savePopupCommandCaptor.getValue();
        savePopupCommand.execute(SAVE_MESSAGE);
        verify(projectDiagramServices).saveOrUpdate(eq(path),
                                                    eq(diagram),
                                                    any(Metadata.class),
                                                    eq(SAVE_MESSAGE),
                                                    serviceCallbackCaptor.capture());
    }

    // Successful save reloads versions and shows the success message.
    // TODO
    @Test
    public void testSaveWithCommitMessageOnSuccess() {
        presenter.save(SAVE_MESSAGE);
        verify(view).showSaving();
        verify(projectDiagramServices).saveOrUpdate(eq(path),
                                                    any(ProjectDiagramImpl.class),
                                                    any(Metadata.class),
                                                    eq(SAVE_MESSAGE),
                                                    serviceCallbackCaptor.capture());
        final ServiceCallback<ProjectDiagram> serviceCallback = serviceCallbackCaptor.getValue();
        final ProjectDiagramImpl diagram = mock(ProjectDiagramImpl.class);
        serviceCallback.onSuccess(diagram);
        verify(view).hideBusyIndicator();
        verify(versionRecordManager).reloadVersions(eq(path));
        verify(presenterView).showMessage(StunnerProjectClientConstants.DIAGRAM_SAVE_SUCCESSFUL);
    }

    // Failed save routes the error through the client error handler to the error popup.
    // TODO
    @Test
    public void testSaveWithCommitMessageOnError() {
        presenter.save(SAVE_MESSAGE);
        verify(view).showSaving();
        verify(projectDiagramServices).saveOrUpdate(eq(path),
                                                    any(ProjectDiagramImpl.class),
                                                    any(Metadata.class),
                                                    eq(SAVE_MESSAGE),
                                                    serviceCallbackCaptor.capture());
        final ServiceCallback<ProjectDiagram> serviceCallback = serviceCallbackCaptor.getValue();
        final ClientRuntimeError error = mock(ClientRuntimeError.class);
        serviceCallback.onError(error);
        verify(diagramClientErrorHandler).handleError(eq(error),
                                                      consumerCaptor.capture());
        final Consumer<String> consumer = consumerCaptor.getValue();
        consumer.accept(ERROR_MESSAGE);
        verify(errorPopupPresenter).showMessage(ERROR_MESSAGE);
    }

    @Test
    public void testShowLoadingViews() {
        presenter.showLoadingViews();
        verify(view).showLoading();
    }

    @Test
    public void testShowSavingViews() {
        presenter.showSavingViews();
        verify(view).showSaving();
    }

    @Test
    public void testHideLoadingViews() {
        presenter.hideLoadingViews();
        verify(view).hideBusyIndicator();
    }
}
/*
 * Copyright (c) Microsoft. All rights reserved.
 * Licensed under the MIT license. See LICENSE file in the project root for full license information.
 */

package com.microsoft.azure.iot.service.transport.amqps;

import com.microsoft.azure.iot.service.sdk.Message;
import mockit.Expectations;
import mockit.Mocked;
import mockit.integration.junit4.JMockit;
import org.apache.qpid.proton.Proton;
import org.apache.qpid.proton.engine.Connection;
import org.apache.qpid.proton.engine.Event;
import org.apache.qpid.proton.engine.Session;
import org.apache.qpid.proton.reactor.Reactor;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.io.IOException;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

/**
 * Unit tests for AmqpSend.
 * <p>
 * Uses JMockit to mock the Proton AMQP engine types; each Expectations block records the
 * interactions that the subsequent act step is required to perform.
 */
@RunWith(JMockit.class)
public class AmqpSendTest {
    // Mocked Proton engine collaborators, injected by JMockit for every test.
    @Mocked Proton proton;
    @Mocked Reactor reactor;
    @Mocked Event event;
    @Mocked Connection connection;
    @Mocked Session session;

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_002: [The constructor shall copy all input parameters to private member variables for event processing]
    @Test
    public void constructor_copies_params_to_members()
    {
        // Arrange
        String hostName = "aaa";
        String userName = "bbb";
        String sasToken = "ccc";
        // Act
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
        String _hostName = amqpSend.hostName;
        String _userName = amqpSend.userName;
        String _sasToken = amqpSend.sasToken;
        // Assert
        assertEquals(hostName, _hostName);
        assertEquals(userName, _userName);
        assertEquals(sasToken, _sasToken);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_001: [The constructor shall throw IllegalArgumentException if any of the input parameter is null or empty]
    // Assert
    @Test (expected = IllegalArgumentException.class)
    public void constructor_checks_if_hostName_null()
    {
        // Arrange
        String hostName = null;
        String userName = "bbb";
        String sasToken = "ccc";
        // Act
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_001: [The constructor shall throw IllegalArgumentException if any of the input parameter is null or empty]
    // Assert
    @Test (expected = IllegalArgumentException.class)
    public void constructor_checks_if_hostName_empty()
    {
        // Arrange
        String hostName = "";
        String userName = "bbb";
        String sasToken = "ccc";
        // Act
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_001: [The constructor shall throw IllegalArgumentException if any of the input parameter is null or empty]
    // Assert
    @Test (expected = IllegalArgumentException.class)
    public void constructor_checks_if_userName_null()
    {
        // Arrange
        String hostName = "aaa";
        String userName = null;
        String sasToken = "ccc";
        // Act
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_001: [The constructor shall throw IllegalArgumentException if any of the input parameter is null or empty]
    // Assert
    @Test (expected = IllegalArgumentException.class)
    public void constructor_checks_if_userName_empty()
    {
        // Arrange
        String hostName = "aaa";
        String userName = "";
        String sasToken = "ccc";
        // Act
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_001: [The constructor shall throw IllegalArgumentException if any of the input parameter is null or empty]
    // Assert
    @Test (expected = IllegalArgumentException.class)
    public void constructor_checks_if_sasToken_null()
    {
        // Arrange
        String hostName = "aaa";
        String userName = "bbb";
        String sasToken = null;
        // Act
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_001: [The constructor shall throw IllegalArgumentException if any of the input parameter is null or empty]
    // Assert
    @Test (expected = IllegalArgumentException.class)
    public void constructor_checks_if_sasToken_empty()
    {
        // Arrange
        String hostName = "aaa";
        String userName = "bbb";
        String sasToken = "";
        // Act
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_003: [The event handler shall set the member AmqpsSendHandler object to handle the given connection events]
    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_004: [The event handler shall create an AmqpsSendHandler object to handle reactor events]
    @Test
    public void onReactorInit_creates_SendHandler() throws IOException
    {
        // Arrange
        String hostName = "aaa";
        String userName = "bbb";
        String sasToken = "ccc";
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
        amqpSend.open();
        // Assert — JMockit: record the reactor/connection interactions expected from the act step.
        new Expectations()
        {
            {
                reactor = event.getReactor();
                connection = reactor.connection(amqpSend.amqpSendHandler);
            }
        };
        // Act
        amqpSend.onReactorInit(event);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_005: [The event handler shall invalidate the member AmqpsSendHandler object]
    @Test
    public void onReactorInit_invalidates_SendHandler() throws IOException
    {
        // Arrange
        String hostName = "aaa";
        String userName = "bbb";
        String sasToken = "ccc";
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
        amqpSend.open();
        amqpSend.onReactorInit(event);
        // Act — close() is expected to null out the send handler.
        amqpSend.close();
        // Assert
        assertNull(amqpSend.amqpSendHandler);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_006: [The event handler shall create a Proton message with the given content]
    @Test
    public void send_creates_ProtonMessage() throws Exception
    {
        // Arrange
        String hostName = "aaa";
        String userName = "bbb";
        String sasToken = "ccc";
        String deviceId = "deviceId";
        String content = "abcdefghijklmnopqrst";
        Message message = new Message(content);
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
        amqpSend.open();
        // Assert — record that send() must delegate message creation to the handler.
        new Expectations()
        {
            {
                amqpSend.amqpSendHandler.createProtonMessage(deviceId, message);
            }
        };
        // Act
        amqpSend.send(deviceId, message);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_007: [The event handler shall initialize the Proton reactor object]
    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_008: [The event handler shall start the Proton reactor object]
    @Test
    public void send_initializes_Reactor() throws Exception
    {
        // Arrange
        String hostName = "aaa";
        String userName = "bbb";
        String sasToken = "ccc";
        String deviceId = "deviceId";
        String content = "abcdefghijklmnopqrst";
        Message message = new Message(content);
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
        amqpSend.open();
        // Assert — record that send() must create and run a reactor for this instance.
        new Expectations()
        {
            {
                reactor = proton.reactor(amqpSend);
                reactor.run();
            }
        };
        // Act
        amqpSend.send(deviceId, message);
    }

    // Tests_SRS_SERVICE_SDK_JAVA_AMQPSEND_12_009: [The event handler shall throw IOException if the send handler object is not initialized]
    // Assert
    @Test (expected = IOException.class)
    public void send_throwsIOException_when_open_has_not_been_called() throws Exception
    {
        // Arrange — open() intentionally not called, so the send handler is uninitialized.
        String hostName = "aaa";
        String userName = "bbb";
        String sasToken = "ccc";
        String deviceId = "deviceId";
        String content = "abcdefghijklmnopqrst";
        Message message = new Message(content);
        AmqpSend amqpSend = new AmqpSend(hostName, userName, sasToken);
        // Act
        amqpSend.send(deviceId, message);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.generator.openapi; import java.util.Arrays; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.MethodSpec.Builder; import io.apicurio.datamodels.openapi.models.OasOperation; import io.apicurio.datamodels.openapi.models.OasPathItem; import io.apicurio.datamodels.openapi.models.OasPaths; import io.apicurio.datamodels.openapi.v2.models.Oas20Document; import io.apicurio.datamodels.openapi.v2.models.Oas20Parameter; import io.apicurio.datamodels.openapi.v3.models.Oas30Document; import io.apicurio.datamodels.openapi.v3.models.Oas30Parameter; import io.apicurio.datamodels.openapi.v3.models.Oas30ParameterDefinition; import io.apicurio.datamodels.openapi.v3.models.Oas30Schema; import org.apache.camel.generator.openapi.PathVisitor.HttpMethod; import org.junit.jupiter.api.Test; import static org.assertj.core.api.Assertions.assertThat; public class OperationVisitorTest { @Test public void shouldEmitCodeForOas2ParameterInQuery() { final Builder method = MethodSpec.methodBuilder("configure"); final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method); final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, null, null, null); final 
Oas20Parameter parameter = new Oas20Parameter("param"); parameter.in = "query"; visitor.emit(parameter); assertThat(method.build().toString()).isEqualTo("void configure() {\n" + " param()\n" + " .name(\"param\")\n" + " .type(org.apache.camel.model.rest.RestParamType.query)\n" + " .required(false)\n" + " .endParam()}\n"); } @Test public void shouldEmitCodeForOas32arameterInPath() { final Builder method = MethodSpec.methodBuilder("configure"); final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method); final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, new OperationFilter(), "/path/{param}", new DirectToOperationId()); final Oas20Document document = new Oas20Document(); final OasPaths paths = document.createPaths(); final OasPathItem path = paths.addPathItem("", paths.createPathItem("/path/{param}")); final OasOperation operation = path.createOperation("get"); final Oas20Parameter parameter = new Oas20Parameter("param"); parameter.in = "path"; path.addParameter(parameter); visitor.visit(HttpMethod.GET, operation); assertThat(method.build().toString()).isEqualTo("void configure() {\n" + " get(\"/path/{param}\")\n" + " .param()\n" + " .name(\"param\")\n" + " .type(org.apache.camel.model.rest.RestParamType.path)\n" + " .required(false)\n" + " .endParam()\n" + " .to(\"direct:rest1\")}\n"); } @Test public void shouldEmitCodeForOas3ParameterInPath() { final Builder method = MethodSpec.methodBuilder("configure"); final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method); final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, new OperationFilter(), "/path/{param}", new DirectToOperationId()); final Oas30Document document = new Oas30Document(); final OasPaths paths = document.createPaths(); final OasPathItem path = paths.addPathItem("", paths.createPathItem("/path/{param}")); final OasOperation operation = path.createOperation("get"); final Oas30Parameter parameter = new Oas30Parameter("param"); 
parameter.in = "path"; path.addParameter(parameter); visitor.visit(HttpMethod.GET, operation); assertThat(method.build().toString()).isEqualTo("void configure() {\n" + " get(\"/path/{param}\")\n" + " .param()\n" + " .name(\"param\")\n" + " .type(org.apache.camel.model.rest.RestParamType.path)\n" + " .required(false)\n" + " .endParam()\n" + " .to(\"direct:rest1\")}\n"); } @Test public void shouldEmitCodeForOas3ParameterWithDefaultValue() { final Builder method = MethodSpec.methodBuilder("configure"); final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method); final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, null, null, null); final Oas30Parameter parameter = new Oas30Parameter("param"); parameter.in = "path"; parameter.schema = parameter.createSchema(); ((Oas30Schema) parameter.schema).default_ = "default"; visitor.emit(parameter); assertThat(method.build().toString()).isEqualTo("void configure() {\n" + " param()\n" + " .name(\"param\")\n" + " .type(org.apache.camel.model.rest.RestParamType.path)\n" + " .defaultValue(\"default\")\n" + " .required(false)\n" + " .endParam()}\n"); } @Test public void shouldEmitCodeForOas3ParameterWithEnum() { final Builder method = MethodSpec.methodBuilder("configure"); final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method); final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, null, null, null); final Oas30Parameter parameter = new Oas30Parameter("param"); parameter.in = "query"; parameter.schema = parameter.createSchema(); ((Oas30Schema) parameter.schema).enum_ = Arrays.asList("one", "two", "three"); visitor.emit(parameter); assertThat(method.build().toString()).isEqualTo("void configure() {\n" + " param()\n" + " .name(\"param\")\n" + " .type(org.apache.camel.model.rest.RestParamType.query)\n" + " .allowableValues(\"one,two,three\")\n" + " .required(false)\n" + " .endParam()}\n"); } @Test public void shouldEmitCodeForOas3ParameterWithType() { final 
Builder method = MethodSpec.methodBuilder("configure"); final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method); final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, null, null, null); final Oas30Parameter parameter = new Oas30Parameter("param"); parameter.in = "query"; parameter.schema = parameter.createSchema(); ((Oas30Schema) parameter.schema).type = "integer"; visitor.emit(parameter); assertThat(method.build().toString()).isEqualTo("void configure() {\n" + " param()\n" + " .name(\"param\")\n" + " .type(org.apache.camel.model.rest.RestParamType.query)\n" + " .dataType(\"integer\")\n" + " .required(false)\n" + " .endParam()}\n"); } @Test public void shouldEmitCodeForOas3PathParameter() { final Builder method = MethodSpec.methodBuilder("configure"); final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method); final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, null, null, null); final Oas30Parameter parameter = new Oas30Parameter("param"); parameter.in = "path"; visitor.emit(parameter); assertThat(method.build().toString()).isEqualTo("void configure() {\n" + " param()\n" + " .name(\"param\")\n" + " .type(org.apache.camel.model.rest.RestParamType.path)\n" + " .required(false)\n" + " .endParam()}\n"); } @Test public void shouldEmitCodeForOas3RefParameters() { final Builder method = MethodSpec.methodBuilder("configure"); final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(method); final OperationVisitor<?> visitor = new OperationVisitor<>(emitter, null, null, null); final Oas30Document document = new Oas30Document(); document.components = document.createComponents(); final Oas30ParameterDefinition parameterDefinition = new Oas30ParameterDefinition("param"); parameterDefinition.in = "query"; document.components.addParameterDefinition("param", parameterDefinition); final Oas30Parameter parameter = new Oas30Parameter(); parameter._ownerDocument = document; 
parameter.$ref = "#/components/parameters/param"; visitor.emit(parameter); assertThat(method.build().toString()).isEqualTo("void configure() {\n" + " param()\n" + " .name(\"param\")\n" + " .type(org.apache.camel.model.rest.RestParamType.query)\n" + " .required(false)\n" + " .endParam()}\n"); } }
package org.zstack.storage.primary.smp; import org.springframework.beans.factory.annotation.Autowire; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Configurable; import org.springframework.transaction.annotation.Transactional; import org.zstack.core.asyncbatch.AsyncBatchRunner; import org.zstack.core.asyncbatch.LoopAsyncBatch; import org.zstack.core.componentloader.PluginRegistry; import org.zstack.core.db.SimpleQuery; import org.zstack.core.db.SimpleQuery.Op; import org.zstack.header.cluster.ClusterVO; import org.zstack.header.cluster.ClusterVO_; import org.zstack.header.core.Completion; import org.zstack.header.core.NoErrorCompletion; import org.zstack.header.core.ReturnValueCompletion; import org.zstack.header.errorcode.ErrorCode; import org.zstack.header.errorcode.OperationFailureException; import org.zstack.header.exception.CloudRuntimeException; import org.zstack.header.host.*; import org.zstack.header.message.Message; import org.zstack.header.storage.primary.*; import org.zstack.header.storage.primary.VolumeSnapshotCapability.VolumeSnapshotArrangementType; import org.zstack.header.storage.snapshot.VolumeSnapshotInventory; import org.zstack.header.volume.VolumeFormat; import org.zstack.header.volume.VolumeVO; import org.zstack.header.volume.VolumeVO_; import org.zstack.storage.primary.PrimaryStorageBase; import org.zstack.storage.primary.PrimaryStorageCapacityRecalculator; import org.zstack.storage.primary.PrimaryStorageCapacityUpdater; import org.zstack.utils.Utils; import org.zstack.utils.logging.CLogger; import static org.zstack.core.Platform.operr; import javax.persistence.TypedQuery; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; /** * Created by xing5 on 2016/3/26. 
*/ @Configurable(preConstruction = true, autowire = Autowire.BY_TYPE) public class SMPPrimaryStorageBase extends PrimaryStorageBase { private static final CLogger logger = Utils.getLogger(SMPPrimaryStorageBase.class); @Autowired private PluginRegistry pluginRgty; public SMPPrimaryStorageBase(PrimaryStorageVO self) { super(self); } private HypervisorFactory getHypervisorFactoryByHypervisorType(String hvType) { for (HypervisorFactory f : pluginRgty.getExtensionList(HypervisorFactory.class)) { if (hvType.equals(f.getHypervisorType())) { return f; } } throw new CloudRuntimeException(String.format("cannot find HypervisorFactory[type = %s]", hvType)); } protected HypervisorFactory getHypervisorFactoryByHostUuid(String huuid) { SimpleQuery<HostVO> q = dbf.createQuery(HostVO.class); q.select(HostVO_.hypervisorType); q.add(HostVO_.uuid, Op.EQ, huuid); String hvType = q.findValue(); return getHypervisorFactoryByHypervisorType(hvType); } protected HypervisorFactory getHypervisorFactoryByClusterUuid(String cuuid) { SimpleQuery<ClusterVO> q = dbf.createQuery(ClusterVO.class); q.select(ClusterVO_.hypervisorType); q.add(ClusterVO_.uuid, Op.EQ, cuuid); String hvType = q.findValue(); return getHypervisorFactoryByHypervisorType(hvType); } @Override public void attachHook(String clusterUuid, final Completion completion) { HypervisorBackend bkd = getHypervisorFactoryByClusterUuid(clusterUuid).getHypervisorBackend(self); bkd.attachHook(clusterUuid, completion); } @Override protected void handle(final InstantiateVolumeOnPrimaryStorageMsg msg) { if (msg.getDestHost() == null) { String hostUuid = getAvailableHostUuidForOperation(); if (hostUuid == null) { throw new OperationFailureException(operr("the shared mount point primary storage[uuid:%s, name:%s] cannot find any " + "available host in attached clusters for instantiating the volume", self.getUuid(), self.getName())); } msg.setDestHost(HostInventory.valueOf(dbf.findByUuid(hostUuid, HostVO.class))); } HypervisorFactory f = 
getHypervisorFactoryByHostUuid(msg.getDestHost().getUuid()); HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new ReturnValueCompletion<InstantiateVolumeOnPrimaryStorageReply>(msg) { @Override public void success(InstantiateVolumeOnPrimaryStorageReply reply) { bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { InstantiateVolumeOnPrimaryStorageReply reply = new InstantiateVolumeOnPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } @Override protected void handle(final DeleteVolumeOnPrimaryStorageMsg msg) { HypervisorType type = VolumeFormat.getMasterHypervisorTypeByVolumeFormat(msg.getVolume().getFormat()); HypervisorFactory f = getHypervisorFactoryByHypervisorType(type.toString()); final HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new ReturnValueCompletion<DeleteVolumeOnPrimaryStorageReply>(msg) { @Override public void success(DeleteVolumeOnPrimaryStorageReply reply) { logger.debug( String.format("successfully delete volume[uuid:%s]", msg.getVolume().getUuid())); bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { logger.debug( String.format("can't delete volume[uuid:%s] right now, add a GC job", msg.getVolume().getUuid())); SMPDeleteVolumeGC gc = new SMPDeleteVolumeGC(); gc.NAME = String.format("gc-smp-%s-volume-%s", self.getUuid(), msg.getVolume()); gc.primaryStorageUuid = self.getUuid(); gc.hypervisorType = type.toString(); gc.volume = msg.getVolume(); gc.submit(SMPPrimaryStorageGlobalConfig.GC_INTERVAL.value(Long.class), TimeUnit.SECONDS); DeleteVolumeOnPrimaryStorageReply reply = new DeleteVolumeOnPrimaryStorageReply(); bus.reply(msg, reply); } }); } @Override protected void handle(final CreateTemplateFromVolumeOnPrimaryStorageMsg msg) { HypervisorType type = VolumeFormat.getMasterHypervisorTypeByVolumeFormat(msg.getVolumeInventory().getFormat()); HypervisorFactory f = getHypervisorFactoryByHypervisorType(type.toString()); 
HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new ReturnValueCompletion<CreateTemplateFromVolumeOnPrimaryStorageReply>(msg) { @Override public void success(CreateTemplateFromVolumeOnPrimaryStorageReply reply) { bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { CreateTemplateFromVolumeOnPrimaryStorageReply reply = new CreateTemplateFromVolumeOnPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } @Override protected void handle(final DownloadDataVolumeToPrimaryStorageMsg msg) { HypervisorType type = VolumeFormat.getMasterHypervisorTypeByVolumeFormat(msg.getImage().getFormat()); HypervisorFactory f = getHypervisorFactoryByHypervisorType(type.toString()); HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new ReturnValueCompletion<DownloadDataVolumeToPrimaryStorageReply>(msg) { @Override public void success(DownloadDataVolumeToPrimaryStorageReply reply) { bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { DownloadDataVolumeToPrimaryStorageReply reply = new DownloadDataVolumeToPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } @Override protected void handle(final DeleteBitsOnPrimaryStorageMsg msg) { HypervisorFactory f = getHypervisorFactoryByHypervisorType(msg.getHypervisorType()); HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new ReturnValueCompletion<DeleteBitsOnPrimaryStorageReply>(msg) { @Override public void success(DeleteBitsOnPrimaryStorageReply reply) { bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { DeleteBitsOnPrimaryStorageReply reply = new DeleteBitsOnPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } @Override protected void handle(final DownloadIsoToPrimaryStorageMsg msg) { HypervisorFactory f = getHypervisorFactoryByHostUuid(msg.getDestHostUuid()); HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new 
ReturnValueCompletion<DownloadIsoToPrimaryStorageReply>(msg) { @Override public void success(DownloadIsoToPrimaryStorageReply reply) { bus.reply(msg, reply); } @Override public void fail(ErrorCode error) { DownloadIsoToPrimaryStorageReply reply = new DownloadIsoToPrimaryStorageReply(); reply.setError(error); bus.reply(msg, reply); } }); } @Override protected void handle(final DeleteIsoFromPrimaryStorageMsg msg) { HypervisorType type = VolumeFormat.getMasterHypervisorTypeByVolumeFormat(msg.getIsoSpec().getInventory().getFormat()); HypervisorFactory f = getHypervisorFactoryByHypervisorType(type.toString()); HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new ReturnValueCompletion<DeleteIsoFromPrimaryStorageReply>(msg) { @Override public void success(DeleteIsoFromPrimaryStorageReply reply) { bus.reply(msg, reply); } @Override public void fail(ErrorCode error) { DeleteIsoFromPrimaryStorageReply reply = new DeleteIsoFromPrimaryStorageReply(); reply.setError(error); bus.reply(msg, reply); } }); } @Override protected void handle(AskVolumeSnapshotCapabilityMsg msg) { AskVolumeSnapshotCapabilityReply reply = new AskVolumeSnapshotCapabilityReply(); VolumeSnapshotCapability capability = new VolumeSnapshotCapability(); capability.setSupport(true); capability.setArrangementType(VolumeSnapshotArrangementType.CHAIN); reply.setCapability(capability); bus.reply(msg, reply); } @Override protected void handle(final SyncVolumeSizeOnPrimaryStorageMsg msg) { SimpleQuery<VolumeVO> q = dbf.createQuery(VolumeVO.class); q.select(VolumeVO_.format); q.add(VolumeVO_.uuid, Op.EQ, msg.getVolumeUuid()); String format = q.findValue(); HypervisorType type = VolumeFormat.getMasterHypervisorTypeByVolumeFormat(format); HypervisorFactory f = getHypervisorFactoryByHypervisorType(type.toString()); HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new ReturnValueCompletion<SyncVolumeSizeOnPrimaryStorageReply>(msg) { @Override public void 
success(SyncVolumeSizeOnPrimaryStorageReply returnValue) { bus.reply(msg, returnValue); } @Override public void fail(ErrorCode errorCode) { SyncVolumeSizeOnPrimaryStorageReply reply = new SyncVolumeSizeOnPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } @Override protected void connectHook(ConnectParam param, final Completion completion) { SimpleQuery<PrimaryStorageClusterRefVO> q = dbf.createQuery(PrimaryStorageClusterRefVO.class); q.select(PrimaryStorageClusterRefVO_.clusterUuid); q.add(PrimaryStorageClusterRefVO_.primaryStorageUuid, Op.EQ, self.getUuid()); final List<String> clusterUuids = q.listValue(); if (clusterUuids.isEmpty()) { completion.success(); return; } new LoopAsyncBatch<String>(completion) { boolean success; @Override protected Collection<String> collect() { return clusterUuids; } @Override protected AsyncBatchRunner forEach(String item) { return new AsyncBatchRunner() { @Override public void run(NoErrorCompletion completion) { HypervisorBackend bkd = getHypervisorFactoryByClusterUuid(item).getHypervisorBackend(self); bkd.connectByClusterUuid(item, new Completion(completion) { @Override public void success() { success = true; completion.done(); } @Override public void fail(ErrorCode errorCode) { errors.add(errorCode); completion.done(); } }); } }; } @Override protected void done() { if (success) { completion.success(); } else { completion.fail(errf.stringToOperationError( String.format("failed to connect to all clusters%s", clusterUuids), errors )); } } }.start(); } @Override protected void pingHook(Completion completion) { completion.success(); } @Override protected void syncPhysicalCapacity(ReturnValueCompletion<PhysicalCapacityUsage> completion) { completion.fail(operr("not supported operation")); } @Override public void handleLocalMessage(Message msg) { if (msg instanceof TakeSnapshotMsg) { handle((TakeSnapshotMsg) msg); } else if (msg instanceof BackupVolumeSnapshotFromPrimaryStorageToBackupStorageMsg) { 
handle((BackupVolumeSnapshotFromPrimaryStorageToBackupStorageMsg) msg); } else if (msg instanceof CreateVolumeFromVolumeSnapshotOnPrimaryStorageMsg) { handle((CreateVolumeFromVolumeSnapshotOnPrimaryStorageMsg) msg); } else if (msg instanceof SMPPrimaryStorageHypervisorSpecificMessage) { handle((SMPPrimaryStorageHypervisorSpecificMessage) msg); } else if (msg instanceof UploadBitsToBackupStorageMsg) { handle((UploadBitsToBackupStorageMsg) msg); } else if (msg instanceof CreateTemporaryVolumeFromSnapshotMsg) { handle((CreateTemporaryVolumeFromSnapshotMsg) msg); } else if (msg instanceof SMPRecalculatePrimaryStorageCapacityMsg) { handle((SMPRecalculatePrimaryStorageCapacityMsg) msg); } else { super.handleLocalMessage(msg); } } protected void handle(SMPRecalculatePrimaryStorageCapacityMsg msg) { if (msg.isRelease()) { doReleasePrimaryStorageCapacity(); } else { RecalculatePrimaryStorageCapacityMsg rmsg = new RecalculatePrimaryStorageCapacityMsg(); rmsg.setPrimaryStorageUuid(self.getUuid()); bus.makeLocalServiceId(rmsg, PrimaryStorageConstant.SERVICE_ID); bus.send(rmsg); } } private void doReleasePrimaryStorageCapacity() { PrimaryStorageCapacityUpdater updater = new PrimaryStorageCapacityUpdater(self.getUuid()); updater.run(new PrimaryStorageCapacityUpdaterRunnable() { @Override public PrimaryStorageCapacityVO call(PrimaryStorageCapacityVO cap) { cap.setAvailableCapacity(0L); cap.setTotalCapacity(0L); cap.setTotalPhysicalCapacity(0L); cap.setAvailablePhysicalCapacity(0L); cap.setSystemUsedCapacity(0L); return cap; } }); } private void handle(final CreateTemporaryVolumeFromSnapshotMsg msg) { HypervisorFactory f = getHypervisorFactoryByHypervisorType(msg.getHypervisorType()); HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new ReturnValueCompletion<CreateTemporaryVolumeFromSnapshotReply>(msg) { @Override public void success(CreateTemporaryVolumeFromSnapshotReply returnValue) { bus.reply(msg, returnValue); } @Override public void fail(ErrorCode 
errorCode) { CreateTemporaryVolumeFromSnapshotReply reply = new CreateTemporaryVolumeFromSnapshotReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } private void handle(final UploadBitsToBackupStorageMsg msg) { HypervisorFactory f = getHypervisorFactoryByHypervisorType(msg.getHypervisorType()); HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handle(msg, new ReturnValueCompletion<UploadBitsToBackupStorageReply>(msg) { @Override public void success(UploadBitsToBackupStorageReply reply) { bus.reply(msg, reply); } @Override public void fail(ErrorCode errorCode) { UploadBitsToBackupStorageReply reply = new UploadBitsToBackupStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } private void handle(SMPPrimaryStorageHypervisorSpecificMessage msg) { HypervisorFactory f = getHypervisorFactoryByHypervisorType(msg.getHypervisorType()); HypervisorBackend bkd = f.getHypervisorBackend(self); bkd.handleHypervisorSpecificMessage(msg); } protected void handle(final MergeVolumeSnapshotOnPrimaryStorageMsg msg) { HypervisorBackend bkd = getHypervisorBackendByVolumeUuid(msg.getTo().getUuid()); bkd.handle(msg, new ReturnValueCompletion<MergeVolumeSnapshotOnPrimaryStorageReply>(msg) { @Override public void success(MergeVolumeSnapshotOnPrimaryStorageReply returnValue) { bus.reply(msg, returnValue); } @Override public void fail(ErrorCode errorCode) { MergeVolumeSnapshotOnPrimaryStorageReply reply = new MergeVolumeSnapshotOnPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } private void handle(final CreateVolumeFromVolumeSnapshotOnPrimaryStorageMsg msg) { HypervisorBackend bkd = getHypervisorBackendByVolumeUuid(msg.getSnapshot().getVolumeUuid()); bkd.handle(msg, new ReturnValueCompletion<CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply>(msg) { @Override public void success(CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply returnValue) { bus.reply(msg, returnValue); } @Override public void fail(ErrorCode 
errorCode) { CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply reply = new CreateVolumeFromVolumeSnapshotOnPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } private void handle(BackupVolumeSnapshotFromPrimaryStorageToBackupStorageMsg msg) { HypervisorBackend bkd = getHypervisorBackendByVolumeUuid(msg.getSnapshot().getVolumeUuid()); bkd.handle(msg, new ReturnValueCompletion<BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply>(msg) { @Override public void success(BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply returnValue) { bus.reply(msg, returnValue); } @Override public void fail(ErrorCode errorCode) { BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply reply = new BackupVolumeSnapshotFromPrimaryStorageToBackupStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } protected void handle(final RevertVolumeFromSnapshotOnPrimaryStorageMsg msg) { HypervisorBackend bkd = getHypervisorBackendByVolumeUuid(msg.getVolume().getUuid()); bkd.handle(msg, new ReturnValueCompletion<RevertVolumeFromSnapshotOnPrimaryStorageReply>(msg) { @Override public void success(RevertVolumeFromSnapshotOnPrimaryStorageReply returnValue) { bus.reply(msg, returnValue); } @Override public void fail(ErrorCode errorCode) { RevertVolumeFromSnapshotOnPrimaryStorageReply reply = new RevertVolumeFromSnapshotOnPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } protected void handle(final ReInitRootVolumeFromTemplateOnPrimaryStorageMsg msg) { HypervisorBackend bkd = getHypervisorBackendByVolumeUuid(msg.getVolume().getUuid()); bkd.handle(msg, new ReturnValueCompletion<ReInitRootVolumeFromTemplateOnPrimaryStorageReply>(msg) { @Override public void success(ReInitRootVolumeFromTemplateOnPrimaryStorageReply returnValue) { bus.reply(msg, returnValue); } @Override public void fail(ErrorCode errorCode) { ReInitRootVolumeFromTemplateOnPrimaryStorageReply reply = new 
ReInitRootVolumeFromTemplateOnPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } @Override protected void handle(final DeleteSnapshotOnPrimaryStorageMsg msg) { HypervisorBackend bkd = getHypervisorBackendByVolumeUuid(msg.getSnapshot().getVolumeUuid()); bkd.handle(msg, new ReturnValueCompletion<DeleteSnapshotOnPrimaryStorageReply>(msg) { @Override public void success(DeleteSnapshotOnPrimaryStorageReply returnValue) { bus.reply(msg, returnValue); } @Override public void fail(ErrorCode errorCode) { DeleteSnapshotOnPrimaryStorageReply reply = new DeleteSnapshotOnPrimaryStorageReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } @Transactional(readOnly = true) private String getAvailableHostUuidForOperation() { String sql = "select host.uuid from PrimaryStorageClusterRefVO ref, HostVO host where" + " ref.clusterUuid = host.clusterUuid and ref.primaryStorageUuid = :psUuid and host.status = :hstatus" + " and host.state = :hstate"; TypedQuery<String> q = dbf.getEntityManager().createQuery(sql, String.class); q.setParameter("psUuid", self.getUuid()); q.setParameter("hstatus", HostStatus.Connected); q.setParameter("hstate", HostState.Enabled); List<String> hostUuids = q.getResultList(); if (hostUuids.isEmpty()) { return null; } Collections.shuffle(hostUuids); return hostUuids.get(0); } protected HypervisorBackend getHypervisorBackendByVolumeUuid(String volUuid) { SimpleQuery<VolumeVO> q = dbf.createQuery(VolumeVO.class); q.select(VolumeVO_.format); q.add(VolumeVO_.uuid, Op.EQ, volUuid); String format = q.findValue(); if (format == null) { throw new CloudRuntimeException(String.format("cannot find the volume[uuid:%s]", volUuid)); } HypervisorType type = VolumeFormat.getMasterHypervisorTypeByVolumeFormat(format); HypervisorFactory f = getHypervisorFactoryByHypervisorType(type.toString()); return f.getHypervisorBackend(self); } private void handle(final TakeSnapshotMsg msg) { final VolumeSnapshotInventory sp = 
msg.getStruct().getCurrent(); HypervisorBackend bkd = getHypervisorBackendByVolumeUuid(sp.getVolumeUuid()); bkd.handle(msg, new ReturnValueCompletion<TakeSnapshotReply>(msg) { @Override public void success(TakeSnapshotReply returnValue) { bus.reply(msg, returnValue); } @Override public void fail(ErrorCode errorCode) { TakeSnapshotReply reply = new TakeSnapshotReply(); reply.setError(errorCode); bus.reply(msg, reply); } }); } }
/* * Copyright (c) 2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.identity.oauth2.token; import org.apache.amber.oauth2.common.error.OAuthError; import org.apache.amber.oauth2.common.message.types.GrantType; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.oltu.openidconnect.as.util.OIDCAuthzServerUtil; import org.wso2.carbon.identity.base.IdentityException; import org.wso2.carbon.identity.core.model.OAuthAppDO; import org.wso2.carbon.identity.oauth.cache.AppInfoCache; import org.wso2.carbon.identity.oauth.cache.AuthorizationGrantCache; import org.wso2.carbon.identity.oauth.cache.AuthorizationGrantCacheKey; import org.wso2.carbon.identity.oauth.cache.CacheEntry; import org.wso2.carbon.identity.oauth.common.OAuthConstants; import org.wso2.carbon.identity.oauth.common.exception.InvalidOAuthClientException; import org.wso2.carbon.identity.oauth.config.OAuthServerConfiguration; import org.wso2.carbon.identity.oauth.dao.OAuthAppDAO; import org.wso2.carbon.identity.oauth2.IdentityOAuth2Exception; import org.wso2.carbon.identity.oauth2.ResponseHeader; import org.wso2.carbon.identity.oauth2.dto.OAuth2AccessTokenReqDTO; import org.wso2.carbon.identity.oauth2.dto.OAuth2AccessTokenRespDTO; import org.wso2.carbon.identity.oauth2.token.handlers.clientauth.ClientAuthenticationHandler; import 
org.wso2.carbon.identity.oauth2.token.handlers.grant.AuthorizationGrantHandler; import org.wso2.carbon.identity.oauth2.util.OAuth2Util; import org.wso2.carbon.identity.openidconnect.IDTokenBuilder; import org.wso2.carbon.utils.CarbonUtils; import java.util.ArrayList; import java.util.Hashtable; import java.util.List; import java.util.Map; public class AccessTokenIssuer { private static AccessTokenIssuer instance; private static Log log = LogFactory.getLog(AccessTokenIssuer.class); private Map<String, AuthorizationGrantHandler> authzGrantHandlers = new Hashtable<String, AuthorizationGrantHandler>(); private List<ClientAuthenticationHandler> clientAuthenticationHandlers = new ArrayList<ClientAuthenticationHandler>(); private AppInfoCache appInfoCache; private AccessTokenIssuer() throws IdentityOAuth2Exception { authzGrantHandlers = OAuthServerConfiguration.getInstance().getSupportedGrantTypes(); clientAuthenticationHandlers = OAuthServerConfiguration.getInstance().getSupportedClientAuthHandlers(); appInfoCache = AppInfoCache.getInstance(); if (appInfoCache != null) { if (log.isDebugEnabled()) { log.debug("Successfully created AppInfoCache under " + OAuthConstants.OAUTH_CACHE_MANAGER); } } else { log.error("Error while creating AppInfoCache"); } } public static AccessTokenIssuer getInstance() throws IdentityOAuth2Exception { CarbonUtils.checkSecurity(); if (instance == null) { synchronized (AccessTokenIssuer.class) { if (instance == null) { instance = new AccessTokenIssuer(); } } } return instance; } public OAuth2AccessTokenRespDTO issue(OAuth2AccessTokenReqDTO tokenReqDTO) throws IdentityException, InvalidOAuthClientException { String grantType = tokenReqDTO.getGrantType(); OAuth2AccessTokenRespDTO tokenRespDTO; AuthorizationGrantHandler authzGrantHandler = authzGrantHandlers.get(grantType); OAuthTokenReqMessageContext tokReqMsgCtx = new OAuthTokenReqMessageContext(tokenReqDTO); // If multiple client authenticaton methods have been used the authorization server must 
reject the request int authenticatorHandlerIndex = -1; for (int i = 0; i < clientAuthenticationHandlers.size(); i++) { if (clientAuthenticationHandlers.get(i).canAuthenticate(tokReqMsgCtx)) { if (authenticatorHandlerIndex > -1) { log.debug("Multiple Client Authentication Methods used for client id : " + tokenReqDTO.getClientId()); tokenRespDTO = handleError( OAuthConstants.OAuthError.TokenResponse.UNSUPPORTED_CLIENT_AUTHENTICATION_METHOD, "Unsupported Client Authentication Method!", tokenReqDTO); setResponseHeaders(tokReqMsgCtx, tokenRespDTO); return tokenRespDTO; } authenticatorHandlerIndex = i; } } if (authenticatorHandlerIndex < 0 && authzGrantHandler.isConfidentialClient()) { log.debug("Confidential client cannot be authenticated for client id : " + tokenReqDTO.getClientId()); tokenRespDTO = handleError( OAuthConstants.OAuthError.TokenResponse.UNSUPPORTED_CLIENT_AUTHENTICATION_METHOD, "Unsupported Client Authentication Method!", tokenReqDTO); setResponseHeaders(tokReqMsgCtx, tokenRespDTO); return tokenRespDTO; } ClientAuthenticationHandler clientAuthHandler = null; if (authenticatorHandlerIndex > -1) { clientAuthHandler = clientAuthenticationHandlers.get(authenticatorHandlerIndex); } boolean isAuthenticated; if (clientAuthHandler != null) { isAuthenticated = clientAuthHandler.authenticateClient(tokReqMsgCtx); } else { isAuthenticated = true; } // loading the stored application data OAuthAppDO oAuthAppDO = getAppInformation(tokenReqDTO); String applicationName = oAuthAppDO.getApplicationName(); String userName = tokReqMsgCtx.getAuthorizedUser(); if (!authzGrantHandler.isOfTypeApplicationUser()) { tokReqMsgCtx.setAuthorizedUser(oAuthAppDO.getUserName()); tokReqMsgCtx.setTenantID(oAuthAppDO.getTenantId()); } boolean isValidGrant = authzGrantHandler.validateGrant(tokReqMsgCtx); boolean isAuthorized = authzGrantHandler.authorizeAccessDelegation(tokReqMsgCtx); boolean isValidScope = authzGrantHandler.validateScope(tokReqMsgCtx); //boolean isAuthenticated = true; if 
(!isAuthenticated) { //Do not change this log format as these logs use by external applications log.debug("Client Authentication Failed for client id=" + tokenReqDTO.getClientId() + ", " + "user-name=" + userName + " to application=" + applicationName); tokenRespDTO = handleError(OAuthError.TokenResponse.INVALID_CLIENT, "Client credentials are invalid.", tokenReqDTO); setResponseHeaders(tokReqMsgCtx, tokenRespDTO); return tokenRespDTO; } //boolean isValidGrant = true; if (!isValidGrant) { //Do not change this log format as these logs use by external applications log.debug("Invalid Grant provided by the client, id=" + tokenReqDTO.getClientId() + ", " + "" + "user-name=" + userName + " to application=" + applicationName); tokenRespDTO = handleError(OAuthError.TokenResponse.INVALID_GRANT, "Provided Authorization Grant is invalid.", tokenReqDTO); setResponseHeaders(tokReqMsgCtx, tokenRespDTO); return tokenRespDTO; } //boolean isAuthorized = true; if (!isAuthorized) { //Do not change this log format as these logs use by external applications log.debug("Resource owner is not authorized to grant access, client-id=" + tokenReqDTO.getClientId() + " " + "user-name=" + userName + " to application=" + applicationName); tokenRespDTO = handleError(OAuthError.TokenResponse.UNAUTHORIZED_CLIENT, "Unauthorized Client!", tokenReqDTO); setResponseHeaders(tokReqMsgCtx, tokenRespDTO); return tokenRespDTO; } //boolean isValidScope = true; if (!isValidScope) { //Do not change this log format as these logs use by external applications log.debug("Invalid Scope provided. 
client-id=" + tokenReqDTO.getClientId() + " " + "" + "user-name=" + userName + " to application=" + applicationName); tokenRespDTO = handleError(OAuthError.TokenResponse.INVALID_SCOPE, "Invalid Scope!", tokenReqDTO); setResponseHeaders(tokReqMsgCtx, tokenRespDTO); return tokenRespDTO; } tokenRespDTO = authzGrantHandler.issue(tokReqMsgCtx); tokenRespDTO.setCallbackURI(oAuthAppDO.getCallbackUrl()); String[] scopes = tokReqMsgCtx.getScope(); if (scopes != null && scopes.length > 0) { StringBuilder scopeString = new StringBuilder(""); for (String scope : scopes) { scopeString.append(scope); scopeString.append(" "); } tokenRespDTO.setAuthorizedScopes(scopeString.toString().trim()); } setResponseHeaders(tokReqMsgCtx, tokenRespDTO); //Do not change this log format as these logs use by external applications if (log.isDebugEnabled()) { log.debug("Access Token issued to client. client-id=" + tokenReqDTO.getClientId() + " " + "" + "user-name=" + userName + " to application=" + applicationName); } if (tokReqMsgCtx.getScope() != null && OIDCAuthzServerUtil.isOIDCAuthzRequest(tokReqMsgCtx.getScope())) { IDTokenBuilder builder = OAuthServerConfiguration.getInstance().getOpenIDConnectIDTokenBuilder(); tokenRespDTO.setIDToken(builder.buildIDToken(tokReqMsgCtx, tokenRespDTO)); } if (tokenReqDTO.getGrantType().equals(GrantType.AUTHORIZATION_CODE.toString())) { addUserAttributesToCache(tokenReqDTO, tokenRespDTO); } return tokenRespDTO; } private void addUserAttributesToCache(OAuth2AccessTokenReqDTO tokenReqDTO, OAuth2AccessTokenRespDTO tokenRespDTO) { AuthorizationGrantCacheKey oldCacheKey = new AuthorizationGrantCacheKey(tokenReqDTO.getAuthorizationCode()); //checking getUserAttributesId vale of cacheKey before retrieve entry from cache as it causes to NPE if (oldCacheKey.getUserAttributesId() != null) { CacheEntry authorizationGrantCacheEntry = AuthorizationGrantCache.getInstance() .getValueFromCache(oldCacheKey); AuthorizationGrantCacheKey newCacheKey = new 
AuthorizationGrantCacheKey(tokenRespDTO.getAccessToken()); AuthorizationGrantCache.getInstance().addToCache(newCacheKey, authorizationGrantCacheEntry); AuthorizationGrantCache.getInstance().clearCacheEntry(oldCacheKey); } } private OAuthAppDO getAppInformation(OAuth2AccessTokenReqDTO tokenReqDTO) throws IdentityOAuth2Exception, InvalidOAuthClientException { OAuthAppDO oAuthAppDO = appInfoCache.getValueFromCache(tokenReqDTO.getClientId()); if (oAuthAppDO != null) { return oAuthAppDO; } else { oAuthAppDO = new OAuthAppDAO().getAppInformation(tokenReqDTO.getClientId()); appInfoCache.addToCache(tokenReqDTO.getClientId(), oAuthAppDO); return oAuthAppDO; } } private OAuth2AccessTokenRespDTO handleError(String errorCode, String errorMsg, OAuth2AccessTokenReqDTO tokenReqDTO) { if (log.isDebugEnabled()) { log.debug("OAuth-Error-Code=" + errorCode + " client-id=" + tokenReqDTO.getClientId() + " grant-type=" + tokenReqDTO.getGrantType() + " scope=" + OAuth2Util.buildScopeString(tokenReqDTO.getScope())); } OAuth2AccessTokenRespDTO tokenRespDTO; tokenRespDTO = new OAuth2AccessTokenRespDTO(); tokenRespDTO.setError(true); tokenRespDTO.setErrorCode(errorCode); tokenRespDTO.setErrorMsg(errorMsg); return tokenRespDTO; } private void setResponseHeaders(OAuthTokenReqMessageContext tokReqMsgCtx, OAuth2AccessTokenRespDTO tokenRespDTO) { if (tokReqMsgCtx.getProperty("RESPONSE_HEADERS") != null) { tokenRespDTO.setResponseHeaders((ResponseHeader[]) tokReqMsgCtx.getProperty("RESPONSE_HEADERS")); } } }